diff --git "a/trainer_state.json" "b/trainer_state.json" new file mode 100644--- /dev/null +++ "b/trainer_state.json" @@ -0,0 +1,111805 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 1.0, + "eval_steps": 1000, + "global_step": 15949, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 6.269985579033168e-05, + "grad_norm": 0.9967993497848511, + "learning_rate": 4.1753653444676405e-08, + "loss": 2.6503, + "step": 1 + }, + { + "epoch": 0.00012539971158066337, + "grad_norm": 1.0711665153503418, + "learning_rate": 8.350730688935281e-08, + "loss": 2.4888, + "step": 2 + }, + { + "epoch": 0.00018809956737099505, + "grad_norm": 1.0414377450942993, + "learning_rate": 1.2526096033402926e-07, + "loss": 2.4691, + "step": 3 + }, + { + "epoch": 0.00025079942316132673, + "grad_norm": 0.9348357319831848, + "learning_rate": 1.6701461377870562e-07, + "loss": 2.4192, + "step": 4 + }, + { + "epoch": 0.0003134992789516584, + "grad_norm": 1.1945528984069824, + "learning_rate": 2.0876826722338207e-07, + "loss": 2.5575, + "step": 5 + }, + { + "epoch": 0.0003761991347419901, + "grad_norm": 1.0509024858474731, + "learning_rate": 2.505219206680585e-07, + "loss": 2.4228, + "step": 6 + }, + { + "epoch": 0.00043889899053232176, + "grad_norm": 1.1407896280288696, + "learning_rate": 2.9227557411273485e-07, + "loss": 2.4486, + "step": 7 + }, + { + "epoch": 0.0005015988463226535, + "grad_norm": 1.1219167709350586, + "learning_rate": 3.3402922755741124e-07, + "loss": 2.5565, + "step": 8 + }, + { + "epoch": 0.0005642987021129851, + "grad_norm": 1.1532440185546875, + "learning_rate": 3.757828810020877e-07, + "loss": 2.4757, + "step": 9 + }, + { + "epoch": 0.0006269985579033168, + "grad_norm": 1.0964901447296143, + "learning_rate": 4.1753653444676413e-07, + "loss": 2.6395, + "step": 10 + }, + { + "epoch": 0.0006896984136936485, + "grad_norm": 1.1684867143630981, + "learning_rate": 4.592901878914405e-07, + "loss": 2.6132, + "step": 11 + }, + { + "epoch": 0.0007523982694839802, + "grad_norm": 1.212607741355896, + "learning_rate": 5.01043841336117e-07, + "loss": 2.6892, + "step": 12 + }, + { + "epoch": 0.0008150981252743119, + "grad_norm": 1.0299021005630493, + "learning_rate": 5.427974947807934e-07, + "loss": 2.5408, + "step": 13 + }, + { + "epoch": 0.0008777979810646435, + "grad_norm": 0.9966466426849365, + "learning_rate": 5.845511482254697e-07, + "loss": 2.2694, + "step": 14 + }, + { + "epoch": 0.0009404978368549753, + "grad_norm": 1.1323179006576538, + "learning_rate": 6.263048016701462e-07, + "loss": 2.4469, + "step": 15 + }, + { + "epoch": 0.001003197692645307, + "grad_norm": 1.0493723154067993, + "learning_rate": 6.680584551148225e-07, + "loss": 2.4326, + "step": 16 + }, + { + "epoch": 0.0010658975484356386, + "grad_norm": 1.0687230825424194, + "learning_rate": 7.09812108559499e-07, + "loss": 2.4506, + "step": 17 + }, + { + "epoch": 0.0011285974042259703, + "grad_norm": 1.0347912311553955, + "learning_rate": 7.515657620041754e-07, + "loss": 2.3703, + "step": 18 + }, + { + "epoch": 0.001191297260016302, + "grad_norm": 1.0891659259796143, + "learning_rate": 7.933194154488519e-07, + "loss": 2.4047, + "step": 19 + }, + { + "epoch": 0.0012539971158066336, + "grad_norm": 1.0945051908493042, + "learning_rate": 8.350730688935283e-07, + "loss": 2.4363, + "step": 20 + }, + { + "epoch": 0.0013166969715969652, + "grad_norm": 1.0586761236190796, + "learning_rate": 8.768267223382048e-07, + "loss": 2.3807, + "step": 21 + }, + { + 
"epoch": 0.001379396827387297, + "grad_norm": 1.1339507102966309, + "learning_rate": 9.18580375782881e-07, + "loss": 2.5469, + "step": 22 + }, + { + "epoch": 0.0014420966831776288, + "grad_norm": 0.9808893203735352, + "learning_rate": 9.603340292275575e-07, + "loss": 2.4518, + "step": 23 + }, + { + "epoch": 0.0015047965389679604, + "grad_norm": 0.9815647006034851, + "learning_rate": 1.002087682672234e-06, + "loss": 2.3952, + "step": 24 + }, + { + "epoch": 0.001567496394758292, + "grad_norm": 1.155562400817871, + "learning_rate": 1.0438413361169103e-06, + "loss": 2.5554, + "step": 25 + }, + { + "epoch": 0.0016301962505486237, + "grad_norm": 1.0179023742675781, + "learning_rate": 1.0855949895615868e-06, + "loss": 2.4392, + "step": 26 + }, + { + "epoch": 0.0016928961063389554, + "grad_norm": 0.9111185669898987, + "learning_rate": 1.1273486430062631e-06, + "loss": 2.4574, + "step": 27 + }, + { + "epoch": 0.001755595962129287, + "grad_norm": 1.069554090499878, + "learning_rate": 1.1691022964509394e-06, + "loss": 2.51, + "step": 28 + }, + { + "epoch": 0.0018182958179196187, + "grad_norm": 0.9758409261703491, + "learning_rate": 1.210855949895616e-06, + "loss": 2.4383, + "step": 29 + }, + { + "epoch": 0.0018809956737099506, + "grad_norm": 0.8637948036193848, + "learning_rate": 1.2526096033402924e-06, + "loss": 2.3616, + "step": 30 + }, + { + "epoch": 0.0019436955295002822, + "grad_norm": 1.0513721704483032, + "learning_rate": 1.2943632567849687e-06, + "loss": 2.4461, + "step": 31 + }, + { + "epoch": 0.002006395385290614, + "grad_norm": 0.971481204032898, + "learning_rate": 1.336116910229645e-06, + "loss": 2.3418, + "step": 32 + }, + { + "epoch": 0.0020690952410809455, + "grad_norm": 1.0613864660263062, + "learning_rate": 1.3778705636743217e-06, + "loss": 2.4138, + "step": 33 + }, + { + "epoch": 0.002131795096871277, + "grad_norm": 1.0003950595855713, + "learning_rate": 1.419624217118998e-06, + "loss": 2.5472, + "step": 34 + }, + { + "epoch": 0.002194494952661609, + "grad_norm": 0.9751295447349548, + "learning_rate": 1.4613778705636745e-06, + "loss": 2.4862, + "step": 35 + }, + { + "epoch": 0.0022571948084519405, + "grad_norm": 1.0244653224945068, + "learning_rate": 1.5031315240083507e-06, + "loss": 2.5366, + "step": 36 + }, + { + "epoch": 0.002319894664242272, + "grad_norm": 1.0949269533157349, + "learning_rate": 1.544885177453027e-06, + "loss": 2.4916, + "step": 37 + }, + { + "epoch": 0.002382594520032604, + "grad_norm": 0.9809804558753967, + "learning_rate": 1.5866388308977037e-06, + "loss": 2.5742, + "step": 38 + }, + { + "epoch": 0.0024452943758229355, + "grad_norm": 0.9884042739868164, + "learning_rate": 1.62839248434238e-06, + "loss": 2.4928, + "step": 39 + }, + { + "epoch": 0.002507994231613267, + "grad_norm": 0.9333252906799316, + "learning_rate": 1.6701461377870565e-06, + "loss": 2.4945, + "step": 40 + }, + { + "epoch": 0.002570694087403599, + "grad_norm": 1.21970534324646, + "learning_rate": 1.7118997912317328e-06, + "loss": 2.4117, + "step": 41 + }, + { + "epoch": 0.0026333939431939304, + "grad_norm": 0.9486119747161865, + "learning_rate": 1.7536534446764095e-06, + "loss": 2.2696, + "step": 42 + }, + { + "epoch": 0.0026960937989842625, + "grad_norm": 0.9357393383979797, + "learning_rate": 1.7954070981210858e-06, + "loss": 2.4683, + "step": 43 + }, + { + "epoch": 0.002758793654774594, + "grad_norm": 0.9758220314979553, + "learning_rate": 1.837160751565762e-06, + "loss": 2.5524, + "step": 44 + }, + { + "epoch": 0.002821493510564926, + "grad_norm": 1.0653648376464844, + "learning_rate": 
1.8789144050104386e-06, + "loss": 2.5154, + "step": 45 + }, + { + "epoch": 0.0028841933663552575, + "grad_norm": 0.9236341118812561, + "learning_rate": 1.920668058455115e-06, + "loss": 2.3676, + "step": 46 + }, + { + "epoch": 0.002946893222145589, + "grad_norm": 1.2181960344314575, + "learning_rate": 1.9624217118997914e-06, + "loss": 2.4562, + "step": 47 + }, + { + "epoch": 0.003009593077935921, + "grad_norm": 1.040271282196045, + "learning_rate": 2.004175365344468e-06, + "loss": 2.3936, + "step": 48 + }, + { + "epoch": 0.0030722929337262525, + "grad_norm": 0.9469027519226074, + "learning_rate": 2.0459290187891444e-06, + "loss": 2.436, + "step": 49 + }, + { + "epoch": 0.003134992789516584, + "grad_norm": 0.98686283826828, + "learning_rate": 2.0876826722338207e-06, + "loss": 2.5175, + "step": 50 + }, + { + "epoch": 0.003197692645306916, + "grad_norm": 0.9429983496665955, + "learning_rate": 2.129436325678497e-06, + "loss": 2.4689, + "step": 51 + }, + { + "epoch": 0.0032603925010972474, + "grad_norm": 0.9861007928848267, + "learning_rate": 2.1711899791231737e-06, + "loss": 2.4554, + "step": 52 + }, + { + "epoch": 0.003323092356887579, + "grad_norm": 1.1185660362243652, + "learning_rate": 2.21294363256785e-06, + "loss": 2.6261, + "step": 53 + }, + { + "epoch": 0.0033857922126779108, + "grad_norm": 0.910841703414917, + "learning_rate": 2.2546972860125262e-06, + "loss": 2.4456, + "step": 54 + }, + { + "epoch": 0.0034484920684682424, + "grad_norm": 0.9684902429580688, + "learning_rate": 2.2964509394572025e-06, + "loss": 2.3945, + "step": 55 + }, + { + "epoch": 0.003511191924258574, + "grad_norm": 0.9845953583717346, + "learning_rate": 2.338204592901879e-06, + "loss": 2.428, + "step": 56 + }, + { + "epoch": 0.0035738917800489057, + "grad_norm": 0.976205050945282, + "learning_rate": 2.3799582463465555e-06, + "loss": 2.4692, + "step": 57 + }, + { + "epoch": 0.0036365916358392374, + "grad_norm": 1.0082242488861084, + "learning_rate": 2.421711899791232e-06, + "loss": 2.5059, + "step": 58 + }, + { + "epoch": 0.003699291491629569, + "grad_norm": 1.2093454599380493, + "learning_rate": 2.4634655532359085e-06, + "loss": 2.5259, + "step": 59 + }, + { + "epoch": 0.003761991347419901, + "grad_norm": 1.062522292137146, + "learning_rate": 2.505219206680585e-06, + "loss": 2.4599, + "step": 60 + }, + { + "epoch": 0.003824691203210233, + "grad_norm": 0.9775207042694092, + "learning_rate": 2.5469728601252615e-06, + "loss": 2.4225, + "step": 61 + }, + { + "epoch": 0.0038873910590005644, + "grad_norm": 1.0343270301818848, + "learning_rate": 2.5887265135699374e-06, + "loss": 2.5013, + "step": 62 + }, + { + "epoch": 0.003950090914790896, + "grad_norm": 1.0336889028549194, + "learning_rate": 2.630480167014614e-06, + "loss": 2.5241, + "step": 63 + }, + { + "epoch": 0.004012790770581228, + "grad_norm": 1.0296003818511963, + "learning_rate": 2.67223382045929e-06, + "loss": 2.4329, + "step": 64 + }, + { + "epoch": 0.004075490626371559, + "grad_norm": 1.0824295282363892, + "learning_rate": 2.7139874739039666e-06, + "loss": 2.372, + "step": 65 + }, + { + "epoch": 0.004138190482161891, + "grad_norm": 1.0267020463943481, + "learning_rate": 2.7557411273486434e-06, + "loss": 2.4339, + "step": 66 + }, + { + "epoch": 0.004200890337952222, + "grad_norm": 0.9898827075958252, + "learning_rate": 2.7974947807933196e-06, + "loss": 2.3381, + "step": 67 + }, + { + "epoch": 0.004263590193742554, + "grad_norm": 0.8969103097915649, + "learning_rate": 2.839248434237996e-06, + "loss": 2.3229, + "step": 68 + }, + { + "epoch": 
0.0043262900495328865, + "grad_norm": 0.9571344256401062, + "learning_rate": 2.8810020876826722e-06, + "loss": 2.3976, + "step": 69 + }, + { + "epoch": 0.004388989905323218, + "grad_norm": 1.0572232007980347, + "learning_rate": 2.922755741127349e-06, + "loss": 2.2933, + "step": 70 + }, + { + "epoch": 0.00445168976111355, + "grad_norm": 0.9999732375144958, + "learning_rate": 2.9645093945720256e-06, + "loss": 2.2945, + "step": 71 + }, + { + "epoch": 0.004514389616903881, + "grad_norm": 0.9934488534927368, + "learning_rate": 3.0062630480167015e-06, + "loss": 2.3743, + "step": 72 + }, + { + "epoch": 0.004577089472694213, + "grad_norm": 0.974748432636261, + "learning_rate": 3.048016701461378e-06, + "loss": 2.3807, + "step": 73 + }, + { + "epoch": 0.004639789328484544, + "grad_norm": 0.9322019815444946, + "learning_rate": 3.089770354906054e-06, + "loss": 2.3503, + "step": 74 + }, + { + "epoch": 0.004702489184274876, + "grad_norm": 1.0754220485687256, + "learning_rate": 3.1315240083507308e-06, + "loss": 2.3963, + "step": 75 + }, + { + "epoch": 0.004765189040065208, + "grad_norm": 0.8638653755187988, + "learning_rate": 3.1732776617954075e-06, + "loss": 2.0859, + "step": 76 + }, + { + "epoch": 0.00482788889585554, + "grad_norm": 0.9467566013336182, + "learning_rate": 3.2150313152400838e-06, + "loss": 2.2798, + "step": 77 + }, + { + "epoch": 0.004890588751645871, + "grad_norm": 0.9386559724807739, + "learning_rate": 3.25678496868476e-06, + "loss": 2.3277, + "step": 78 + }, + { + "epoch": 0.004953288607436203, + "grad_norm": 1.0520833730697632, + "learning_rate": 3.2985386221294363e-06, + "loss": 2.2284, + "step": 79 + }, + { + "epoch": 0.005015988463226534, + "grad_norm": 0.999544084072113, + "learning_rate": 3.340292275574113e-06, + "loss": 2.3167, + "step": 80 + }, + { + "epoch": 0.005078688319016866, + "grad_norm": 1.0557825565338135, + "learning_rate": 3.3820459290187898e-06, + "loss": 2.4481, + "step": 81 + }, + { + "epoch": 0.005141388174807198, + "grad_norm": 1.0022861957550049, + "learning_rate": 3.4237995824634656e-06, + "loss": 2.289, + "step": 82 + }, + { + "epoch": 0.00520408803059753, + "grad_norm": 0.8973371386528015, + "learning_rate": 3.4655532359081423e-06, + "loss": 2.4027, + "step": 83 + }, + { + "epoch": 0.005266787886387861, + "grad_norm": 0.8840094208717346, + "learning_rate": 3.507306889352819e-06, + "loss": 2.2119, + "step": 84 + }, + { + "epoch": 0.005329487742178193, + "grad_norm": 1.0561867952346802, + "learning_rate": 3.549060542797495e-06, + "loss": 2.4136, + "step": 85 + }, + { + "epoch": 0.005392187597968525, + "grad_norm": 0.9274253845214844, + "learning_rate": 3.5908141962421716e-06, + "loss": 2.2111, + "step": 86 + }, + { + "epoch": 0.005454887453758856, + "grad_norm": 1.184488296508789, + "learning_rate": 3.632567849686848e-06, + "loss": 2.2843, + "step": 87 + }, + { + "epoch": 0.005517587309549188, + "grad_norm": 0.9451871514320374, + "learning_rate": 3.674321503131524e-06, + "loss": 2.3264, + "step": 88 + }, + { + "epoch": 0.00558028716533952, + "grad_norm": 1.0250643491744995, + "learning_rate": 3.716075156576201e-06, + "loss": 2.2453, + "step": 89 + }, + { + "epoch": 0.005642987021129852, + "grad_norm": 0.93082195520401, + "learning_rate": 3.757828810020877e-06, + "loss": 2.1852, + "step": 90 + }, + { + "epoch": 0.005705686876920183, + "grad_norm": 1.0534427165985107, + "learning_rate": 3.799582463465554e-06, + "loss": 2.3456, + "step": 91 + }, + { + "epoch": 0.005768386732710515, + "grad_norm": 0.9255128502845764, + "learning_rate": 3.84133611691023e-06, + 
"loss": 2.242, + "step": 92 + }, + { + "epoch": 0.005831086588500846, + "grad_norm": 1.0715028047561646, + "learning_rate": 3.883089770354906e-06, + "loss": 2.3232, + "step": 93 + }, + { + "epoch": 0.005893786444291178, + "grad_norm": 0.8962722420692444, + "learning_rate": 3.924843423799583e-06, + "loss": 2.1898, + "step": 94 + }, + { + "epoch": 0.0059564863000815095, + "grad_norm": 0.9496030211448669, + "learning_rate": 3.9665970772442595e-06, + "loss": 2.2958, + "step": 95 + }, + { + "epoch": 0.006019186155871842, + "grad_norm": 0.9951572418212891, + "learning_rate": 4.008350730688936e-06, + "loss": 2.1861, + "step": 96 + }, + { + "epoch": 0.006081886011662173, + "grad_norm": 1.0552579164505005, + "learning_rate": 4.050104384133612e-06, + "loss": 2.2548, + "step": 97 + }, + { + "epoch": 0.006144585867452505, + "grad_norm": 0.985287070274353, + "learning_rate": 4.091858037578289e-06, + "loss": 2.2435, + "step": 98 + }, + { + "epoch": 0.006207285723242836, + "grad_norm": 1.0381906032562256, + "learning_rate": 4.1336116910229655e-06, + "loss": 2.4271, + "step": 99 + }, + { + "epoch": 0.006269985579033168, + "grad_norm": 0.9367284178733826, + "learning_rate": 4.175365344467641e-06, + "loss": 2.165, + "step": 100 + }, + { + "epoch": 0.0063326854348234995, + "grad_norm": 1.0043895244598389, + "learning_rate": 4.217118997912318e-06, + "loss": 2.2083, + "step": 101 + }, + { + "epoch": 0.006395385290613832, + "grad_norm": 0.9036383628845215, + "learning_rate": 4.258872651356994e-06, + "loss": 2.2541, + "step": 102 + }, + { + "epoch": 0.006458085146404164, + "grad_norm": 1.0184564590454102, + "learning_rate": 4.300626304801671e-06, + "loss": 2.2005, + "step": 103 + }, + { + "epoch": 0.006520785002194495, + "grad_norm": 0.9729160666465759, + "learning_rate": 4.342379958246347e-06, + "loss": 2.2979, + "step": 104 + }, + { + "epoch": 0.006583484857984827, + "grad_norm": 1.03202486038208, + "learning_rate": 4.384133611691023e-06, + "loss": 2.4243, + "step": 105 + }, + { + "epoch": 0.006646184713775158, + "grad_norm": 1.0540050268173218, + "learning_rate": 4.4258872651357e-06, + "loss": 2.208, + "step": 106 + }, + { + "epoch": 0.00670888456956549, + "grad_norm": 0.9765122532844543, + "learning_rate": 4.467640918580376e-06, + "loss": 2.268, + "step": 107 + }, + { + "epoch": 0.0067715844253558215, + "grad_norm": 0.9441769123077393, + "learning_rate": 4.5093945720250525e-06, + "loss": 2.1327, + "step": 108 + }, + { + "epoch": 0.006834284281146154, + "grad_norm": 0.9583860039710999, + "learning_rate": 4.551148225469729e-06, + "loss": 2.1924, + "step": 109 + }, + { + "epoch": 0.006896984136936485, + "grad_norm": 0.9766218066215515, + "learning_rate": 4.592901878914405e-06, + "loss": 2.2658, + "step": 110 + }, + { + "epoch": 0.006959683992726817, + "grad_norm": 1.069108486175537, + "learning_rate": 4.634655532359082e-06, + "loss": 2.1737, + "step": 111 + }, + { + "epoch": 0.007022383848517148, + "grad_norm": 0.994482696056366, + "learning_rate": 4.676409185803758e-06, + "loss": 2.0852, + "step": 112 + }, + { + "epoch": 0.00708508370430748, + "grad_norm": 0.9729325175285339, + "learning_rate": 4.718162839248434e-06, + "loss": 2.1692, + "step": 113 + }, + { + "epoch": 0.0071477835600978114, + "grad_norm": 1.0518100261688232, + "learning_rate": 4.759916492693111e-06, + "loss": 2.3036, + "step": 114 + }, + { + "epoch": 0.0072104834158881435, + "grad_norm": 1.0110325813293457, + "learning_rate": 4.801670146137788e-06, + "loss": 2.3103, + "step": 115 + }, + { + "epoch": 0.007273183271678475, + "grad_norm": 
1.1045182943344116, + "learning_rate": 4.843423799582464e-06, + "loss": 2.2672, + "step": 116 + }, + { + "epoch": 0.007335883127468807, + "grad_norm": 0.9378653764724731, + "learning_rate": 4.88517745302714e-06, + "loss": 2.0802, + "step": 117 + }, + { + "epoch": 0.007398582983259138, + "grad_norm": 1.0254353284835815, + "learning_rate": 4.926931106471817e-06, + "loss": 2.1875, + "step": 118 + }, + { + "epoch": 0.00746128283904947, + "grad_norm": 0.9633567333221436, + "learning_rate": 4.968684759916494e-06, + "loss": 2.2667, + "step": 119 + }, + { + "epoch": 0.007523982694839802, + "grad_norm": 0.9521273970603943, + "learning_rate": 5.01043841336117e-06, + "loss": 2.1304, + "step": 120 + }, + { + "epoch": 0.0075866825506301335, + "grad_norm": 1.2760639190673828, + "learning_rate": 5.0521920668058454e-06, + "loss": 2.1109, + "step": 121 + }, + { + "epoch": 0.007649382406420466, + "grad_norm": 1.1743707656860352, + "learning_rate": 5.093945720250523e-06, + "loss": 2.0993, + "step": 122 + }, + { + "epoch": 0.007712082262210797, + "grad_norm": 1.104156255722046, + "learning_rate": 5.135699373695199e-06, + "loss": 2.0756, + "step": 123 + }, + { + "epoch": 0.007774782118001129, + "grad_norm": 1.0778262615203857, + "learning_rate": 5.177453027139875e-06, + "loss": 2.1037, + "step": 124 + }, + { + "epoch": 0.007837481973791461, + "grad_norm": 0.9676424264907837, + "learning_rate": 5.219206680584552e-06, + "loss": 2.0932, + "step": 125 + }, + { + "epoch": 0.007900181829581791, + "grad_norm": 1.0520482063293457, + "learning_rate": 5.260960334029228e-06, + "loss": 2.0762, + "step": 126 + }, + { + "epoch": 0.007962881685372123, + "grad_norm": 0.9828984141349792, + "learning_rate": 5.302713987473904e-06, + "loss": 2.0143, + "step": 127 + }, + { + "epoch": 0.008025581541162456, + "grad_norm": 1.020370602607727, + "learning_rate": 5.34446764091858e-06, + "loss": 2.2145, + "step": 128 + }, + { + "epoch": 0.008088281396952788, + "grad_norm": 1.0402941703796387, + "learning_rate": 5.3862212943632574e-06, + "loss": 2.0247, + "step": 129 + }, + { + "epoch": 0.008150981252743118, + "grad_norm": 1.0229604244232178, + "learning_rate": 5.427974947807933e-06, + "loss": 2.1403, + "step": 130 + }, + { + "epoch": 0.00821368110853345, + "grad_norm": 1.0654464960098267, + "learning_rate": 5.46972860125261e-06, + "loss": 2.0039, + "step": 131 + }, + { + "epoch": 0.008276380964323782, + "grad_norm": 1.05459725856781, + "learning_rate": 5.511482254697287e-06, + "loss": 1.9616, + "step": 132 + }, + { + "epoch": 0.008339080820114114, + "grad_norm": 1.0279122591018677, + "learning_rate": 5.553235908141963e-06, + "loss": 2.1342, + "step": 133 + }, + { + "epoch": 0.008401780675904445, + "grad_norm": 1.017016887664795, + "learning_rate": 5.594989561586639e-06, + "loss": 2.0282, + "step": 134 + }, + { + "epoch": 0.008464480531694777, + "grad_norm": 1.20408296585083, + "learning_rate": 5.636743215031316e-06, + "loss": 2.1382, + "step": 135 + }, + { + "epoch": 0.008527180387485109, + "grad_norm": 1.0252665281295776, + "learning_rate": 5.678496868475992e-06, + "loss": 2.0855, + "step": 136 + }, + { + "epoch": 0.00858988024327544, + "grad_norm": 1.0058069229125977, + "learning_rate": 5.7202505219206686e-06, + "loss": 1.994, + "step": 137 + }, + { + "epoch": 0.008652580099065773, + "grad_norm": 1.1932650804519653, + "learning_rate": 5.7620041753653444e-06, + "loss": 2.096, + "step": 138 + }, + { + "epoch": 0.008715279954856103, + "grad_norm": 1.0368096828460693, + "learning_rate": 5.803757828810022e-06, + "loss": 2.1142, + "step": 
139 + }, + { + "epoch": 0.008777979810646435, + "grad_norm": 1.2195768356323242, + "learning_rate": 5.845511482254698e-06, + "loss": 1.9273, + "step": 140 + }, + { + "epoch": 0.008840679666436767, + "grad_norm": 1.3561420440673828, + "learning_rate": 5.887265135699374e-06, + "loss": 1.9895, + "step": 141 + }, + { + "epoch": 0.0089033795222271, + "grad_norm": 0.9587535262107849, + "learning_rate": 5.929018789144051e-06, + "loss": 1.9432, + "step": 142 + }, + { + "epoch": 0.00896607937801743, + "grad_norm": 1.261263132095337, + "learning_rate": 5.970772442588727e-06, + "loss": 2.0848, + "step": 143 + }, + { + "epoch": 0.009028779233807762, + "grad_norm": 1.1552629470825195, + "learning_rate": 6.012526096033403e-06, + "loss": 1.9612, + "step": 144 + }, + { + "epoch": 0.009091479089598094, + "grad_norm": 1.1663166284561157, + "learning_rate": 6.0542797494780806e-06, + "loss": 1.9042, + "step": 145 + }, + { + "epoch": 0.009154178945388426, + "grad_norm": 1.1865906715393066, + "learning_rate": 6.096033402922756e-06, + "loss": 1.8924, + "step": 146 + }, + { + "epoch": 0.009216878801178757, + "grad_norm": 1.0863982439041138, + "learning_rate": 6.137787056367432e-06, + "loss": 2.1315, + "step": 147 + }, + { + "epoch": 0.009279578656969089, + "grad_norm": 1.0531682968139648, + "learning_rate": 6.179540709812108e-06, + "loss": 1.9931, + "step": 148 + }, + { + "epoch": 0.00934227851275942, + "grad_norm": 1.1481752395629883, + "learning_rate": 6.221294363256786e-06, + "loss": 1.7561, + "step": 149 + }, + { + "epoch": 0.009404978368549753, + "grad_norm": 1.0352803468704224, + "learning_rate": 6.2630480167014616e-06, + "loss": 2.0781, + "step": 150 + }, + { + "epoch": 0.009467678224340083, + "grad_norm": 1.1782400608062744, + "learning_rate": 6.304801670146138e-06, + "loss": 1.9191, + "step": 151 + }, + { + "epoch": 0.009530378080130415, + "grad_norm": 1.223947286605835, + "learning_rate": 6.346555323590815e-06, + "loss": 1.9274, + "step": 152 + }, + { + "epoch": 0.009593077935920747, + "grad_norm": 1.295857310295105, + "learning_rate": 6.388308977035491e-06, + "loss": 1.9514, + "step": 153 + }, + { + "epoch": 0.00965577779171108, + "grad_norm": 1.1061877012252808, + "learning_rate": 6.4300626304801676e-06, + "loss": 1.9201, + "step": 154 + }, + { + "epoch": 0.009718477647501412, + "grad_norm": 1.2246606349945068, + "learning_rate": 6.471816283924844e-06, + "loss": 1.7818, + "step": 155 + }, + { + "epoch": 0.009781177503291742, + "grad_norm": 1.1734474897384644, + "learning_rate": 6.51356993736952e-06, + "loss": 1.9321, + "step": 156 + }, + { + "epoch": 0.009843877359082074, + "grad_norm": 1.1356160640716553, + "learning_rate": 6.555323590814197e-06, + "loss": 2.0381, + "step": 157 + }, + { + "epoch": 0.009906577214872406, + "grad_norm": 1.3087940216064453, + "learning_rate": 6.597077244258873e-06, + "loss": 1.8519, + "step": 158 + }, + { + "epoch": 0.009969277070662738, + "grad_norm": 1.3664237260818481, + "learning_rate": 6.638830897703549e-06, + "loss": 1.7068, + "step": 159 + }, + { + "epoch": 0.010031976926453069, + "grad_norm": 1.206440806388855, + "learning_rate": 6.680584551148226e-06, + "loss": 1.7494, + "step": 160 + }, + { + "epoch": 0.0100946767822434, + "grad_norm": 1.311210036277771, + "learning_rate": 6.722338204592902e-06, + "loss": 1.9161, + "step": 161 + }, + { + "epoch": 0.010157376638033733, + "grad_norm": 1.177560806274414, + "learning_rate": 6.7640918580375795e-06, + "loss": 1.8291, + "step": 162 + }, + { + "epoch": 0.010220076493824065, + "grad_norm": 1.157193660736084, + 
"learning_rate": 6.805845511482255e-06, + "loss": 2.005, + "step": 163 + }, + { + "epoch": 0.010282776349614395, + "grad_norm": 1.212584137916565, + "learning_rate": 6.847599164926931e-06, + "loss": 1.9554, + "step": 164 + }, + { + "epoch": 0.010345476205404727, + "grad_norm": 1.1762878894805908, + "learning_rate": 6.889352818371609e-06, + "loss": 1.9557, + "step": 165 + }, + { + "epoch": 0.01040817606119506, + "grad_norm": 1.1320064067840576, + "learning_rate": 6.931106471816285e-06, + "loss": 1.7723, + "step": 166 + }, + { + "epoch": 0.010470875916985391, + "grad_norm": 1.396710753440857, + "learning_rate": 6.9728601252609605e-06, + "loss": 1.6212, + "step": 167 + }, + { + "epoch": 0.010533575772775722, + "grad_norm": 1.2250912189483643, + "learning_rate": 7.014613778705638e-06, + "loss": 1.9444, + "step": 168 + }, + { + "epoch": 0.010596275628566054, + "grad_norm": 1.2113361358642578, + "learning_rate": 7.056367432150314e-06, + "loss": 1.7804, + "step": 169 + }, + { + "epoch": 0.010658975484356386, + "grad_norm": 1.1860241889953613, + "learning_rate": 7.09812108559499e-06, + "loss": 1.8715, + "step": 170 + }, + { + "epoch": 0.010721675340146718, + "grad_norm": 1.2954696416854858, + "learning_rate": 7.139874739039666e-06, + "loss": 1.7197, + "step": 171 + }, + { + "epoch": 0.01078437519593705, + "grad_norm": 1.2316635847091675, + "learning_rate": 7.181628392484343e-06, + "loss": 1.9889, + "step": 172 + }, + { + "epoch": 0.01084707505172738, + "grad_norm": 1.1930004358291626, + "learning_rate": 7.223382045929019e-06, + "loss": 1.8838, + "step": 173 + }, + { + "epoch": 0.010909774907517713, + "grad_norm": 1.1981168985366821, + "learning_rate": 7.265135699373696e-06, + "loss": 1.8061, + "step": 174 + }, + { + "epoch": 0.010972474763308045, + "grad_norm": 1.180354118347168, + "learning_rate": 7.3068893528183725e-06, + "loss": 1.677, + "step": 175 + }, + { + "epoch": 0.011035174619098377, + "grad_norm": 1.212662935256958, + "learning_rate": 7.348643006263048e-06, + "loss": 1.8406, + "step": 176 + }, + { + "epoch": 0.011097874474888707, + "grad_norm": 1.2467939853668213, + "learning_rate": 7.390396659707725e-06, + "loss": 1.9419, + "step": 177 + }, + { + "epoch": 0.01116057433067904, + "grad_norm": 1.2484508752822876, + "learning_rate": 7.432150313152402e-06, + "loss": 1.7922, + "step": 178 + }, + { + "epoch": 0.011223274186469371, + "grad_norm": 1.379010796546936, + "learning_rate": 7.473903966597078e-06, + "loss": 1.7706, + "step": 179 + }, + { + "epoch": 0.011285974042259703, + "grad_norm": 1.3610695600509644, + "learning_rate": 7.515657620041754e-06, + "loss": 1.8213, + "step": 180 + }, + { + "epoch": 0.011348673898050034, + "grad_norm": 1.3827191591262817, + "learning_rate": 7.55741127348643e-06, + "loss": 1.9397, + "step": 181 + }, + { + "epoch": 0.011411373753840366, + "grad_norm": 1.3725078105926514, + "learning_rate": 7.599164926931108e-06, + "loss": 1.6847, + "step": 182 + }, + { + "epoch": 0.011474073609630698, + "grad_norm": 1.361005187034607, + "learning_rate": 7.640918580375784e-06, + "loss": 1.8681, + "step": 183 + }, + { + "epoch": 0.01153677346542103, + "grad_norm": 1.2094449996948242, + "learning_rate": 7.68267223382046e-06, + "loss": 1.8126, + "step": 184 + }, + { + "epoch": 0.01159947332121136, + "grad_norm": 1.3203554153442383, + "learning_rate": 7.724425887265137e-06, + "loss": 2.0338, + "step": 185 + }, + { + "epoch": 0.011662173177001692, + "grad_norm": 1.263249397277832, + "learning_rate": 7.766179540709812e-06, + "loss": 1.6861, + "step": 186 + }, + { + "epoch": 
0.011724873032792025, + "grad_norm": 1.24921452999115, + "learning_rate": 7.807933194154489e-06, + "loss": 1.8486, + "step": 187 + }, + { + "epoch": 0.011787572888582357, + "grad_norm": 1.3264135122299194, + "learning_rate": 7.849686847599166e-06, + "loss": 1.854, + "step": 188 + }, + { + "epoch": 0.011850272744372689, + "grad_norm": 1.2219328880310059, + "learning_rate": 7.891440501043842e-06, + "loss": 1.651, + "step": 189 + }, + { + "epoch": 0.011912972600163019, + "grad_norm": 1.3236995935440063, + "learning_rate": 7.933194154488519e-06, + "loss": 1.7987, + "step": 190 + }, + { + "epoch": 0.011975672455953351, + "grad_norm": 1.3293006420135498, + "learning_rate": 7.974947807933194e-06, + "loss": 1.6247, + "step": 191 + }, + { + "epoch": 0.012038372311743683, + "grad_norm": 1.3775371313095093, + "learning_rate": 8.016701461377872e-06, + "loss": 1.6492, + "step": 192 + }, + { + "epoch": 0.012101072167534015, + "grad_norm": 1.2720634937286377, + "learning_rate": 8.058455114822547e-06, + "loss": 1.8926, + "step": 193 + }, + { + "epoch": 0.012163772023324346, + "grad_norm": 1.2621182203292847, + "learning_rate": 8.100208768267224e-06, + "loss": 1.765, + "step": 194 + }, + { + "epoch": 0.012226471879114678, + "grad_norm": 1.4317082166671753, + "learning_rate": 8.1419624217119e-06, + "loss": 1.7005, + "step": 195 + }, + { + "epoch": 0.01228917173490501, + "grad_norm": 1.344675064086914, + "learning_rate": 8.183716075156577e-06, + "loss": 1.8575, + "step": 196 + }, + { + "epoch": 0.012351871590695342, + "grad_norm": 1.5069423913955688, + "learning_rate": 8.225469728601253e-06, + "loss": 1.9589, + "step": 197 + }, + { + "epoch": 0.012414571446485672, + "grad_norm": 1.3297098875045776, + "learning_rate": 8.267223382045931e-06, + "loss": 1.6608, + "step": 198 + }, + { + "epoch": 0.012477271302276004, + "grad_norm": 1.322779893875122, + "learning_rate": 8.308977035490606e-06, + "loss": 1.7165, + "step": 199 + }, + { + "epoch": 0.012539971158066337, + "grad_norm": 1.4265215396881104, + "learning_rate": 8.350730688935283e-06, + "loss": 1.8151, + "step": 200 + }, + { + "epoch": 0.012602671013856669, + "grad_norm": 1.4575409889221191, + "learning_rate": 8.392484342379958e-06, + "loss": 1.4593, + "step": 201 + }, + { + "epoch": 0.012665370869646999, + "grad_norm": 1.343617558479309, + "learning_rate": 8.434237995824636e-06, + "loss": 1.912, + "step": 202 + }, + { + "epoch": 0.012728070725437331, + "grad_norm": 1.427098274230957, + "learning_rate": 8.475991649269311e-06, + "loss": 1.6602, + "step": 203 + }, + { + "epoch": 0.012790770581227663, + "grad_norm": 1.7326325178146362, + "learning_rate": 8.517745302713988e-06, + "loss": 1.6171, + "step": 204 + }, + { + "epoch": 0.012853470437017995, + "grad_norm": 1.3790652751922607, + "learning_rate": 8.559498956158664e-06, + "loss": 1.5782, + "step": 205 + }, + { + "epoch": 0.012916170292808327, + "grad_norm": 1.4717655181884766, + "learning_rate": 8.601252609603341e-06, + "loss": 1.7403, + "step": 206 + }, + { + "epoch": 0.012978870148598658, + "grad_norm": 1.64073646068573, + "learning_rate": 8.643006263048018e-06, + "loss": 1.7881, + "step": 207 + }, + { + "epoch": 0.01304157000438899, + "grad_norm": 1.490596055984497, + "learning_rate": 8.684759916492695e-06, + "loss": 1.8143, + "step": 208 + }, + { + "epoch": 0.013104269860179322, + "grad_norm": 1.5460245609283447, + "learning_rate": 8.72651356993737e-06, + "loss": 1.591, + "step": 209 + }, + { + "epoch": 0.013166969715969654, + "grad_norm": 1.5106841325759888, + "learning_rate": 8.768267223382046e-06, + 
"loss": 1.6798, + "step": 210 + }, + { + "epoch": 0.013229669571759984, + "grad_norm": 1.4770811796188354, + "learning_rate": 8.810020876826723e-06, + "loss": 1.509, + "step": 211 + }, + { + "epoch": 0.013292369427550316, + "grad_norm": 1.3062046766281128, + "learning_rate": 8.8517745302714e-06, + "loss": 1.7718, + "step": 212 + }, + { + "epoch": 0.013355069283340648, + "grad_norm": 1.3659995794296265, + "learning_rate": 8.893528183716076e-06, + "loss": 1.7344, + "step": 213 + }, + { + "epoch": 0.01341776913913098, + "grad_norm": 1.333016276359558, + "learning_rate": 8.935281837160751e-06, + "loss": 1.5113, + "step": 214 + }, + { + "epoch": 0.013480468994921311, + "grad_norm": 1.408619999885559, + "learning_rate": 8.97703549060543e-06, + "loss": 1.8282, + "step": 215 + }, + { + "epoch": 0.013543168850711643, + "grad_norm": 1.2167294025421143, + "learning_rate": 9.018789144050105e-06, + "loss": 1.6203, + "step": 216 + }, + { + "epoch": 0.013605868706501975, + "grad_norm": 1.36732816696167, + "learning_rate": 9.060542797494782e-06, + "loss": 1.7549, + "step": 217 + }, + { + "epoch": 0.013668568562292307, + "grad_norm": 1.5483906269073486, + "learning_rate": 9.102296450939458e-06, + "loss": 1.8476, + "step": 218 + }, + { + "epoch": 0.013731268418082638, + "grad_norm": 1.6553313732147217, + "learning_rate": 9.144050104384135e-06, + "loss": 1.6852, + "step": 219 + }, + { + "epoch": 0.01379396827387297, + "grad_norm": 1.476178526878357, + "learning_rate": 9.18580375782881e-06, + "loss": 1.7519, + "step": 220 + }, + { + "epoch": 0.013856668129663302, + "grad_norm": 1.599279522895813, + "learning_rate": 9.227557411273487e-06, + "loss": 1.6042, + "step": 221 + }, + { + "epoch": 0.013919367985453634, + "grad_norm": 1.8371257781982422, + "learning_rate": 9.269311064718163e-06, + "loss": 1.5535, + "step": 222 + }, + { + "epoch": 0.013982067841243966, + "grad_norm": 1.5733819007873535, + "learning_rate": 9.31106471816284e-06, + "loss": 1.4482, + "step": 223 + }, + { + "epoch": 0.014044767697034296, + "grad_norm": 1.7122869491577148, + "learning_rate": 9.352818371607515e-06, + "loss": 1.4362, + "step": 224 + }, + { + "epoch": 0.014107467552824628, + "grad_norm": 1.6643214225769043, + "learning_rate": 9.394572025052194e-06, + "loss": 1.7065, + "step": 225 + }, + { + "epoch": 0.01417016740861496, + "grad_norm": 1.5319055318832397, + "learning_rate": 9.436325678496869e-06, + "loss": 1.6328, + "step": 226 + }, + { + "epoch": 0.014232867264405293, + "grad_norm": 1.43195378780365, + "learning_rate": 9.478079331941545e-06, + "loss": 1.6536, + "step": 227 + }, + { + "epoch": 0.014295567120195623, + "grad_norm": 1.3615461587905884, + "learning_rate": 9.519832985386222e-06, + "loss": 1.8153, + "step": 228 + }, + { + "epoch": 0.014358266975985955, + "grad_norm": 1.499516248703003, + "learning_rate": 9.561586638830899e-06, + "loss": 1.8518, + "step": 229 + }, + { + "epoch": 0.014420966831776287, + "grad_norm": 1.3591948747634888, + "learning_rate": 9.603340292275575e-06, + "loss": 1.7543, + "step": 230 + }, + { + "epoch": 0.01448366668756662, + "grad_norm": 1.5437679290771484, + "learning_rate": 9.64509394572025e-06, + "loss": 1.7262, + "step": 231 + }, + { + "epoch": 0.01454636654335695, + "grad_norm": 1.5946369171142578, + "learning_rate": 9.686847599164927e-06, + "loss": 1.6421, + "step": 232 + }, + { + "epoch": 0.014609066399147282, + "grad_norm": 1.3398301601409912, + "learning_rate": 9.728601252609604e-06, + "loss": 1.7401, + "step": 233 + }, + { + "epoch": 0.014671766254937614, + "grad_norm": 
1.7036612033843994, + "learning_rate": 9.77035490605428e-06, + "loss": 1.539, + "step": 234 + }, + { + "epoch": 0.014734466110727946, + "grad_norm": 1.5460608005523682, + "learning_rate": 9.812108559498957e-06, + "loss": 1.6491, + "step": 235 + }, + { + "epoch": 0.014797165966518276, + "grad_norm": 1.6770840883255005, + "learning_rate": 9.853862212943634e-06, + "loss": 1.5587, + "step": 236 + }, + { + "epoch": 0.014859865822308608, + "grad_norm": 1.6569596529006958, + "learning_rate": 9.895615866388309e-06, + "loss": 1.8544, + "step": 237 + }, + { + "epoch": 0.01492256567809894, + "grad_norm": 1.7661346197128296, + "learning_rate": 9.937369519832987e-06, + "loss": 1.7034, + "step": 238 + }, + { + "epoch": 0.014985265533889272, + "grad_norm": 1.5679106712341309, + "learning_rate": 9.979123173277662e-06, + "loss": 1.6614, + "step": 239 + }, + { + "epoch": 0.015047965389679605, + "grad_norm": 1.5936975479125977, + "learning_rate": 1.002087682672234e-05, + "loss": 1.7205, + "step": 240 + }, + { + "epoch": 0.015110665245469935, + "grad_norm": 1.5638277530670166, + "learning_rate": 1.0062630480167014e-05, + "loss": 1.7471, + "step": 241 + }, + { + "epoch": 0.015173365101260267, + "grad_norm": 1.638043761253357, + "learning_rate": 1.0104384133611691e-05, + "loss": 1.7141, + "step": 242 + }, + { + "epoch": 0.015236064957050599, + "grad_norm": 1.623812198638916, + "learning_rate": 1.014613778705637e-05, + "loss": 1.7364, + "step": 243 + }, + { + "epoch": 0.015298764812840931, + "grad_norm": 1.4750950336456299, + "learning_rate": 1.0187891440501046e-05, + "loss": 1.5104, + "step": 244 + }, + { + "epoch": 0.015361464668631261, + "grad_norm": 1.5466961860656738, + "learning_rate": 1.0229645093945721e-05, + "loss": 1.6894, + "step": 245 + }, + { + "epoch": 0.015424164524421594, + "grad_norm": 1.5613946914672852, + "learning_rate": 1.0271398747390398e-05, + "loss": 1.6228, + "step": 246 + }, + { + "epoch": 0.015486864380211926, + "grad_norm": 1.4508006572723389, + "learning_rate": 1.0313152400835074e-05, + "loss": 1.771, + "step": 247 + }, + { + "epoch": 0.015549564236002258, + "grad_norm": 1.4288867712020874, + "learning_rate": 1.035490605427975e-05, + "loss": 1.7569, + "step": 248 + }, + { + "epoch": 0.015612264091792588, + "grad_norm": 1.5802481174468994, + "learning_rate": 1.0396659707724426e-05, + "loss": 1.6978, + "step": 249 + }, + { + "epoch": 0.015674963947582922, + "grad_norm": 1.6312034130096436, + "learning_rate": 1.0438413361169105e-05, + "loss": 1.7688, + "step": 250 + }, + { + "epoch": 0.015737663803373252, + "grad_norm": 1.6179579496383667, + "learning_rate": 1.048016701461378e-05, + "loss": 1.6194, + "step": 251 + }, + { + "epoch": 0.015800363659163583, + "grad_norm": 1.542527198791504, + "learning_rate": 1.0521920668058456e-05, + "loss": 1.6603, + "step": 252 + }, + { + "epoch": 0.015863063514953916, + "grad_norm": 1.664562463760376, + "learning_rate": 1.0563674321503133e-05, + "loss": 1.6121, + "step": 253 + }, + { + "epoch": 0.015925763370744247, + "grad_norm": 1.6250574588775635, + "learning_rate": 1.0605427974947808e-05, + "loss": 1.8352, + "step": 254 + }, + { + "epoch": 0.01598846322653458, + "grad_norm": 1.6226924657821655, + "learning_rate": 1.0647181628392485e-05, + "loss": 1.546, + "step": 255 + }, + { + "epoch": 0.01605116308232491, + "grad_norm": 1.5476453304290771, + "learning_rate": 1.068893528183716e-05, + "loss": 1.7095, + "step": 256 + }, + { + "epoch": 0.01611386293811524, + "grad_norm": 1.4193332195281982, + "learning_rate": 1.0730688935281838e-05, + "loss": 1.7408, 
+ "step": 257 + }, + { + "epoch": 0.016176562793905575, + "grad_norm": 1.8587031364440918, + "learning_rate": 1.0772442588726515e-05, + "loss": 1.5404, + "step": 258 + }, + { + "epoch": 0.016239262649695906, + "grad_norm": 1.5988738536834717, + "learning_rate": 1.0814196242171192e-05, + "loss": 1.5854, + "step": 259 + }, + { + "epoch": 0.016301962505486236, + "grad_norm": 1.5160491466522217, + "learning_rate": 1.0855949895615867e-05, + "loss": 1.6675, + "step": 260 + }, + { + "epoch": 0.01636466236127657, + "grad_norm": 1.6182063817977905, + "learning_rate": 1.0897703549060543e-05, + "loss": 1.726, + "step": 261 + }, + { + "epoch": 0.0164273622170669, + "grad_norm": 1.5410940647125244, + "learning_rate": 1.093945720250522e-05, + "loss": 1.6274, + "step": 262 + }, + { + "epoch": 0.016490062072857234, + "grad_norm": 1.4704010486602783, + "learning_rate": 1.0981210855949897e-05, + "loss": 1.5971, + "step": 263 + }, + { + "epoch": 0.016552761928647564, + "grad_norm": 1.6385776996612549, + "learning_rate": 1.1022964509394573e-05, + "loss": 1.7429, + "step": 264 + }, + { + "epoch": 0.016615461784437895, + "grad_norm": 1.597712755203247, + "learning_rate": 1.106471816283925e-05, + "loss": 1.5628, + "step": 265 + }, + { + "epoch": 0.01667816164022823, + "grad_norm": 1.5227406024932861, + "learning_rate": 1.1106471816283925e-05, + "loss": 1.6555, + "step": 266 + }, + { + "epoch": 0.01674086149601856, + "grad_norm": 1.623064398765564, + "learning_rate": 1.1148225469728602e-05, + "loss": 1.4613, + "step": 267 + }, + { + "epoch": 0.01680356135180889, + "grad_norm": 1.9817156791687012, + "learning_rate": 1.1189979123173279e-05, + "loss": 1.3833, + "step": 268 + }, + { + "epoch": 0.016866261207599223, + "grad_norm": 1.8802618980407715, + "learning_rate": 1.1231732776617954e-05, + "loss": 1.5041, + "step": 269 + }, + { + "epoch": 0.016928961063389553, + "grad_norm": 2.020453691482544, + "learning_rate": 1.1273486430062632e-05, + "loss": 1.7043, + "step": 270 + }, + { + "epoch": 0.016991660919179887, + "grad_norm": 1.4996933937072754, + "learning_rate": 1.1315240083507309e-05, + "loss": 1.6218, + "step": 271 + }, + { + "epoch": 0.017054360774970218, + "grad_norm": 1.684226393699646, + "learning_rate": 1.1356993736951984e-05, + "loss": 1.8224, + "step": 272 + }, + { + "epoch": 0.017117060630760548, + "grad_norm": 1.7319085597991943, + "learning_rate": 1.139874739039666e-05, + "loss": 1.7156, + "step": 273 + }, + { + "epoch": 0.01717976048655088, + "grad_norm": 1.7185893058776855, + "learning_rate": 1.1440501043841337e-05, + "loss": 1.6405, + "step": 274 + }, + { + "epoch": 0.017242460342341212, + "grad_norm": 1.5740883350372314, + "learning_rate": 1.1482254697286012e-05, + "loss": 1.7371, + "step": 275 + }, + { + "epoch": 0.017305160198131546, + "grad_norm": 1.492624282836914, + "learning_rate": 1.1524008350730689e-05, + "loss": 1.6634, + "step": 276 + }, + { + "epoch": 0.017367860053921876, + "grad_norm": 1.8643437623977661, + "learning_rate": 1.1565762004175367e-05, + "loss": 1.5118, + "step": 277 + }, + { + "epoch": 0.017430559909712207, + "grad_norm": 1.532167911529541, + "learning_rate": 1.1607515657620044e-05, + "loss": 1.6077, + "step": 278 + }, + { + "epoch": 0.01749325976550254, + "grad_norm": 1.8116816282272339, + "learning_rate": 1.1649269311064719e-05, + "loss": 1.4991, + "step": 279 + }, + { + "epoch": 0.01755595962129287, + "grad_norm": 1.7584807872772217, + "learning_rate": 1.1691022964509396e-05, + "loss": 1.559, + "step": 280 + }, + { + "epoch": 0.0176186594770832, + "grad_norm": 
1.8566431999206543, + "learning_rate": 1.173277661795407e-05, + "loss": 1.579, + "step": 281 + }, + { + "epoch": 0.017681359332873535, + "grad_norm": 2.157989025115967, + "learning_rate": 1.1774530271398747e-05, + "loss": 1.5152, + "step": 282 + }, + { + "epoch": 0.017744059188663865, + "grad_norm": 1.6349374055862427, + "learning_rate": 1.1816283924843426e-05, + "loss": 1.6336, + "step": 283 + }, + { + "epoch": 0.0178067590444542, + "grad_norm": 1.7327972650527954, + "learning_rate": 1.1858037578288103e-05, + "loss": 1.6437, + "step": 284 + }, + { + "epoch": 0.01786945890024453, + "grad_norm": 1.8810933828353882, + "learning_rate": 1.1899791231732778e-05, + "loss": 1.7612, + "step": 285 + }, + { + "epoch": 0.01793215875603486, + "grad_norm": 1.8906854391098022, + "learning_rate": 1.1941544885177454e-05, + "loss": 1.7508, + "step": 286 + }, + { + "epoch": 0.017994858611825194, + "grad_norm": 2.0000662803649902, + "learning_rate": 1.198329853862213e-05, + "loss": 1.383, + "step": 287 + }, + { + "epoch": 0.018057558467615524, + "grad_norm": 1.8455754518508911, + "learning_rate": 1.2025052192066806e-05, + "loss": 1.6466, + "step": 288 + }, + { + "epoch": 0.018120258323405858, + "grad_norm": 1.8509551286697388, + "learning_rate": 1.2066805845511483e-05, + "loss": 1.8247, + "step": 289 + }, + { + "epoch": 0.018182958179196188, + "grad_norm": 1.650930404663086, + "learning_rate": 1.2108559498956161e-05, + "loss": 1.6039, + "step": 290 + }, + { + "epoch": 0.01824565803498652, + "grad_norm": 1.9533010721206665, + "learning_rate": 1.2150313152400836e-05, + "loss": 1.5656, + "step": 291 + }, + { + "epoch": 0.018308357890776852, + "grad_norm": 1.860772728919983, + "learning_rate": 1.2192066805845513e-05, + "loss": 1.7005, + "step": 292 + }, + { + "epoch": 0.018371057746567183, + "grad_norm": 1.6571053266525269, + "learning_rate": 1.223382045929019e-05, + "loss": 1.374, + "step": 293 + }, + { + "epoch": 0.018433757602357513, + "grad_norm": 1.7327181100845337, + "learning_rate": 1.2275574112734865e-05, + "loss": 1.5988, + "step": 294 + }, + { + "epoch": 0.018496457458147847, + "grad_norm": 1.67135488986969, + "learning_rate": 1.2317327766179541e-05, + "loss": 1.6969, + "step": 295 + }, + { + "epoch": 0.018559157313938177, + "grad_norm": 1.8460007905960083, + "learning_rate": 1.2359081419624216e-05, + "loss": 1.4091, + "step": 296 + }, + { + "epoch": 0.01862185716972851, + "grad_norm": 1.5971873998641968, + "learning_rate": 1.2400835073068895e-05, + "loss": 1.515, + "step": 297 + }, + { + "epoch": 0.01868455702551884, + "grad_norm": 1.6943672895431519, + "learning_rate": 1.2442588726513571e-05, + "loss": 1.6694, + "step": 298 + }, + { + "epoch": 0.018747256881309172, + "grad_norm": 1.872808814048767, + "learning_rate": 1.2484342379958248e-05, + "loss": 1.454, + "step": 299 + }, + { + "epoch": 0.018809956737099506, + "grad_norm": 1.5459387302398682, + "learning_rate": 1.2526096033402923e-05, + "loss": 1.6713, + "step": 300 + }, + { + "epoch": 0.018872656592889836, + "grad_norm": 1.7468551397323608, + "learning_rate": 1.25678496868476e-05, + "loss": 1.538, + "step": 301 + }, + { + "epoch": 0.018935356448680166, + "grad_norm": 1.6663907766342163, + "learning_rate": 1.2609603340292277e-05, + "loss": 1.7065, + "step": 302 + }, + { + "epoch": 0.0189980563044705, + "grad_norm": 1.7575440406799316, + "learning_rate": 1.2651356993736953e-05, + "loss": 1.6628, + "step": 303 + }, + { + "epoch": 0.01906075616026083, + "grad_norm": 1.4012240171432495, + "learning_rate": 1.269311064718163e-05, + "loss": 1.6163, + 
"step": 304 + }, + { + "epoch": 0.019123456016051164, + "grad_norm": 1.6922297477722168, + "learning_rate": 1.2734864300626307e-05, + "loss": 1.7434, + "step": 305 + }, + { + "epoch": 0.019186155871841495, + "grad_norm": 1.9017504453659058, + "learning_rate": 1.2776617954070982e-05, + "loss": 1.7474, + "step": 306 + }, + { + "epoch": 0.019248855727631825, + "grad_norm": 2.0925979614257812, + "learning_rate": 1.2818371607515658e-05, + "loss": 1.5475, + "step": 307 + }, + { + "epoch": 0.01931155558342216, + "grad_norm": 1.6217782497406006, + "learning_rate": 1.2860125260960335e-05, + "loss": 1.5416, + "step": 308 + }, + { + "epoch": 0.01937425543921249, + "grad_norm": 1.9575340747833252, + "learning_rate": 1.290187891440501e-05, + "loss": 1.5867, + "step": 309 + }, + { + "epoch": 0.019436955295002823, + "grad_norm": 2.3139469623565674, + "learning_rate": 1.2943632567849689e-05, + "loss": 1.4454, + "step": 310 + }, + { + "epoch": 0.019499655150793153, + "grad_norm": 1.8500087261199951, + "learning_rate": 1.2985386221294365e-05, + "loss": 1.5465, + "step": 311 + }, + { + "epoch": 0.019562355006583484, + "grad_norm": 1.8941069841384888, + "learning_rate": 1.302713987473904e-05, + "loss": 1.3009, + "step": 312 + }, + { + "epoch": 0.019625054862373818, + "grad_norm": 1.999627947807312, + "learning_rate": 1.3068893528183717e-05, + "loss": 1.6057, + "step": 313 + }, + { + "epoch": 0.019687754718164148, + "grad_norm": 1.941070318222046, + "learning_rate": 1.3110647181628394e-05, + "loss": 1.6589, + "step": 314 + }, + { + "epoch": 0.01975045457395448, + "grad_norm": 1.9182966947555542, + "learning_rate": 1.3152400835073069e-05, + "loss": 1.5471, + "step": 315 + }, + { + "epoch": 0.019813154429744812, + "grad_norm": 1.8325494527816772, + "learning_rate": 1.3194154488517745e-05, + "loss": 1.6538, + "step": 316 + }, + { + "epoch": 0.019875854285535142, + "grad_norm": 2.179591178894043, + "learning_rate": 1.3235908141962424e-05, + "loss": 1.5045, + "step": 317 + }, + { + "epoch": 0.019938554141325476, + "grad_norm": 1.911596655845642, + "learning_rate": 1.3277661795407099e-05, + "loss": 1.593, + "step": 318 + }, + { + "epoch": 0.020001253997115807, + "grad_norm": 2.0274112224578857, + "learning_rate": 1.3319415448851776e-05, + "loss": 1.6429, + "step": 319 + }, + { + "epoch": 0.020063953852906137, + "grad_norm": 1.9693150520324707, + "learning_rate": 1.3361169102296452e-05, + "loss": 1.5013, + "step": 320 + }, + { + "epoch": 0.02012665370869647, + "grad_norm": 1.9491040706634521, + "learning_rate": 1.3402922755741127e-05, + "loss": 1.5154, + "step": 321 + }, + { + "epoch": 0.0201893535644868, + "grad_norm": 1.7042237520217896, + "learning_rate": 1.3444676409185804e-05, + "loss": 1.7131, + "step": 322 + }, + { + "epoch": 0.020252053420277135, + "grad_norm": 1.6741865873336792, + "learning_rate": 1.3486430062630482e-05, + "loss": 1.5162, + "step": 323 + }, + { + "epoch": 0.020314753276067465, + "grad_norm": 1.8741329908370972, + "learning_rate": 1.3528183716075159e-05, + "loss": 1.6085, + "step": 324 + }, + { + "epoch": 0.020377453131857796, + "grad_norm": 1.7532190084457397, + "learning_rate": 1.3569937369519834e-05, + "loss": 1.6195, + "step": 325 + }, + { + "epoch": 0.02044015298764813, + "grad_norm": 1.925694465637207, + "learning_rate": 1.361169102296451e-05, + "loss": 1.5423, + "step": 326 + }, + { + "epoch": 0.02050285284343846, + "grad_norm": 1.8379743099212646, + "learning_rate": 1.3653444676409186e-05, + "loss": 1.5022, + "step": 327 + }, + { + "epoch": 0.02056555269922879, + "grad_norm": 
1.8483569622039795, + "learning_rate": 1.3695198329853863e-05, + "loss": 1.5112, + "step": 328 + }, + { + "epoch": 0.020628252555019124, + "grad_norm": 1.7630701065063477, + "learning_rate": 1.373695198329854e-05, + "loss": 1.6864, + "step": 329 + }, + { + "epoch": 0.020690952410809454, + "grad_norm": 1.8686586618423462, + "learning_rate": 1.3778705636743218e-05, + "loss": 1.6727, + "step": 330 + }, + { + "epoch": 0.02075365226659979, + "grad_norm": 1.8003140687942505, + "learning_rate": 1.3820459290187893e-05, + "loss": 1.5201, + "step": 331 + }, + { + "epoch": 0.02081635212239012, + "grad_norm": 1.7014751434326172, + "learning_rate": 1.386221294363257e-05, + "loss": 1.6241, + "step": 332 + }, + { + "epoch": 0.02087905197818045, + "grad_norm": 1.8168505430221558, + "learning_rate": 1.3903966597077246e-05, + "loss": 1.6343, + "step": 333 + }, + { + "epoch": 0.020941751833970783, + "grad_norm": 1.8605718612670898, + "learning_rate": 1.3945720250521921e-05, + "loss": 1.6501, + "step": 334 + }, + { + "epoch": 0.021004451689761113, + "grad_norm": 2.1046152114868164, + "learning_rate": 1.3987473903966598e-05, + "loss": 1.5323, + "step": 335 + }, + { + "epoch": 0.021067151545551444, + "grad_norm": 2.1209912300109863, + "learning_rate": 1.4029227557411276e-05, + "loss": 1.5148, + "step": 336 + }, + { + "epoch": 0.021129851401341777, + "grad_norm": 2.2453677654266357, + "learning_rate": 1.4070981210855951e-05, + "loss": 1.3033, + "step": 337 + }, + { + "epoch": 0.021192551257132108, + "grad_norm": 2.1279962062835693, + "learning_rate": 1.4112734864300628e-05, + "loss": 1.6883, + "step": 338 + }, + { + "epoch": 0.02125525111292244, + "grad_norm": 1.96370530128479, + "learning_rate": 1.4154488517745305e-05, + "loss": 1.4324, + "step": 339 + }, + { + "epoch": 0.021317950968712772, + "grad_norm": 2.0340590476989746, + "learning_rate": 1.419624217118998e-05, + "loss": 1.3692, + "step": 340 + }, + { + "epoch": 0.021380650824503102, + "grad_norm": 1.7325291633605957, + "learning_rate": 1.4237995824634656e-05, + "loss": 1.6261, + "step": 341 + }, + { + "epoch": 0.021443350680293436, + "grad_norm": 2.250493049621582, + "learning_rate": 1.4279749478079331e-05, + "loss": 1.6097, + "step": 342 + }, + { + "epoch": 0.021506050536083766, + "grad_norm": 2.4427640438079834, + "learning_rate": 1.432150313152401e-05, + "loss": 1.4015, + "step": 343 + }, + { + "epoch": 0.0215687503918741, + "grad_norm": 1.9867295026779175, + "learning_rate": 1.4363256784968686e-05, + "loss": 1.4347, + "step": 344 + }, + { + "epoch": 0.02163145024766443, + "grad_norm": 2.1825671195983887, + "learning_rate": 1.4405010438413363e-05, + "loss": 1.382, + "step": 345 + }, + { + "epoch": 0.02169415010345476, + "grad_norm": 2.197808027267456, + "learning_rate": 1.4446764091858038e-05, + "loss": 1.396, + "step": 346 + }, + { + "epoch": 0.021756849959245095, + "grad_norm": 1.767224907875061, + "learning_rate": 1.4488517745302715e-05, + "loss": 1.6123, + "step": 347 + }, + { + "epoch": 0.021819549815035425, + "grad_norm": 2.1484487056732178, + "learning_rate": 1.4530271398747392e-05, + "loss": 1.2907, + "step": 348 + }, + { + "epoch": 0.021882249670825756, + "grad_norm": 3.3351798057556152, + "learning_rate": 1.4572025052192067e-05, + "loss": 1.5678, + "step": 349 + }, + { + "epoch": 0.02194494952661609, + "grad_norm": 1.904037356376648, + "learning_rate": 1.4613778705636745e-05, + "loss": 1.4607, + "step": 350 + }, + { + "epoch": 0.02200764938240642, + "grad_norm": 2.1238811016082764, + "learning_rate": 1.4655532359081422e-05, + "loss": 1.5448, 
+ "step": 351 + }, + { + "epoch": 0.022070349238196754, + "grad_norm": 2.1285881996154785, + "learning_rate": 1.4697286012526097e-05, + "loss": 1.5984, + "step": 352 + }, + { + "epoch": 0.022133049093987084, + "grad_norm": 1.893250584602356, + "learning_rate": 1.4739039665970773e-05, + "loss": 1.6551, + "step": 353 + }, + { + "epoch": 0.022195748949777414, + "grad_norm": 2.035120964050293, + "learning_rate": 1.478079331941545e-05, + "loss": 1.4023, + "step": 354 + }, + { + "epoch": 0.022258448805567748, + "grad_norm": 1.7584789991378784, + "learning_rate": 1.4822546972860125e-05, + "loss": 1.5845, + "step": 355 + }, + { + "epoch": 0.02232114866135808, + "grad_norm": 2.0815346240997314, + "learning_rate": 1.4864300626304804e-05, + "loss": 1.4014, + "step": 356 + }, + { + "epoch": 0.022383848517148412, + "grad_norm": 2.1335716247558594, + "learning_rate": 1.490605427974948e-05, + "loss": 1.5758, + "step": 357 + }, + { + "epoch": 0.022446548372938743, + "grad_norm": 2.0626659393310547, + "learning_rate": 1.4947807933194155e-05, + "loss": 1.6037, + "step": 358 + }, + { + "epoch": 0.022509248228729073, + "grad_norm": 2.0992183685302734, + "learning_rate": 1.4989561586638832e-05, + "loss": 1.5395, + "step": 359 + }, + { + "epoch": 0.022571948084519407, + "grad_norm": 2.0195586681365967, + "learning_rate": 1.5031315240083509e-05, + "loss": 1.5025, + "step": 360 + }, + { + "epoch": 0.022634647940309737, + "grad_norm": 1.987336277961731, + "learning_rate": 1.5073068893528184e-05, + "loss": 1.39, + "step": 361 + }, + { + "epoch": 0.022697347796100067, + "grad_norm": 1.8955408334732056, + "learning_rate": 1.511482254697286e-05, + "loss": 1.4405, + "step": 362 + }, + { + "epoch": 0.0227600476518904, + "grad_norm": 2.053431272506714, + "learning_rate": 1.5156576200417539e-05, + "loss": 1.6595, + "step": 363 + }, + { + "epoch": 0.02282274750768073, + "grad_norm": 2.4586710929870605, + "learning_rate": 1.5198329853862216e-05, + "loss": 1.2722, + "step": 364 + }, + { + "epoch": 0.022885447363471065, + "grad_norm": 1.953789472579956, + "learning_rate": 1.524008350730689e-05, + "loss": 1.5727, + "step": 365 + }, + { + "epoch": 0.022948147219261396, + "grad_norm": 2.251375913619995, + "learning_rate": 1.5281837160751567e-05, + "loss": 1.3974, + "step": 366 + }, + { + "epoch": 0.023010847075051726, + "grad_norm": 1.8354485034942627, + "learning_rate": 1.5323590814196244e-05, + "loss": 1.6466, + "step": 367 + }, + { + "epoch": 0.02307354693084206, + "grad_norm": 1.8804957866668701, + "learning_rate": 1.536534446764092e-05, + "loss": 1.5206, + "step": 368 + }, + { + "epoch": 0.02313624678663239, + "grad_norm": 2.1873042583465576, + "learning_rate": 1.5407098121085594e-05, + "loss": 1.5332, + "step": 369 + }, + { + "epoch": 0.02319894664242272, + "grad_norm": 1.797953486442566, + "learning_rate": 1.5448851774530274e-05, + "loss": 1.4437, + "step": 370 + }, + { + "epoch": 0.023261646498213055, + "grad_norm": 2.642439365386963, + "learning_rate": 1.549060542797495e-05, + "loss": 1.3947, + "step": 371 + }, + { + "epoch": 0.023324346354003385, + "grad_norm": 2.2403194904327393, + "learning_rate": 1.5532359081419624e-05, + "loss": 1.4972, + "step": 372 + }, + { + "epoch": 0.02338704620979372, + "grad_norm": 1.9281260967254639, + "learning_rate": 1.55741127348643e-05, + "loss": 1.6348, + "step": 373 + }, + { + "epoch": 0.02344974606558405, + "grad_norm": 2.2053444385528564, + "learning_rate": 1.5615866388308978e-05, + "loss": 1.4554, + "step": 374 + }, + { + "epoch": 0.02351244592137438, + "grad_norm": 
2.0307257175445557, + "learning_rate": 1.5657620041753654e-05, + "loss": 1.5388, + "step": 375 + }, + { + "epoch": 0.023575145777164713, + "grad_norm": 2.2661986351013184, + "learning_rate": 1.569937369519833e-05, + "loss": 1.4504, + "step": 376 + }, + { + "epoch": 0.023637845632955044, + "grad_norm": 1.9572361707687378, + "learning_rate": 1.5741127348643008e-05, + "loss": 1.4571, + "step": 377 + }, + { + "epoch": 0.023700545488745377, + "grad_norm": 2.217830181121826, + "learning_rate": 1.5782881002087684e-05, + "loss": 1.4405, + "step": 378 + }, + { + "epoch": 0.023763245344535708, + "grad_norm": 2.0943470001220703, + "learning_rate": 1.582463465553236e-05, + "loss": 1.3909, + "step": 379 + }, + { + "epoch": 0.023825945200326038, + "grad_norm": 2.1130013465881348, + "learning_rate": 1.5866388308977038e-05, + "loss": 1.5773, + "step": 380 + }, + { + "epoch": 0.023888645056116372, + "grad_norm": 1.8700965642929077, + "learning_rate": 1.590814196242171e-05, + "loss": 1.5882, + "step": 381 + }, + { + "epoch": 0.023951344911906702, + "grad_norm": 2.171607255935669, + "learning_rate": 1.5949895615866388e-05, + "loss": 1.6153, + "step": 382 + }, + { + "epoch": 0.024014044767697033, + "grad_norm": 2.650376796722412, + "learning_rate": 1.5991649269311068e-05, + "loss": 1.2768, + "step": 383 + }, + { + "epoch": 0.024076744623487367, + "grad_norm": 2.0647857189178467, + "learning_rate": 1.6033402922755745e-05, + "loss": 1.5323, + "step": 384 + }, + { + "epoch": 0.024139444479277697, + "grad_norm": 2.0851104259490967, + "learning_rate": 1.6075156576200418e-05, + "loss": 1.58, + "step": 385 + }, + { + "epoch": 0.02420214433506803, + "grad_norm": 1.9086259603500366, + "learning_rate": 1.6116910229645095e-05, + "loss": 1.664, + "step": 386 + }, + { + "epoch": 0.02426484419085836, + "grad_norm": 1.9826158285140991, + "learning_rate": 1.615866388308977e-05, + "loss": 1.614, + "step": 387 + }, + { + "epoch": 0.02432754404664869, + "grad_norm": 2.082916259765625, + "learning_rate": 1.6200417536534448e-05, + "loss": 1.5045, + "step": 388 + }, + { + "epoch": 0.024390243902439025, + "grad_norm": 2.247500419616699, + "learning_rate": 1.6242171189979125e-05, + "loss": 1.3859, + "step": 389 + }, + { + "epoch": 0.024452943758229356, + "grad_norm": 2.0015430450439453, + "learning_rate": 1.62839248434238e-05, + "loss": 1.5263, + "step": 390 + }, + { + "epoch": 0.02451564361401969, + "grad_norm": 1.9787845611572266, + "learning_rate": 1.6325678496868478e-05, + "loss": 1.4285, + "step": 391 + }, + { + "epoch": 0.02457834346981002, + "grad_norm": 1.9868314266204834, + "learning_rate": 1.6367432150313155e-05, + "loss": 1.5311, + "step": 392 + }, + { + "epoch": 0.02464104332560035, + "grad_norm": 2.1493732929229736, + "learning_rate": 1.640918580375783e-05, + "loss": 1.4514, + "step": 393 + }, + { + "epoch": 0.024703743181390684, + "grad_norm": 2.084571599960327, + "learning_rate": 1.6450939457202505e-05, + "loss": 1.5396, + "step": 394 + }, + { + "epoch": 0.024766443037181014, + "grad_norm": 2.3157448768615723, + "learning_rate": 1.6492693110647182e-05, + "loss": 1.4585, + "step": 395 + }, + { + "epoch": 0.024829142892971345, + "grad_norm": 2.18863582611084, + "learning_rate": 1.6534446764091862e-05, + "loss": 1.4122, + "step": 396 + }, + { + "epoch": 0.02489184274876168, + "grad_norm": 2.0981016159057617, + "learning_rate": 1.6576200417536535e-05, + "loss": 1.4313, + "step": 397 + }, + { + "epoch": 0.02495454260455201, + "grad_norm": 2.579850435256958, + "learning_rate": 1.6617954070981212e-05, + "loss": 1.4934, + 
"step": 398 + }, + { + "epoch": 0.025017242460342343, + "grad_norm": 2.1514015197753906, + "learning_rate": 1.665970772442589e-05, + "loss": 1.4886, + "step": 399 + }, + { + "epoch": 0.025079942316132673, + "grad_norm": 2.420623779296875, + "learning_rate": 1.6701461377870565e-05, + "loss": 1.4962, + "step": 400 + }, + { + "epoch": 0.025142642171923003, + "grad_norm": 2.0778849124908447, + "learning_rate": 1.6743215031315242e-05, + "loss": 1.359, + "step": 401 + }, + { + "epoch": 0.025205342027713337, + "grad_norm": 2.171297073364258, + "learning_rate": 1.6784968684759915e-05, + "loss": 1.3232, + "step": 402 + }, + { + "epoch": 0.025268041883503668, + "grad_norm": 2.417734146118164, + "learning_rate": 1.6826722338204595e-05, + "loss": 1.3865, + "step": 403 + }, + { + "epoch": 0.025330741739293998, + "grad_norm": 2.702072858810425, + "learning_rate": 1.6868475991649272e-05, + "loss": 1.5441, + "step": 404 + }, + { + "epoch": 0.025393441595084332, + "grad_norm": 2.5470051765441895, + "learning_rate": 1.691022964509395e-05, + "loss": 1.6052, + "step": 405 + }, + { + "epoch": 0.025456141450874662, + "grad_norm": 2.4018118381500244, + "learning_rate": 1.6951983298538622e-05, + "loss": 1.3258, + "step": 406 + }, + { + "epoch": 0.025518841306664996, + "grad_norm": 2.180907726287842, + "learning_rate": 1.69937369519833e-05, + "loss": 1.4594, + "step": 407 + }, + { + "epoch": 0.025581541162455326, + "grad_norm": 2.145601749420166, + "learning_rate": 1.7035490605427976e-05, + "loss": 1.4112, + "step": 408 + }, + { + "epoch": 0.025644241018245657, + "grad_norm": 1.978317379951477, + "learning_rate": 1.7077244258872652e-05, + "loss": 1.6394, + "step": 409 + }, + { + "epoch": 0.02570694087403599, + "grad_norm": 2.112196922302246, + "learning_rate": 1.711899791231733e-05, + "loss": 1.7007, + "step": 410 + }, + { + "epoch": 0.02576964072982632, + "grad_norm": 2.3247666358947754, + "learning_rate": 1.7160751565762006e-05, + "loss": 1.4791, + "step": 411 + }, + { + "epoch": 0.025832340585616655, + "grad_norm": 1.9711734056472778, + "learning_rate": 1.7202505219206682e-05, + "loss": 1.4379, + "step": 412 + }, + { + "epoch": 0.025895040441406985, + "grad_norm": 2.1576883792877197, + "learning_rate": 1.724425887265136e-05, + "loss": 1.553, + "step": 413 + }, + { + "epoch": 0.025957740297197315, + "grad_norm": 2.3024449348449707, + "learning_rate": 1.7286012526096036e-05, + "loss": 1.5356, + "step": 414 + }, + { + "epoch": 0.02602044015298765, + "grad_norm": 1.9897478818893433, + "learning_rate": 1.732776617954071e-05, + "loss": 1.6403, + "step": 415 + }, + { + "epoch": 0.02608314000877798, + "grad_norm": 2.098860263824463, + "learning_rate": 1.736951983298539e-05, + "loss": 1.5215, + "step": 416 + }, + { + "epoch": 0.02614583986456831, + "grad_norm": 2.281498432159424, + "learning_rate": 1.7411273486430066e-05, + "loss": 1.4896, + "step": 417 + }, + { + "epoch": 0.026208539720358644, + "grad_norm": 2.041102170944214, + "learning_rate": 1.745302713987474e-05, + "loss": 1.5855, + "step": 418 + }, + { + "epoch": 0.026271239576148974, + "grad_norm": 2.2687225341796875, + "learning_rate": 1.7494780793319416e-05, + "loss": 1.6141, + "step": 419 + }, + { + "epoch": 0.026333939431939308, + "grad_norm": 2.1427271366119385, + "learning_rate": 1.7536534446764093e-05, + "loss": 1.6017, + "step": 420 + }, + { + "epoch": 0.026396639287729638, + "grad_norm": 2.007707118988037, + "learning_rate": 1.757828810020877e-05, + "loss": 1.4858, + "step": 421 + }, + { + "epoch": 0.02645933914351997, + "grad_norm": 2.29459285736084, + 
"learning_rate": 1.7620041753653446e-05, + "loss": 1.46, + "step": 422 + }, + { + "epoch": 0.026522038999310302, + "grad_norm": 1.9730898141860962, + "learning_rate": 1.7661795407098123e-05, + "loss": 1.5687, + "step": 423 + }, + { + "epoch": 0.026584738855100633, + "grad_norm": 2.2537200450897217, + "learning_rate": 1.77035490605428e-05, + "loss": 1.443, + "step": 424 + }, + { + "epoch": 0.026647438710890967, + "grad_norm": 2.92638897895813, + "learning_rate": 1.7745302713987476e-05, + "loss": 1.3227, + "step": 425 + }, + { + "epoch": 0.026710138566681297, + "grad_norm": 2.2713091373443604, + "learning_rate": 1.7787056367432153e-05, + "loss": 1.3257, + "step": 426 + }, + { + "epoch": 0.026772838422471627, + "grad_norm": 2.481208086013794, + "learning_rate": 1.7828810020876826e-05, + "loss": 1.3549, + "step": 427 + }, + { + "epoch": 0.02683553827826196, + "grad_norm": 2.7240359783172607, + "learning_rate": 1.7870563674321503e-05, + "loss": 1.4382, + "step": 428 + }, + { + "epoch": 0.02689823813405229, + "grad_norm": 2.2681493759155273, + "learning_rate": 1.791231732776618e-05, + "loss": 1.642, + "step": 429 + }, + { + "epoch": 0.026960937989842622, + "grad_norm": 2.301711320877075, + "learning_rate": 1.795407098121086e-05, + "loss": 1.4414, + "step": 430 + }, + { + "epoch": 0.027023637845632956, + "grad_norm": 2.068291187286377, + "learning_rate": 1.7995824634655533e-05, + "loss": 1.4987, + "step": 431 + }, + { + "epoch": 0.027086337701423286, + "grad_norm": 2.1843581199645996, + "learning_rate": 1.803757828810021e-05, + "loss": 1.5864, + "step": 432 + }, + { + "epoch": 0.02714903755721362, + "grad_norm": 2.1155004501342773, + "learning_rate": 1.8079331941544887e-05, + "loss": 1.4381, + "step": 433 + }, + { + "epoch": 0.02721173741300395, + "grad_norm": 2.2611005306243896, + "learning_rate": 1.8121085594989563e-05, + "loss": 1.4782, + "step": 434 + }, + { + "epoch": 0.02727443726879428, + "grad_norm": 2.5349278450012207, + "learning_rate": 1.816283924843424e-05, + "loss": 1.5608, + "step": 435 + }, + { + "epoch": 0.027337137124584614, + "grad_norm": 2.2360355854034424, + "learning_rate": 1.8204592901878917e-05, + "loss": 1.4853, + "step": 436 + }, + { + "epoch": 0.027399836980374945, + "grad_norm": 2.1211512088775635, + "learning_rate": 1.8246346555323593e-05, + "loss": 1.5916, + "step": 437 + }, + { + "epoch": 0.027462536836165275, + "grad_norm": 2.133639097213745, + "learning_rate": 1.828810020876827e-05, + "loss": 1.5424, + "step": 438 + }, + { + "epoch": 0.02752523669195561, + "grad_norm": 2.5048553943634033, + "learning_rate": 1.8329853862212947e-05, + "loss": 1.6344, + "step": 439 + }, + { + "epoch": 0.02758793654774594, + "grad_norm": 2.2127513885498047, + "learning_rate": 1.837160751565762e-05, + "loss": 1.5137, + "step": 440 + }, + { + "epoch": 0.027650636403536273, + "grad_norm": 2.317344903945923, + "learning_rate": 1.8413361169102297e-05, + "loss": 1.3563, + "step": 441 + }, + { + "epoch": 0.027713336259326603, + "grad_norm": 2.432835102081299, + "learning_rate": 1.8455114822546974e-05, + "loss": 1.3773, + "step": 442 + }, + { + "epoch": 0.027776036115116934, + "grad_norm": 2.7050552368164062, + "learning_rate": 1.849686847599165e-05, + "loss": 1.3746, + "step": 443 + }, + { + "epoch": 0.027838735970907268, + "grad_norm": 2.426168441772461, + "learning_rate": 1.8538622129436327e-05, + "loss": 1.6728, + "step": 444 + }, + { + "epoch": 0.027901435826697598, + "grad_norm": 2.597895622253418, + "learning_rate": 1.8580375782881004e-05, + "loss": 1.2525, + "step": 445 + }, + { + 
"epoch": 0.027964135682487932, + "grad_norm": 2.0578854084014893, + "learning_rate": 1.862212943632568e-05, + "loss": 1.4414, + "step": 446 + }, + { + "epoch": 0.028026835538278262, + "grad_norm": 2.0529673099517822, + "learning_rate": 1.8663883089770357e-05, + "loss": 1.4659, + "step": 447 + }, + { + "epoch": 0.028089535394068593, + "grad_norm": 2.3072078227996826, + "learning_rate": 1.870563674321503e-05, + "loss": 1.5159, + "step": 448 + }, + { + "epoch": 0.028152235249858926, + "grad_norm": 2.4748339653015137, + "learning_rate": 1.8747390396659707e-05, + "loss": 1.4199, + "step": 449 + }, + { + "epoch": 0.028214935105649257, + "grad_norm": 2.2878804206848145, + "learning_rate": 1.8789144050104387e-05, + "loss": 1.3817, + "step": 450 + }, + { + "epoch": 0.028277634961439587, + "grad_norm": 2.4191272258758545, + "learning_rate": 1.8830897703549064e-05, + "loss": 1.4656, + "step": 451 + }, + { + "epoch": 0.02834033481722992, + "grad_norm": 2.1919734477996826, + "learning_rate": 1.8872651356993737e-05, + "loss": 1.6242, + "step": 452 + }, + { + "epoch": 0.02840303467302025, + "grad_norm": 2.2783408164978027, + "learning_rate": 1.8914405010438414e-05, + "loss": 1.4236, + "step": 453 + }, + { + "epoch": 0.028465734528810585, + "grad_norm": 2.1138598918914795, + "learning_rate": 1.895615866388309e-05, + "loss": 1.4629, + "step": 454 + }, + { + "epoch": 0.028528434384600915, + "grad_norm": 2.4304888248443604, + "learning_rate": 1.8997912317327767e-05, + "loss": 1.4228, + "step": 455 + }, + { + "epoch": 0.028591134240391246, + "grad_norm": 2.2193706035614014, + "learning_rate": 1.9039665970772444e-05, + "loss": 1.2357, + "step": 456 + }, + { + "epoch": 0.02865383409618158, + "grad_norm": 2.4814953804016113, + "learning_rate": 1.908141962421712e-05, + "loss": 1.3538, + "step": 457 + }, + { + "epoch": 0.02871653395197191, + "grad_norm": 2.4209325313568115, + "learning_rate": 1.9123173277661798e-05, + "loss": 1.3294, + "step": 458 + }, + { + "epoch": 0.028779233807762244, + "grad_norm": 2.284658193588257, + "learning_rate": 1.9164926931106474e-05, + "loss": 1.4853, + "step": 459 + }, + { + "epoch": 0.028841933663552574, + "grad_norm": 2.0292747020721436, + "learning_rate": 1.920668058455115e-05, + "loss": 1.4798, + "step": 460 + }, + { + "epoch": 0.028904633519342905, + "grad_norm": 2.364474058151245, + "learning_rate": 1.9248434237995824e-05, + "loss": 1.4244, + "step": 461 + }, + { + "epoch": 0.02896733337513324, + "grad_norm": 2.2657997608184814, + "learning_rate": 1.92901878914405e-05, + "loss": 1.6755, + "step": 462 + }, + { + "epoch": 0.02903003323092357, + "grad_norm": 2.243767023086548, + "learning_rate": 1.933194154488518e-05, + "loss": 1.4153, + "step": 463 + }, + { + "epoch": 0.0290927330867139, + "grad_norm": 2.3476529121398926, + "learning_rate": 1.9373695198329854e-05, + "loss": 1.3194, + "step": 464 + }, + { + "epoch": 0.029155432942504233, + "grad_norm": 2.6297860145568848, + "learning_rate": 1.941544885177453e-05, + "loss": 1.301, + "step": 465 + }, + { + "epoch": 0.029218132798294563, + "grad_norm": 2.0840892791748047, + "learning_rate": 1.9457202505219208e-05, + "loss": 1.3797, + "step": 466 + }, + { + "epoch": 0.029280832654084897, + "grad_norm": 2.1932859420776367, + "learning_rate": 1.9498956158663885e-05, + "loss": 1.4004, + "step": 467 + }, + { + "epoch": 0.029343532509875227, + "grad_norm": 2.637216806411743, + "learning_rate": 1.954070981210856e-05, + "loss": 1.4432, + "step": 468 + }, + { + "epoch": 0.029406232365665558, + "grad_norm": 2.5117383003234863, + 
"learning_rate": 1.9582463465553238e-05, + "loss": 1.3113, + "step": 469 + }, + { + "epoch": 0.02946893222145589, + "grad_norm": 2.5250244140625, + "learning_rate": 1.9624217118997915e-05, + "loss": 1.5266, + "step": 470 + }, + { + "epoch": 0.029531632077246222, + "grad_norm": 2.5667710304260254, + "learning_rate": 1.966597077244259e-05, + "loss": 1.5483, + "step": 471 + }, + { + "epoch": 0.029594331933036552, + "grad_norm": 3.0721311569213867, + "learning_rate": 1.9707724425887268e-05, + "loss": 1.1959, + "step": 472 + }, + { + "epoch": 0.029657031788826886, + "grad_norm": 3.174610137939453, + "learning_rate": 1.974947807933194e-05, + "loss": 1.3905, + "step": 473 + }, + { + "epoch": 0.029719731644617216, + "grad_norm": 2.4306414127349854, + "learning_rate": 1.9791231732776618e-05, + "loss": 1.4174, + "step": 474 + }, + { + "epoch": 0.02978243150040755, + "grad_norm": 2.225858211517334, + "learning_rate": 1.9832985386221295e-05, + "loss": 1.4218, + "step": 475 + }, + { + "epoch": 0.02984513135619788, + "grad_norm": 2.040678024291992, + "learning_rate": 1.9874739039665975e-05, + "loss": 1.513, + "step": 476 + }, + { + "epoch": 0.02990783121198821, + "grad_norm": 2.185588836669922, + "learning_rate": 1.9916492693110648e-05, + "loss": 1.5503, + "step": 477 + }, + { + "epoch": 0.029970531067778545, + "grad_norm": 2.3558242321014404, + "learning_rate": 1.9958246346555325e-05, + "loss": 1.4989, + "step": 478 + }, + { + "epoch": 0.030033230923568875, + "grad_norm": 2.3874950408935547, + "learning_rate": 2e-05, + "loss": 1.4232, + "step": 479 + }, + { + "epoch": 0.03009593077935921, + "grad_norm": 2.303556203842163, + "learning_rate": 1.9999999793799785e-05, + "loss": 1.4464, + "step": 480 + }, + { + "epoch": 0.03015863063514954, + "grad_norm": 2.0799388885498047, + "learning_rate": 1.9999999175199135e-05, + "loss": 1.4966, + "step": 481 + }, + { + "epoch": 0.03022133049093987, + "grad_norm": 2.2455639839172363, + "learning_rate": 1.999999814419809e-05, + "loss": 1.3466, + "step": 482 + }, + { + "epoch": 0.030284030346730204, + "grad_norm": 2.0857245922088623, + "learning_rate": 1.9999996700796677e-05, + "loss": 1.5, + "step": 483 + }, + { + "epoch": 0.030346730202520534, + "grad_norm": 2.207616090774536, + "learning_rate": 1.9999994844994968e-05, + "loss": 1.6854, + "step": 484 + }, + { + "epoch": 0.030409430058310864, + "grad_norm": 2.031473398208618, + "learning_rate": 1.999999257679303e-05, + "loss": 1.4607, + "step": 485 + }, + { + "epoch": 0.030472129914101198, + "grad_norm": 2.128164768218994, + "learning_rate": 1.9999989896190967e-05, + "loss": 1.4284, + "step": 486 + }, + { + "epoch": 0.03053482976989153, + "grad_norm": 2.3680503368377686, + "learning_rate": 1.9999986803188885e-05, + "loss": 1.6086, + "step": 487 + }, + { + "epoch": 0.030597529625681862, + "grad_norm": 2.1776907444000244, + "learning_rate": 1.9999983297786906e-05, + "loss": 1.3547, + "step": 488 + }, + { + "epoch": 0.030660229481472193, + "grad_norm": 2.1806793212890625, + "learning_rate": 1.999997937998518e-05, + "loss": 1.4885, + "step": 489 + }, + { + "epoch": 0.030722929337262523, + "grad_norm": 2.5050745010375977, + "learning_rate": 1.999997504978387e-05, + "loss": 1.4671, + "step": 490 + }, + { + "epoch": 0.030785629193052857, + "grad_norm": 2.1996240615844727, + "learning_rate": 1.9999970307183152e-05, + "loss": 1.4488, + "step": 491 + }, + { + "epoch": 0.030848329048843187, + "grad_norm": 2.1373445987701416, + "learning_rate": 1.999996515218322e-05, + "loss": 1.554, + "step": 492 + }, + { + "epoch": 
0.03091102890463352, + "grad_norm": 2.1344754695892334, + "learning_rate": 1.999995958478429e-05, + "loss": 1.3875, + "step": 493 + }, + { + "epoch": 0.03097372876042385, + "grad_norm": 2.2490153312683105, + "learning_rate": 1.9999953604986587e-05, + "loss": 1.4558, + "step": 494 + }, + { + "epoch": 0.03103642861621418, + "grad_norm": 2.619656562805176, + "learning_rate": 1.9999947212790363e-05, + "loss": 1.3894, + "step": 495 + }, + { + "epoch": 0.031099128472004516, + "grad_norm": 2.2263669967651367, + "learning_rate": 1.9999940408195878e-05, + "loss": 1.5144, + "step": 496 + }, + { + "epoch": 0.031161828327794846, + "grad_norm": 2.5345981121063232, + "learning_rate": 1.999993319120342e-05, + "loss": 1.5095, + "step": 497 + }, + { + "epoch": 0.031224528183585176, + "grad_norm": 2.3442211151123047, + "learning_rate": 1.999992556181327e-05, + "loss": 1.3002, + "step": 498 + }, + { + "epoch": 0.03128722803937551, + "grad_norm": 2.2023112773895264, + "learning_rate": 1.999991752002576e-05, + "loss": 1.5147, + "step": 499 + }, + { + "epoch": 0.031349927895165844, + "grad_norm": 2.5338807106018066, + "learning_rate": 1.9999909065841215e-05, + "loss": 1.3403, + "step": 500 + }, + { + "epoch": 0.03141262775095617, + "grad_norm": 2.0078024864196777, + "learning_rate": 1.999990019925998e-05, + "loss": 1.5277, + "step": 501 + }, + { + "epoch": 0.031475327606746505, + "grad_norm": 2.6252927780151367, + "learning_rate": 1.9999890920282426e-05, + "loss": 1.2971, + "step": 502 + }, + { + "epoch": 0.03153802746253684, + "grad_norm": 2.376293420791626, + "learning_rate": 1.9999881228908932e-05, + "loss": 1.5069, + "step": 503 + }, + { + "epoch": 0.031600727318327165, + "grad_norm": 2.5545594692230225, + "learning_rate": 1.99998711251399e-05, + "loss": 1.3968, + "step": 504 + }, + { + "epoch": 0.0316634271741175, + "grad_norm": 2.4383645057678223, + "learning_rate": 1.999986060897575e-05, + "loss": 1.5806, + "step": 505 + }, + { + "epoch": 0.03172612702990783, + "grad_norm": 2.1803290843963623, + "learning_rate": 1.999984968041691e-05, + "loss": 1.3194, + "step": 506 + }, + { + "epoch": 0.03178882688569816, + "grad_norm": 2.2251033782958984, + "learning_rate": 1.999983833946383e-05, + "loss": 1.4525, + "step": 507 + }, + { + "epoch": 0.031851526741488494, + "grad_norm": 2.22983717918396, + "learning_rate": 1.9999826586116983e-05, + "loss": 1.5096, + "step": 508 + }, + { + "epoch": 0.03191422659727883, + "grad_norm": 2.2592928409576416, + "learning_rate": 1.9999814420376847e-05, + "loss": 1.4426, + "step": 509 + }, + { + "epoch": 0.03197692645306916, + "grad_norm": 2.4302804470062256, + "learning_rate": 1.9999801842243932e-05, + "loss": 1.3532, + "step": 510 + }, + { + "epoch": 0.03203962630885949, + "grad_norm": 2.642951011657715, + "learning_rate": 1.999978885171875e-05, + "loss": 1.2365, + "step": 511 + }, + { + "epoch": 0.03210232616464982, + "grad_norm": 2.233609676361084, + "learning_rate": 1.9999775448801836e-05, + "loss": 1.4263, + "step": 512 + }, + { + "epoch": 0.032165026020440156, + "grad_norm": 2.6236438751220703, + "learning_rate": 1.9999761633493754e-05, + "loss": 1.3082, + "step": 513 + }, + { + "epoch": 0.03222772587623048, + "grad_norm": 2.5305075645446777, + "learning_rate": 1.9999747405795057e-05, + "loss": 1.4062, + "step": 514 + }, + { + "epoch": 0.03229042573202082, + "grad_norm": 2.28743839263916, + "learning_rate": 1.9999732765706344e-05, + "loss": 1.3424, + "step": 515 + }, + { + "epoch": 0.03235312558781115, + "grad_norm": 2.403034210205078, + "learning_rate": 
1.9999717713228217e-05, + "loss": 1.4298, + "step": 516 + }, + { + "epoch": 0.03241582544360148, + "grad_norm": 2.3247034549713135, + "learning_rate": 1.999970224836129e-05, + "loss": 1.3659, + "step": 517 + }, + { + "epoch": 0.03247852529939181, + "grad_norm": 2.4271724224090576, + "learning_rate": 1.999968637110621e-05, + "loss": 1.4296, + "step": 518 + }, + { + "epoch": 0.032541225155182145, + "grad_norm": 2.1600661277770996, + "learning_rate": 1.9999670081463626e-05, + "loss": 1.4148, + "step": 519 + }, + { + "epoch": 0.03260392501097247, + "grad_norm": 2.7695841789245605, + "learning_rate": 1.999965337943421e-05, + "loss": 1.4814, + "step": 520 + }, + { + "epoch": 0.032666624866762806, + "grad_norm": 2.3678624629974365, + "learning_rate": 1.9999636265018655e-05, + "loss": 1.2434, + "step": 521 + }, + { + "epoch": 0.03272932472255314, + "grad_norm": 2.508059024810791, + "learning_rate": 1.999961873821766e-05, + "loss": 1.2588, + "step": 522 + }, + { + "epoch": 0.032792024578343466, + "grad_norm": 2.3360366821289062, + "learning_rate": 1.9999600799031957e-05, + "loss": 1.3405, + "step": 523 + }, + { + "epoch": 0.0328547244341338, + "grad_norm": 2.2513554096221924, + "learning_rate": 1.9999582447462273e-05, + "loss": 1.415, + "step": 524 + }, + { + "epoch": 0.032917424289924134, + "grad_norm": 2.8387839794158936, + "learning_rate": 1.9999563683509377e-05, + "loss": 1.4267, + "step": 525 + }, + { + "epoch": 0.03298012414571447, + "grad_norm": 2.459407329559326, + "learning_rate": 1.999954450717404e-05, + "loss": 1.3775, + "step": 526 + }, + { + "epoch": 0.033042824001504795, + "grad_norm": 2.384125232696533, + "learning_rate": 1.9999524918457045e-05, + "loss": 1.365, + "step": 527 + }, + { + "epoch": 0.03310552385729513, + "grad_norm": 2.581301212310791, + "learning_rate": 1.9999504917359208e-05, + "loss": 1.3383, + "step": 528 + }, + { + "epoch": 0.03316822371308546, + "grad_norm": 2.308518648147583, + "learning_rate": 1.9999484503881354e-05, + "loss": 1.4182, + "step": 529 + }, + { + "epoch": 0.03323092356887579, + "grad_norm": 2.3699300289154053, + "learning_rate": 1.9999463678024317e-05, + "loss": 1.487, + "step": 530 + }, + { + "epoch": 0.03329362342466612, + "grad_norm": 2.464110851287842, + "learning_rate": 1.9999442439788964e-05, + "loss": 1.3863, + "step": 531 + }, + { + "epoch": 0.03335632328045646, + "grad_norm": 2.4526705741882324, + "learning_rate": 1.999942078917617e-05, + "loss": 1.471, + "step": 532 + }, + { + "epoch": 0.033419023136246784, + "grad_norm": 2.2141103744506836, + "learning_rate": 1.9999398726186826e-05, + "loss": 1.437, + "step": 533 + }, + { + "epoch": 0.03348172299203712, + "grad_norm": 2.384971857070923, + "learning_rate": 1.999937625082184e-05, + "loss": 1.5261, + "step": 534 + }, + { + "epoch": 0.03354442284782745, + "grad_norm": 2.5445940494537354, + "learning_rate": 1.999935336308214e-05, + "loss": 1.3912, + "step": 535 + }, + { + "epoch": 0.03360712270361778, + "grad_norm": 2.0035400390625, + "learning_rate": 1.9999330062968672e-05, + "loss": 1.3724, + "step": 536 + }, + { + "epoch": 0.03366982255940811, + "grad_norm": 2.316481113433838, + "learning_rate": 1.9999306350482396e-05, + "loss": 1.4334, + "step": 537 + }, + { + "epoch": 0.033732522415198446, + "grad_norm": 2.6266367435455322, + "learning_rate": 1.999928222562429e-05, + "loss": 1.4794, + "step": 538 + }, + { + "epoch": 0.03379522227098878, + "grad_norm": 2.634350061416626, + "learning_rate": 1.9999257688395347e-05, + "loss": 1.3511, + "step": 539 + }, + { + "epoch": 0.03385792212677911, + 
"grad_norm": 2.107006072998047, + "learning_rate": 1.9999232738796578e-05, + "loss": 1.4739, + "step": 540 + }, + { + "epoch": 0.03392062198256944, + "grad_norm": 2.24641489982605, + "learning_rate": 1.9999207376829016e-05, + "loss": 1.2919, + "step": 541 + }, + { + "epoch": 0.033983321838359774, + "grad_norm": 2.314833164215088, + "learning_rate": 1.9999181602493707e-05, + "loss": 1.552, + "step": 542 + }, + { + "epoch": 0.0340460216941501, + "grad_norm": 2.6825571060180664, + "learning_rate": 1.9999155415791713e-05, + "loss": 1.429, + "step": 543 + }, + { + "epoch": 0.034108721549940435, + "grad_norm": 2.4365108013153076, + "learning_rate": 1.999912881672411e-05, + "loss": 1.4155, + "step": 544 + }, + { + "epoch": 0.03417142140573077, + "grad_norm": 2.88944673538208, + "learning_rate": 1.9999101805291996e-05, + "loss": 1.3605, + "step": 545 + }, + { + "epoch": 0.034234121261521096, + "grad_norm": 2.4805328845977783, + "learning_rate": 1.999907438149649e-05, + "loss": 1.412, + "step": 546 + }, + { + "epoch": 0.03429682111731143, + "grad_norm": 2.5567739009857178, + "learning_rate": 1.999904654533872e-05, + "loss": 1.4153, + "step": 547 + }, + { + "epoch": 0.03435952097310176, + "grad_norm": 2.2676682472229004, + "learning_rate": 1.9999018296819832e-05, + "loss": 1.392, + "step": 548 + }, + { + "epoch": 0.03442222082889209, + "grad_norm": 2.2924914360046387, + "learning_rate": 1.9998989635940996e-05, + "loss": 1.3364, + "step": 549 + }, + { + "epoch": 0.034484920684682424, + "grad_norm": 2.5132558345794678, + "learning_rate": 1.9998960562703387e-05, + "loss": 1.3552, + "step": 550 + }, + { + "epoch": 0.03454762054047276, + "grad_norm": 2.435018301010132, + "learning_rate": 1.999893107710821e-05, + "loss": 1.568, + "step": 551 + }, + { + "epoch": 0.03461032039626309, + "grad_norm": 2.5402867794036865, + "learning_rate": 1.999890117915668e-05, + "loss": 1.5698, + "step": 552 + }, + { + "epoch": 0.03467302025205342, + "grad_norm": 2.517937421798706, + "learning_rate": 1.9998870868850026e-05, + "loss": 1.3935, + "step": 553 + }, + { + "epoch": 0.03473572010784375, + "grad_norm": 2.1765856742858887, + "learning_rate": 1.99988401461895e-05, + "loss": 1.4678, + "step": 554 + }, + { + "epoch": 0.034798419963634086, + "grad_norm": 2.5489859580993652, + "learning_rate": 1.9998809011176373e-05, + "loss": 1.2337, + "step": 555 + }, + { + "epoch": 0.03486111981942441, + "grad_norm": 2.7024948596954346, + "learning_rate": 1.9998777463811924e-05, + "loss": 1.2657, + "step": 556 + }, + { + "epoch": 0.03492381967521475, + "grad_norm": 2.398784875869751, + "learning_rate": 1.9998745504097457e-05, + "loss": 1.5924, + "step": 557 + }, + { + "epoch": 0.03498651953100508, + "grad_norm": 2.4631686210632324, + "learning_rate": 1.9998713132034287e-05, + "loss": 1.2103, + "step": 558 + }, + { + "epoch": 0.03504921938679541, + "grad_norm": 2.2555642127990723, + "learning_rate": 1.9998680347623752e-05, + "loss": 1.4858, + "step": 559 + }, + { + "epoch": 0.03511191924258574, + "grad_norm": 2.2257473468780518, + "learning_rate": 1.99986471508672e-05, + "loss": 1.5208, + "step": 560 + }, + { + "epoch": 0.035174619098376075, + "grad_norm": 2.325604200363159, + "learning_rate": 1.9998613541766004e-05, + "loss": 1.4691, + "step": 561 + }, + { + "epoch": 0.0352373189541664, + "grad_norm": 2.5190088748931885, + "learning_rate": 1.999857952032155e-05, + "loss": 1.3986, + "step": 562 + }, + { + "epoch": 0.035300018809956736, + "grad_norm": 2.4859986305236816, + "learning_rate": 1.9998545086535242e-05, + "loss": 1.4884, + 
"step": 563 + }, + { + "epoch": 0.03536271866574707, + "grad_norm": 2.7685961723327637, + "learning_rate": 1.9998510240408495e-05, + "loss": 1.4114, + "step": 564 + }, + { + "epoch": 0.035425418521537404, + "grad_norm": 2.5349342823028564, + "learning_rate": 1.9998474981942752e-05, + "loss": 1.3394, + "step": 565 + }, + { + "epoch": 0.03548811837732773, + "grad_norm": 2.1383492946624756, + "learning_rate": 1.999843931113946e-05, + "loss": 1.4322, + "step": 566 + }, + { + "epoch": 0.035550818233118064, + "grad_norm": 2.513566017150879, + "learning_rate": 1.99984032280001e-05, + "loss": 1.3954, + "step": 567 + }, + { + "epoch": 0.0356135180889084, + "grad_norm": 2.201481342315674, + "learning_rate": 1.999836673252615e-05, + "loss": 1.3792, + "step": 568 + }, + { + "epoch": 0.035676217944698725, + "grad_norm": 2.204160213470459, + "learning_rate": 1.9998329824719115e-05, + "loss": 1.4545, + "step": 569 + }, + { + "epoch": 0.03573891780048906, + "grad_norm": 2.163015842437744, + "learning_rate": 1.9998292504580528e-05, + "loss": 1.4725, + "step": 570 + }, + { + "epoch": 0.03580161765627939, + "grad_norm": 2.2695558071136475, + "learning_rate": 1.999825477211192e-05, + "loss": 1.3867, + "step": 571 + }, + { + "epoch": 0.03586431751206972, + "grad_norm": 2.0422115325927734, + "learning_rate": 1.999821662731485e-05, + "loss": 1.3611, + "step": 572 + }, + { + "epoch": 0.035927017367860054, + "grad_norm": 2.3288047313690186, + "learning_rate": 1.9998178070190887e-05, + "loss": 1.4412, + "step": 573 + }, + { + "epoch": 0.03598971722365039, + "grad_norm": 2.5950069427490234, + "learning_rate": 1.9998139100741624e-05, + "loss": 1.262, + "step": 574 + }, + { + "epoch": 0.036052417079440714, + "grad_norm": 2.5998637676239014, + "learning_rate": 1.999809971896867e-05, + "loss": 1.1875, + "step": 575 + }, + { + "epoch": 0.03611511693523105, + "grad_norm": 2.287534236907959, + "learning_rate": 1.9998059924873644e-05, + "loss": 1.4513, + "step": 576 + }, + { + "epoch": 0.03617781679102138, + "grad_norm": 2.6133172512054443, + "learning_rate": 1.9998019718458194e-05, + "loss": 1.4987, + "step": 577 + }, + { + "epoch": 0.036240516646811716, + "grad_norm": 2.4394280910491943, + "learning_rate": 1.999797909972397e-05, + "loss": 1.4412, + "step": 578 + }, + { + "epoch": 0.03630321650260204, + "grad_norm": 2.466886043548584, + "learning_rate": 1.9997938068672652e-05, + "loss": 1.3971, + "step": 579 + }, + { + "epoch": 0.036365916358392376, + "grad_norm": 2.1882126331329346, + "learning_rate": 1.9997896625305935e-05, + "loss": 1.442, + "step": 580 + }, + { + "epoch": 0.03642861621418271, + "grad_norm": 2.745283603668213, + "learning_rate": 1.999785476962552e-05, + "loss": 1.238, + "step": 581 + }, + { + "epoch": 0.03649131606997304, + "grad_norm": 2.3348307609558105, + "learning_rate": 1.999781250163314e-05, + "loss": 1.5127, + "step": 582 + }, + { + "epoch": 0.03655401592576337, + "grad_norm": 2.3215179443359375, + "learning_rate": 1.9997769821330534e-05, + "loss": 1.5158, + "step": 583 + }, + { + "epoch": 0.036616715781553705, + "grad_norm": 2.787693977355957, + "learning_rate": 1.9997726728719468e-05, + "loss": 1.2859, + "step": 584 + }, + { + "epoch": 0.03667941563734403, + "grad_norm": 2.41329026222229, + "learning_rate": 1.999768322380171e-05, + "loss": 1.4452, + "step": 585 + }, + { + "epoch": 0.036742115493134365, + "grad_norm": 2.156097888946533, + "learning_rate": 1.9997639306579065e-05, + "loss": 1.3444, + "step": 586 + }, + { + "epoch": 0.0368048153489247, + "grad_norm": 2.1439146995544434, + 
"learning_rate": 1.9997594977053335e-05, + "loss": 1.5435, + "step": 587 + }, + { + "epoch": 0.036867515204715026, + "grad_norm": 2.393010139465332, + "learning_rate": 1.999755023522635e-05, + "loss": 1.3145, + "step": 588 + }, + { + "epoch": 0.03693021506050536, + "grad_norm": 2.5643293857574463, + "learning_rate": 1.9997505081099962e-05, + "loss": 1.4686, + "step": 589 + }, + { + "epoch": 0.036992914916295694, + "grad_norm": 2.183877944946289, + "learning_rate": 1.999745951467602e-05, + "loss": 1.3487, + "step": 590 + }, + { + "epoch": 0.03705561477208603, + "grad_norm": 2.4417922496795654, + "learning_rate": 1.999741353595642e-05, + "loss": 1.3703, + "step": 591 + }, + { + "epoch": 0.037118314627876355, + "grad_norm": 2.4882142543792725, + "learning_rate": 1.9997367144943044e-05, + "loss": 1.4433, + "step": 592 + }, + { + "epoch": 0.03718101448366669, + "grad_norm": 2.5950324535369873, + "learning_rate": 1.999732034163781e-05, + "loss": 1.3433, + "step": 593 + }, + { + "epoch": 0.03724371433945702, + "grad_norm": 2.2926390171051025, + "learning_rate": 1.9997273126042648e-05, + "loss": 1.3892, + "step": 594 + }, + { + "epoch": 0.03730641419524735, + "grad_norm": 2.5568268299102783, + "learning_rate": 1.9997225498159508e-05, + "loss": 1.4381, + "step": 595 + }, + { + "epoch": 0.03736911405103768, + "grad_norm": 2.772700548171997, + "learning_rate": 1.999717745799035e-05, + "loss": 1.2037, + "step": 596 + }, + { + "epoch": 0.03743181390682802, + "grad_norm": 2.6039559841156006, + "learning_rate": 1.999712900553716e-05, + "loss": 1.4387, + "step": 597 + }, + { + "epoch": 0.037494513762618344, + "grad_norm": 2.530191421508789, + "learning_rate": 1.9997080140801932e-05, + "loss": 1.2339, + "step": 598 + }, + { + "epoch": 0.03755721361840868, + "grad_norm": 2.3436741828918457, + "learning_rate": 1.999703086378668e-05, + "loss": 1.3905, + "step": 599 + }, + { + "epoch": 0.03761991347419901, + "grad_norm": 2.3883416652679443, + "learning_rate": 1.9996981174493442e-05, + "loss": 1.2881, + "step": 600 + }, + { + "epoch": 0.03768261332998934, + "grad_norm": 2.4039556980133057, + "learning_rate": 1.999693107292426e-05, + "loss": 1.226, + "step": 601 + }, + { + "epoch": 0.03774531318577967, + "grad_norm": 2.490000009536743, + "learning_rate": 1.999688055908121e-05, + "loss": 1.4631, + "step": 602 + }, + { + "epoch": 0.037808013041570006, + "grad_norm": 2.640787124633789, + "learning_rate": 1.9996829632966363e-05, + "loss": 1.215, + "step": 603 + }, + { + "epoch": 0.03787071289736033, + "grad_norm": 2.4822075366973877, + "learning_rate": 1.9996778294581828e-05, + "loss": 1.4881, + "step": 604 + }, + { + "epoch": 0.037933412753150667, + "grad_norm": 2.944305658340454, + "learning_rate": 1.9996726543929717e-05, + "loss": 1.3627, + "step": 605 + }, + { + "epoch": 0.037996112608941, + "grad_norm": 2.483760356903076, + "learning_rate": 1.9996674381012174e-05, + "loss": 1.2855, + "step": 606 + }, + { + "epoch": 0.038058812464731334, + "grad_norm": 2.4037163257598877, + "learning_rate": 1.9996621805831335e-05, + "loss": 1.3848, + "step": 607 + }, + { + "epoch": 0.03812151232052166, + "grad_norm": 2.250889301300049, + "learning_rate": 1.999656881838938e-05, + "loss": 1.4228, + "step": 608 + }, + { + "epoch": 0.038184212176311995, + "grad_norm": 2.420522451400757, + "learning_rate": 1.9996515418688493e-05, + "loss": 1.5468, + "step": 609 + }, + { + "epoch": 0.03824691203210233, + "grad_norm": 4.539295196533203, + "learning_rate": 1.999646160673087e-05, + "loss": 1.3044, + "step": 610 + }, + { + "epoch": 
0.038309611887892656, + "grad_norm": 2.247469902038574, + "learning_rate": 1.999640738251873e-05, + "loss": 1.285, + "step": 611 + }, + { + "epoch": 0.03837231174368299, + "grad_norm": 2.1443984508514404, + "learning_rate": 1.999635274605432e-05, + "loss": 1.3527, + "step": 612 + }, + { + "epoch": 0.03843501159947332, + "grad_norm": 2.265641450881958, + "learning_rate": 1.9996297697339883e-05, + "loss": 1.3853, + "step": 613 + }, + { + "epoch": 0.03849771145526365, + "grad_norm": 2.5877130031585693, + "learning_rate": 1.999624223637769e-05, + "loss": 1.561, + "step": 614 + }, + { + "epoch": 0.038560411311053984, + "grad_norm": 2.744838237762451, + "learning_rate": 1.9996186363170037e-05, + "loss": 1.4388, + "step": 615 + }, + { + "epoch": 0.03862311116684432, + "grad_norm": 2.541522979736328, + "learning_rate": 1.9996130077719215e-05, + "loss": 1.2611, + "step": 616 + }, + { + "epoch": 0.038685811022634645, + "grad_norm": 2.3541834354400635, + "learning_rate": 1.9996073380027555e-05, + "loss": 1.2719, + "step": 617 + }, + { + "epoch": 0.03874851087842498, + "grad_norm": 2.433357000350952, + "learning_rate": 1.9996016270097394e-05, + "loss": 1.4154, + "step": 618 + }, + { + "epoch": 0.03881121073421531, + "grad_norm": 2.651721715927124, + "learning_rate": 1.9995958747931083e-05, + "loss": 1.2991, + "step": 619 + }, + { + "epoch": 0.038873910590005646, + "grad_norm": 2.4954795837402344, + "learning_rate": 1.9995900813530998e-05, + "loss": 1.4563, + "step": 620 + }, + { + "epoch": 0.03893661044579597, + "grad_norm": 2.4512975215911865, + "learning_rate": 1.9995842466899527e-05, + "loss": 1.398, + "step": 621 + }, + { + "epoch": 0.03899931030158631, + "grad_norm": 2.2399954795837402, + "learning_rate": 1.9995783708039075e-05, + "loss": 1.2401, + "step": 622 + }, + { + "epoch": 0.03906201015737664, + "grad_norm": 2.3984668254852295, + "learning_rate": 1.999572453695207e-05, + "loss": 1.4581, + "step": 623 + }, + { + "epoch": 0.03912471001316697, + "grad_norm": 2.138087511062622, + "learning_rate": 1.999566495364094e-05, + "loss": 1.2462, + "step": 624 + }, + { + "epoch": 0.0391874098689573, + "grad_norm": 2.232602834701538, + "learning_rate": 1.999560495810816e-05, + "loss": 1.3955, + "step": 625 + }, + { + "epoch": 0.039250109724747635, + "grad_norm": 2.891139268875122, + "learning_rate": 1.999554455035619e-05, + "loss": 1.2508, + "step": 626 + }, + { + "epoch": 0.03931280958053796, + "grad_norm": 2.4998199939727783, + "learning_rate": 1.9995483730387527e-05, + "loss": 1.2291, + "step": 627 + }, + { + "epoch": 0.039375509436328296, + "grad_norm": 2.4808998107910156, + "learning_rate": 1.9995422498204683e-05, + "loss": 1.5509, + "step": 628 + }, + { + "epoch": 0.03943820929211863, + "grad_norm": 2.5691699981689453, + "learning_rate": 1.9995360853810172e-05, + "loss": 1.372, + "step": 629 + }, + { + "epoch": 0.03950090914790896, + "grad_norm": 2.724029064178467, + "learning_rate": 1.9995298797206545e-05, + "loss": 1.2732, + "step": 630 + }, + { + "epoch": 0.03956360900369929, + "grad_norm": 2.5944278240203857, + "learning_rate": 1.9995236328396364e-05, + "loss": 1.3989, + "step": 631 + }, + { + "epoch": 0.039626308859489624, + "grad_norm": 2.5144546031951904, + "learning_rate": 1.9995173447382193e-05, + "loss": 1.2205, + "step": 632 + }, + { + "epoch": 0.03968900871527996, + "grad_norm": 2.4939486980438232, + "learning_rate": 1.9995110154166636e-05, + "loss": 1.4848, + "step": 633 + }, + { + "epoch": 0.039751708571070285, + "grad_norm": 2.6257059574127197, + "learning_rate": 
1.99950464487523e-05, + "loss": 1.4012, + "step": 634 + }, + { + "epoch": 0.03981440842686062, + "grad_norm": 2.6233177185058594, + "learning_rate": 1.9994982331141813e-05, + "loss": 1.4211, + "step": 635 + }, + { + "epoch": 0.03987710828265095, + "grad_norm": 2.7006516456604004, + "learning_rate": 1.9994917801337817e-05, + "loss": 1.2735, + "step": 636 + }, + { + "epoch": 0.03993980813844128, + "grad_norm": 2.6941404342651367, + "learning_rate": 1.999485285934297e-05, + "loss": 1.3535, + "step": 637 + }, + { + "epoch": 0.04000250799423161, + "grad_norm": 2.428571939468384, + "learning_rate": 1.999478750515996e-05, + "loss": 1.541, + "step": 638 + }, + { + "epoch": 0.04006520785002195, + "grad_norm": 2.4473507404327393, + "learning_rate": 1.9994721738791475e-05, + "loss": 1.3492, + "step": 639 + }, + { + "epoch": 0.040127907705812274, + "grad_norm": 2.2464020252227783, + "learning_rate": 1.9994655560240233e-05, + "loss": 1.4625, + "step": 640 + }, + { + "epoch": 0.04019060756160261, + "grad_norm": 2.1565909385681152, + "learning_rate": 1.9994588969508955e-05, + "loss": 1.3128, + "step": 641 + }, + { + "epoch": 0.04025330741739294, + "grad_norm": 2.3269944190979004, + "learning_rate": 1.999452196660039e-05, + "loss": 1.5784, + "step": 642 + }, + { + "epoch": 0.04031600727318327, + "grad_norm": 2.511423349380493, + "learning_rate": 1.9994454551517305e-05, + "loss": 1.3202, + "step": 643 + }, + { + "epoch": 0.0403787071289736, + "grad_norm": 2.244643211364746, + "learning_rate": 1.999438672426248e-05, + "loss": 1.4608, + "step": 644 + }, + { + "epoch": 0.040441406984763936, + "grad_norm": 2.254852771759033, + "learning_rate": 1.9994318484838706e-05, + "loss": 1.5347, + "step": 645 + }, + { + "epoch": 0.04050410684055427, + "grad_norm": 2.8063580989837646, + "learning_rate": 1.9994249833248804e-05, + "loss": 1.3225, + "step": 646 + }, + { + "epoch": 0.0405668066963446, + "grad_norm": 2.653256416320801, + "learning_rate": 1.99941807694956e-05, + "loss": 1.4579, + "step": 647 + }, + { + "epoch": 0.04062950655213493, + "grad_norm": 2.7741034030914307, + "learning_rate": 1.9994111293581947e-05, + "loss": 1.2852, + "step": 648 + }, + { + "epoch": 0.040692206407925265, + "grad_norm": 2.4841299057006836, + "learning_rate": 1.9994041405510705e-05, + "loss": 1.4233, + "step": 649 + }, + { + "epoch": 0.04075490626371559, + "grad_norm": 2.489394426345825, + "learning_rate": 1.999397110528476e-05, + "loss": 1.4529, + "step": 650 + }, + { + "epoch": 0.040817606119505925, + "grad_norm": 2.716517210006714, + "learning_rate": 1.9993900392907012e-05, + "loss": 1.4073, + "step": 651 + }, + { + "epoch": 0.04088030597529626, + "grad_norm": 2.334965705871582, + "learning_rate": 1.9993829268380374e-05, + "loss": 1.258, + "step": 652 + }, + { + "epoch": 0.040943005831086586, + "grad_norm": 2.459202289581299, + "learning_rate": 1.9993757731707782e-05, + "loss": 1.5198, + "step": 653 + }, + { + "epoch": 0.04100570568687692, + "grad_norm": 2.5302581787109375, + "learning_rate": 1.9993685782892184e-05, + "loss": 1.2613, + "step": 654 + }, + { + "epoch": 0.041068405542667254, + "grad_norm": 2.3875105381011963, + "learning_rate": 1.9993613421936547e-05, + "loss": 1.3729, + "step": 655 + }, + { + "epoch": 0.04113110539845758, + "grad_norm": 2.382755756378174, + "learning_rate": 1.9993540648843857e-05, + "loss": 1.3791, + "step": 656 + }, + { + "epoch": 0.041193805254247914, + "grad_norm": 2.557313919067383, + "learning_rate": 1.9993467463617115e-05, + "loss": 1.3291, + "step": 657 + }, + { + "epoch": 0.04125650511003825, + 
"grad_norm": 2.579967498779297, + "learning_rate": 1.9993393866259336e-05, + "loss": 1.3413, + "step": 658 + }, + { + "epoch": 0.04131920496582858, + "grad_norm": 2.5337727069854736, + "learning_rate": 1.9993319856773558e-05, + "loss": 1.2791, + "step": 659 + }, + { + "epoch": 0.04138190482161891, + "grad_norm": 2.923696756362915, + "learning_rate": 1.9993245435162834e-05, + "loss": 1.4038, + "step": 660 + }, + { + "epoch": 0.04144460467740924, + "grad_norm": 2.4548096656799316, + "learning_rate": 1.9993170601430233e-05, + "loss": 1.3479, + "step": 661 + }, + { + "epoch": 0.04150730453319958, + "grad_norm": 2.7807576656341553, + "learning_rate": 1.9993095355578834e-05, + "loss": 1.17, + "step": 662 + }, + { + "epoch": 0.0415700043889899, + "grad_norm": 2.3954408168792725, + "learning_rate": 1.999301969761175e-05, + "loss": 1.39, + "step": 663 + }, + { + "epoch": 0.04163270424478024, + "grad_norm": 2.431708574295044, + "learning_rate": 1.99929436275321e-05, + "loss": 1.4403, + "step": 664 + }, + { + "epoch": 0.04169540410057057, + "grad_norm": 2.129969596862793, + "learning_rate": 1.9992867145343016e-05, + "loss": 1.2689, + "step": 665 + }, + { + "epoch": 0.0417581039563609, + "grad_norm": 2.4871673583984375, + "learning_rate": 1.9992790251047655e-05, + "loss": 1.2811, + "step": 666 + }, + { + "epoch": 0.04182080381215123, + "grad_norm": 2.223844528198242, + "learning_rate": 1.9992712944649187e-05, + "loss": 1.444, + "step": 667 + }, + { + "epoch": 0.041883503667941566, + "grad_norm": 2.332399606704712, + "learning_rate": 1.9992635226150802e-05, + "loss": 1.3754, + "step": 668 + }, + { + "epoch": 0.04194620352373189, + "grad_norm": 2.6748688220977783, + "learning_rate": 1.9992557095555702e-05, + "loss": 1.4477, + "step": 669 + }, + { + "epoch": 0.042008903379522226, + "grad_norm": 2.2546045780181885, + "learning_rate": 1.999247855286711e-05, + "loss": 1.3863, + "step": 670 + }, + { + "epoch": 0.04207160323531256, + "grad_norm": 2.449944019317627, + "learning_rate": 1.999239959808827e-05, + "loss": 1.1769, + "step": 671 + }, + { + "epoch": 0.04213430309110289, + "grad_norm": 2.296433210372925, + "learning_rate": 1.999232023122243e-05, + "loss": 1.5186, + "step": 672 + }, + { + "epoch": 0.04219700294689322, + "grad_norm": 2.494285821914673, + "learning_rate": 1.999224045227287e-05, + "loss": 1.4455, + "step": 673 + }, + { + "epoch": 0.042259702802683555, + "grad_norm": 2.4280591011047363, + "learning_rate": 1.999216026124288e-05, + "loss": 1.385, + "step": 674 + }, + { + "epoch": 0.04232240265847389, + "grad_norm": 2.3340649604797363, + "learning_rate": 1.9992079658135757e-05, + "loss": 1.324, + "step": 675 + }, + { + "epoch": 0.042385102514264215, + "grad_norm": 2.6076440811157227, + "learning_rate": 1.999199864295484e-05, + "loss": 1.2649, + "step": 676 + }, + { + "epoch": 0.04244780237005455, + "grad_norm": 2.9232311248779297, + "learning_rate": 1.999191721570346e-05, + "loss": 1.3006, + "step": 677 + }, + { + "epoch": 0.04251050222584488, + "grad_norm": 2.414222478866577, + "learning_rate": 1.9991835376384975e-05, + "loss": 1.3614, + "step": 678 + }, + { + "epoch": 0.04257320208163521, + "grad_norm": 2.4160547256469727, + "learning_rate": 1.9991753125002766e-05, + "loss": 1.4801, + "step": 679 + }, + { + "epoch": 0.042635901937425544, + "grad_norm": 2.526182174682617, + "learning_rate": 1.9991670461560218e-05, + "loss": 1.2612, + "step": 680 + }, + { + "epoch": 0.04269860179321588, + "grad_norm": 2.5602972507476807, + "learning_rate": 1.999158738606075e-05, + "loss": 1.3159, + "step": 681 
+ }, + { + "epoch": 0.042761301649006205, + "grad_norm": 2.4063048362731934, + "learning_rate": 1.9991503898507778e-05, + "loss": 1.4211, + "step": 682 + }, + { + "epoch": 0.04282400150479654, + "grad_norm": 2.5643422603607178, + "learning_rate": 1.999141999890475e-05, + "loss": 1.2567, + "step": 683 + }, + { + "epoch": 0.04288670136058687, + "grad_norm": 2.443889856338501, + "learning_rate": 1.9991335687255122e-05, + "loss": 1.4194, + "step": 684 + }, + { + "epoch": 0.0429494012163772, + "grad_norm": 2.4567017555236816, + "learning_rate": 1.9991250963562376e-05, + "loss": 1.3063, + "step": 685 + }, + { + "epoch": 0.04301210107216753, + "grad_norm": 2.6434214115142822, + "learning_rate": 1.9991165827830006e-05, + "loss": 1.3528, + "step": 686 + }, + { + "epoch": 0.04307480092795787, + "grad_norm": 2.3046786785125732, + "learning_rate": 1.9991080280061518e-05, + "loss": 1.3707, + "step": 687 + }, + { + "epoch": 0.0431375007837482, + "grad_norm": 2.367568016052246, + "learning_rate": 1.9990994320260443e-05, + "loss": 1.4009, + "step": 688 + }, + { + "epoch": 0.04320020063953853, + "grad_norm": 2.434142827987671, + "learning_rate": 1.9990907948430327e-05, + "loss": 1.4287, + "step": 689 + }, + { + "epoch": 0.04326290049532886, + "grad_norm": 2.7404747009277344, + "learning_rate": 1.9990821164574733e-05, + "loss": 1.5103, + "step": 690 + }, + { + "epoch": 0.043325600351119195, + "grad_norm": 2.2679052352905273, + "learning_rate": 1.9990733968697235e-05, + "loss": 1.3125, + "step": 691 + }, + { + "epoch": 0.04338830020690952, + "grad_norm": 2.3885421752929688, + "learning_rate": 1.999064636080143e-05, + "loss": 1.2739, + "step": 692 + }, + { + "epoch": 0.043451000062699856, + "grad_norm": 2.311049699783325, + "learning_rate": 1.9990558340890937e-05, + "loss": 1.2033, + "step": 693 + }, + { + "epoch": 0.04351369991849019, + "grad_norm": 2.3197884559631348, + "learning_rate": 1.999046990896938e-05, + "loss": 1.474, + "step": 694 + }, + { + "epoch": 0.043576399774280516, + "grad_norm": 2.448979616165161, + "learning_rate": 1.9990381065040406e-05, + "loss": 1.2917, + "step": 695 + }, + { + "epoch": 0.04363909963007085, + "grad_norm": 2.66035795211792, + "learning_rate": 1.9990291809107683e-05, + "loss": 1.3359, + "step": 696 + }, + { + "epoch": 0.043701799485861184, + "grad_norm": 2.3976988792419434, + "learning_rate": 1.999020214117489e-05, + "loss": 1.5109, + "step": 697 + }, + { + "epoch": 0.04376449934165151, + "grad_norm": 2.4336955547332764, + "learning_rate": 1.9990112061245723e-05, + "loss": 1.2348, + "step": 698 + }, + { + "epoch": 0.043827199197441845, + "grad_norm": 2.388904333114624, + "learning_rate": 1.99900215693239e-05, + "loss": 1.4709, + "step": 699 + }, + { + "epoch": 0.04388989905323218, + "grad_norm": 2.7825820446014404, + "learning_rate": 1.9989930665413148e-05, + "loss": 1.3056, + "step": 700 + }, + { + "epoch": 0.04395259890902251, + "grad_norm": 2.73049259185791, + "learning_rate": 1.998983934951722e-05, + "loss": 1.2193, + "step": 701 + }, + { + "epoch": 0.04401529876481284, + "grad_norm": 2.703331708908081, + "learning_rate": 1.998974762163988e-05, + "loss": 1.4003, + "step": 702 + }, + { + "epoch": 0.04407799862060317, + "grad_norm": 2.2793080806732178, + "learning_rate": 1.9989655481784917e-05, + "loss": 1.4748, + "step": 703 + }, + { + "epoch": 0.04414069847639351, + "grad_norm": 2.3875200748443604, + "learning_rate": 1.998956292995612e-05, + "loss": 1.4786, + "step": 704 + }, + { + "epoch": 0.044203398332183834, + "grad_norm": 2.3624136447906494, + "learning_rate": 
1.9989469966157314e-05, + "loss": 1.2901, + "step": 705 + }, + { + "epoch": 0.04426609818797417, + "grad_norm": 2.3757517337799072, + "learning_rate": 1.998937659039233e-05, + "loss": 1.2098, + "step": 706 + }, + { + "epoch": 0.0443287980437645, + "grad_norm": 2.1854116916656494, + "learning_rate": 1.998928280266502e-05, + "loss": 1.4003, + "step": 707 + }, + { + "epoch": 0.04439149789955483, + "grad_norm": 2.321620464324951, + "learning_rate": 1.9989188602979254e-05, + "loss": 1.4654, + "step": 708 + }, + { + "epoch": 0.04445419775534516, + "grad_norm": 2.4137284755706787, + "learning_rate": 1.9989093991338907e-05, + "loss": 1.1895, + "step": 709 + }, + { + "epoch": 0.044516897611135496, + "grad_norm": 2.442777395248413, + "learning_rate": 1.998899896774789e-05, + "loss": 1.2754, + "step": 710 + }, + { + "epoch": 0.04457959746692582, + "grad_norm": 2.3496556282043457, + "learning_rate": 1.9988903532210122e-05, + "loss": 1.5404, + "step": 711 + }, + { + "epoch": 0.04464229732271616, + "grad_norm": 2.369337797164917, + "learning_rate": 1.9988807684729535e-05, + "loss": 1.4099, + "step": 712 + }, + { + "epoch": 0.04470499717850649, + "grad_norm": 2.3889167308807373, + "learning_rate": 1.998871142531008e-05, + "loss": 1.459, + "step": 713 + }, + { + "epoch": 0.044767697034296824, + "grad_norm": 2.5084729194641113, + "learning_rate": 1.9988614753955734e-05, + "loss": 1.2318, + "step": 714 + }, + { + "epoch": 0.04483039689008715, + "grad_norm": 2.3253326416015625, + "learning_rate": 1.9988517670670476e-05, + "loss": 1.3311, + "step": 715 + }, + { + "epoch": 0.044893096745877485, + "grad_norm": 2.298435926437378, + "learning_rate": 1.9988420175458313e-05, + "loss": 1.2429, + "step": 716 + }, + { + "epoch": 0.04495579660166782, + "grad_norm": 2.083470106124878, + "learning_rate": 1.998832226832327e-05, + "loss": 1.4932, + "step": 717 + }, + { + "epoch": 0.045018496457458146, + "grad_norm": 2.8396756649017334, + "learning_rate": 1.9988223949269376e-05, + "loss": 1.2056, + "step": 718 + }, + { + "epoch": 0.04508119631324848, + "grad_norm": 2.6425251960754395, + "learning_rate": 1.998812521830069e-05, + "loss": 1.3361, + "step": 719 + }, + { + "epoch": 0.045143896169038814, + "grad_norm": 2.234604835510254, + "learning_rate": 1.9988026075421284e-05, + "loss": 1.2281, + "step": 720 + }, + { + "epoch": 0.04520659602482914, + "grad_norm": 2.4789533615112305, + "learning_rate": 1.998792652063525e-05, + "loss": 1.4153, + "step": 721 + }, + { + "epoch": 0.045269295880619474, + "grad_norm": 2.437704086303711, + "learning_rate": 1.9987826553946686e-05, + "loss": 1.4142, + "step": 722 + }, + { + "epoch": 0.04533199573640981, + "grad_norm": 2.5257911682128906, + "learning_rate": 1.998772617535972e-05, + "loss": 1.2023, + "step": 723 + }, + { + "epoch": 0.045394695592200135, + "grad_norm": 2.3976306915283203, + "learning_rate": 1.9987625384878493e-05, + "loss": 1.5118, + "step": 724 + }, + { + "epoch": 0.04545739544799047, + "grad_norm": 2.460416316986084, + "learning_rate": 1.9987524182507157e-05, + "loss": 1.447, + "step": 725 + }, + { + "epoch": 0.0455200953037808, + "grad_norm": 2.3993418216705322, + "learning_rate": 1.9987422568249886e-05, + "loss": 1.3119, + "step": 726 + }, + { + "epoch": 0.045582795159571136, + "grad_norm": 2.375439167022705, + "learning_rate": 1.9987320542110873e-05, + "loss": 1.3463, + "step": 727 + }, + { + "epoch": 0.04564549501536146, + "grad_norm": 2.2368738651275635, + "learning_rate": 1.998721810409433e-05, + "loss": 1.2921, + "step": 728 + }, + { + "epoch": 0.0457081948711518, 
+ "grad_norm": 2.3163177967071533, + "learning_rate": 1.998711525420447e-05, + "loss": 1.3058, + "step": 729 + }, + { + "epoch": 0.04577089472694213, + "grad_norm": 2.4100430011749268, + "learning_rate": 1.998701199244554e-05, + "loss": 1.4676, + "step": 730 + }, + { + "epoch": 0.04583359458273246, + "grad_norm": 2.8831939697265625, + "learning_rate": 1.9986908318821804e-05, + "loss": 1.3058, + "step": 731 + }, + { + "epoch": 0.04589629443852279, + "grad_norm": 2.4540538787841797, + "learning_rate": 1.998680423333753e-05, + "loss": 1.2598, + "step": 732 + }, + { + "epoch": 0.045958994294313125, + "grad_norm": 2.5371623039245605, + "learning_rate": 1.998669973599701e-05, + "loss": 1.3491, + "step": 733 + }, + { + "epoch": 0.04602169415010345, + "grad_norm": 2.3432161808013916, + "learning_rate": 1.9986594826804563e-05, + "loss": 1.3695, + "step": 734 + }, + { + "epoch": 0.046084394005893786, + "grad_norm": 2.3682754039764404, + "learning_rate": 1.9986489505764507e-05, + "loss": 1.3053, + "step": 735 + }, + { + "epoch": 0.04614709386168412, + "grad_norm": 2.4354300498962402, + "learning_rate": 1.998638377288119e-05, + "loss": 1.5529, + "step": 736 + }, + { + "epoch": 0.04620979371747445, + "grad_norm": 2.579533100128174, + "learning_rate": 1.9986277628158965e-05, + "loss": 1.253, + "step": 737 + }, + { + "epoch": 0.04627249357326478, + "grad_norm": 2.3517487049102783, + "learning_rate": 1.9986171071602215e-05, + "loss": 1.361, + "step": 738 + }, + { + "epoch": 0.046335193429055115, + "grad_norm": 2.6030046939849854, + "learning_rate": 1.998606410321534e-05, + "loss": 1.2908, + "step": 739 + }, + { + "epoch": 0.04639789328484544, + "grad_norm": 2.645944595336914, + "learning_rate": 1.998595672300274e-05, + "loss": 1.2144, + "step": 740 + }, + { + "epoch": 0.046460593140635775, + "grad_norm": 2.5629258155822754, + "learning_rate": 1.9985848930968846e-05, + "loss": 1.4648, + "step": 741 + }, + { + "epoch": 0.04652329299642611, + "grad_norm": 2.438568353652954, + "learning_rate": 1.9985740727118114e-05, + "loss": 1.4401, + "step": 742 + }, + { + "epoch": 0.04658599285221644, + "grad_norm": 2.5641255378723145, + "learning_rate": 1.9985632111454992e-05, + "loss": 1.2619, + "step": 743 + }, + { + "epoch": 0.04664869270800677, + "grad_norm": 2.4810941219329834, + "learning_rate": 1.9985523083983967e-05, + "loss": 1.4382, + "step": 744 + }, + { + "epoch": 0.046711392563797104, + "grad_norm": 2.7489006519317627, + "learning_rate": 1.998541364470954e-05, + "loss": 1.3217, + "step": 745 + }, + { + "epoch": 0.04677409241958744, + "grad_norm": 2.340359687805176, + "learning_rate": 1.998530379363621e-05, + "loss": 1.3179, + "step": 746 + }, + { + "epoch": 0.046836792275377764, + "grad_norm": 2.6784815788269043, + "learning_rate": 1.9985193530768518e-05, + "loss": 1.2586, + "step": 747 + }, + { + "epoch": 0.0468994921311681, + "grad_norm": 2.6561193466186523, + "learning_rate": 1.998508285611101e-05, + "loss": 1.3162, + "step": 748 + }, + { + "epoch": 0.04696219198695843, + "grad_norm": 2.5901665687561035, + "learning_rate": 1.9984971769668248e-05, + "loss": 1.3556, + "step": 749 + }, + { + "epoch": 0.04702489184274876, + "grad_norm": 2.8484201431274414, + "learning_rate": 1.9984860271444816e-05, + "loss": 1.2512, + "step": 750 + }, + { + "epoch": 0.04708759169853909, + "grad_norm": 2.4837758541107178, + "learning_rate": 1.9984748361445306e-05, + "loss": 1.234, + "step": 751 + }, + { + "epoch": 0.04715029155432943, + "grad_norm": 2.5134685039520264, + "learning_rate": 1.9984636039674342e-05, + "loss": 
1.3324, + "step": 752 + }, + { + "epoch": 0.04721299141011975, + "grad_norm": 2.6588265895843506, + "learning_rate": 1.998452330613655e-05, + "loss": 1.222, + "step": 753 + }, + { + "epoch": 0.04727569126591009, + "grad_norm": 2.2506253719329834, + "learning_rate": 1.998441016083658e-05, + "loss": 1.3217, + "step": 754 + }, + { + "epoch": 0.04733839112170042, + "grad_norm": 2.6757376194000244, + "learning_rate": 1.99842966037791e-05, + "loss": 1.3724, + "step": 755 + }, + { + "epoch": 0.047401090977490755, + "grad_norm": 2.7438037395477295, + "learning_rate": 1.998418263496879e-05, + "loss": 1.3212, + "step": 756 + }, + { + "epoch": 0.04746379083328108, + "grad_norm": 3.00738525390625, + "learning_rate": 1.9984068254410357e-05, + "loss": 1.2932, + "step": 757 + }, + { + "epoch": 0.047526490689071416, + "grad_norm": 2.4451406002044678, + "learning_rate": 1.9983953462108506e-05, + "loss": 1.5458, + "step": 758 + }, + { + "epoch": 0.04758919054486175, + "grad_norm": 2.824795961380005, + "learning_rate": 1.998383825806799e-05, + "loss": 1.118, + "step": 759 + }, + { + "epoch": 0.047651890400652076, + "grad_norm": 2.1470086574554443, + "learning_rate": 1.9983722642293534e-05, + "loss": 1.3665, + "step": 760 + }, + { + "epoch": 0.04771459025644241, + "grad_norm": 2.540020227432251, + "learning_rate": 1.998360661478993e-05, + "loss": 1.4165, + "step": 761 + }, + { + "epoch": 0.047777290112232744, + "grad_norm": 2.3594181537628174, + "learning_rate": 1.998349017556195e-05, + "loss": 1.4233, + "step": 762 + }, + { + "epoch": 0.04783998996802307, + "grad_norm": 2.347083806991577, + "learning_rate": 1.9983373324614398e-05, + "loss": 1.3166, + "step": 763 + }, + { + "epoch": 0.047902689823813405, + "grad_norm": 2.16774320602417, + "learning_rate": 1.9983256061952097e-05, + "loss": 1.4007, + "step": 764 + }, + { + "epoch": 0.04796538967960374, + "grad_norm": 2.6637182235717773, + "learning_rate": 1.9983138387579877e-05, + "loss": 1.2855, + "step": 765 + }, + { + "epoch": 0.048028089535394065, + "grad_norm": 2.4501705169677734, + "learning_rate": 1.9983020301502597e-05, + "loss": 1.3595, + "step": 766 + }, + { + "epoch": 0.0480907893911844, + "grad_norm": 2.609168767929077, + "learning_rate": 1.9982901803725122e-05, + "loss": 1.3647, + "step": 767 + }, + { + "epoch": 0.04815348924697473, + "grad_norm": 2.4758646488189697, + "learning_rate": 1.998278289425234e-05, + "loss": 1.4718, + "step": 768 + }, + { + "epoch": 0.04821618910276507, + "grad_norm": 2.6493093967437744, + "learning_rate": 1.9982663573089156e-05, + "loss": 1.5125, + "step": 769 + }, + { + "epoch": 0.048278888958555394, + "grad_norm": 2.4595324993133545, + "learning_rate": 1.998254384024049e-05, + "loss": 1.5487, + "step": 770 + }, + { + "epoch": 0.04834158881434573, + "grad_norm": 2.9884560108184814, + "learning_rate": 1.998242369571128e-05, + "loss": 1.3824, + "step": 771 + }, + { + "epoch": 0.04840428867013606, + "grad_norm": 2.527425527572632, + "learning_rate": 1.9982303139506484e-05, + "loss": 1.3617, + "step": 772 + }, + { + "epoch": 0.04846698852592639, + "grad_norm": 2.717266798019409, + "learning_rate": 1.9982182171631065e-05, + "loss": 1.2295, + "step": 773 + }, + { + "epoch": 0.04852968838171672, + "grad_norm": 2.721583127975464, + "learning_rate": 1.998206079209002e-05, + "loss": 1.3038, + "step": 774 + }, + { + "epoch": 0.048592388237507056, + "grad_norm": 2.5376462936401367, + "learning_rate": 1.9981939000888354e-05, + "loss": 1.4159, + "step": 775 + }, + { + "epoch": 0.04865508809329738, + "grad_norm": 2.863079786300659, + 
"learning_rate": 1.9981816798031088e-05, + "loss": 1.4144, + "step": 776 + }, + { + "epoch": 0.04871778794908772, + "grad_norm": 2.5212502479553223, + "learning_rate": 1.9981694183523257e-05, + "loss": 1.3907, + "step": 777 + }, + { + "epoch": 0.04878048780487805, + "grad_norm": 2.6059210300445557, + "learning_rate": 1.9981571157369925e-05, + "loss": 1.4185, + "step": 778 + }, + { + "epoch": 0.04884318766066838, + "grad_norm": 2.5552444458007812, + "learning_rate": 1.9981447719576163e-05, + "loss": 1.1339, + "step": 779 + }, + { + "epoch": 0.04890588751645871, + "grad_norm": 2.8934972286224365, + "learning_rate": 1.9981323870147058e-05, + "loss": 1.2071, + "step": 780 + }, + { + "epoch": 0.048968587372249045, + "grad_norm": 2.675523281097412, + "learning_rate": 1.9981199609087726e-05, + "loss": 1.4558, + "step": 781 + }, + { + "epoch": 0.04903128722803938, + "grad_norm": 2.492743730545044, + "learning_rate": 1.998107493640328e-05, + "loss": 1.6118, + "step": 782 + }, + { + "epoch": 0.049093987083829706, + "grad_norm": 2.4601569175720215, + "learning_rate": 1.9980949852098873e-05, + "loss": 1.4048, + "step": 783 + }, + { + "epoch": 0.04915668693962004, + "grad_norm": 2.5114433765411377, + "learning_rate": 1.9980824356179654e-05, + "loss": 1.3277, + "step": 784 + }, + { + "epoch": 0.04921938679541037, + "grad_norm": 2.602379083633423, + "learning_rate": 1.9980698448650805e-05, + "loss": 1.4059, + "step": 785 + }, + { + "epoch": 0.0492820866512007, + "grad_norm": 2.496201753616333, + "learning_rate": 1.9980572129517517e-05, + "loss": 1.2154, + "step": 786 + }, + { + "epoch": 0.049344786506991034, + "grad_norm": 2.2243387699127197, + "learning_rate": 1.9980445398784998e-05, + "loss": 1.2868, + "step": 787 + }, + { + "epoch": 0.04940748636278137, + "grad_norm": 2.7783920764923096, + "learning_rate": 1.9980318256458474e-05, + "loss": 1.2682, + "step": 788 + }, + { + "epoch": 0.049470186218571695, + "grad_norm": 2.4680097103118896, + "learning_rate": 1.998019070254319e-05, + "loss": 1.4468, + "step": 789 + }, + { + "epoch": 0.04953288607436203, + "grad_norm": 2.707218885421753, + "learning_rate": 1.9980062737044402e-05, + "loss": 1.2743, + "step": 790 + }, + { + "epoch": 0.04959558593015236, + "grad_norm": 2.5456490516662598, + "learning_rate": 1.9979934359967398e-05, + "loss": 1.4155, + "step": 791 + }, + { + "epoch": 0.04965828578594269, + "grad_norm": 2.6581568717956543, + "learning_rate": 1.997980557131746e-05, + "loss": 1.2311, + "step": 792 + }, + { + "epoch": 0.04972098564173302, + "grad_norm": 2.6159274578094482, + "learning_rate": 1.9979676371099905e-05, + "loss": 1.2918, + "step": 793 + }, + { + "epoch": 0.04978368549752336, + "grad_norm": 2.5730254650115967, + "learning_rate": 1.997954675932006e-05, + "loss": 1.2775, + "step": 794 + }, + { + "epoch": 0.04984638535331369, + "grad_norm": 2.3798301219940186, + "learning_rate": 1.9979416735983273e-05, + "loss": 1.434, + "step": 795 + }, + { + "epoch": 0.04990908520910402, + "grad_norm": 2.5276296138763428, + "learning_rate": 1.9979286301094905e-05, + "loss": 1.3058, + "step": 796 + }, + { + "epoch": 0.04997178506489435, + "grad_norm": 2.30886173248291, + "learning_rate": 1.9979155454660334e-05, + "loss": 1.3973, + "step": 797 + }, + { + "epoch": 0.050034484920684685, + "grad_norm": 2.6314690113067627, + "learning_rate": 1.9979024196684952e-05, + "loss": 1.3519, + "step": 798 + }, + { + "epoch": 0.05009718477647501, + "grad_norm": 2.5101120471954346, + "learning_rate": 1.997889252717418e-05, + "loss": 1.1783, + "step": 799 + }, + { + 
"epoch": 0.050159884632265346, + "grad_norm": 2.5742878913879395, + "learning_rate": 1.9978760446133442e-05, + "loss": 1.3591, + "step": 800 + }, + { + "epoch": 0.05022258448805568, + "grad_norm": 2.4853763580322266, + "learning_rate": 1.9978627953568187e-05, + "loss": 1.255, + "step": 801 + }, + { + "epoch": 0.05028528434384601, + "grad_norm": 2.440086841583252, + "learning_rate": 1.9978495049483883e-05, + "loss": 1.3954, + "step": 802 + }, + { + "epoch": 0.05034798419963634, + "grad_norm": 2.484473705291748, + "learning_rate": 1.9978361733886003e-05, + "loss": 1.1564, + "step": 803 + }, + { + "epoch": 0.050410684055426674, + "grad_norm": 2.2278010845184326, + "learning_rate": 1.9978228006780056e-05, + "loss": 1.4205, + "step": 804 + }, + { + "epoch": 0.050473383911217, + "grad_norm": 2.826495885848999, + "learning_rate": 1.9978093868171547e-05, + "loss": 1.3783, + "step": 805 + }, + { + "epoch": 0.050536083767007335, + "grad_norm": 2.6370222568511963, + "learning_rate": 1.997795931806601e-05, + "loss": 1.3419, + "step": 806 + }, + { + "epoch": 0.05059878362279767, + "grad_norm": 2.703270673751831, + "learning_rate": 1.9977824356468994e-05, + "loss": 1.5886, + "step": 807 + }, + { + "epoch": 0.050661483478587996, + "grad_norm": 2.6265194416046143, + "learning_rate": 1.9977688983386065e-05, + "loss": 1.4408, + "step": 808 + }, + { + "epoch": 0.05072418333437833, + "grad_norm": 2.5425236225128174, + "learning_rate": 1.997755319882281e-05, + "loss": 1.2765, + "step": 809 + }, + { + "epoch": 0.050786883190168663, + "grad_norm": 2.4533538818359375, + "learning_rate": 1.9977417002784823e-05, + "loss": 1.4806, + "step": 810 + }, + { + "epoch": 0.050849583045959, + "grad_norm": 2.359133243560791, + "learning_rate": 1.9977280395277728e-05, + "loss": 1.4826, + "step": 811 + }, + { + "epoch": 0.050912282901749324, + "grad_norm": 2.487460136413574, + "learning_rate": 1.997714337630715e-05, + "loss": 1.3498, + "step": 812 + }, + { + "epoch": 0.05097498275753966, + "grad_norm": 2.761617422103882, + "learning_rate": 1.9977005945878744e-05, + "loss": 1.2817, + "step": 813 + }, + { + "epoch": 0.05103768261332999, + "grad_norm": 2.2673192024230957, + "learning_rate": 1.9976868103998174e-05, + "loss": 1.2804, + "step": 814 + }, + { + "epoch": 0.05110038246912032, + "grad_norm": 2.4426419734954834, + "learning_rate": 1.997672985067113e-05, + "loss": 1.2699, + "step": 815 + }, + { + "epoch": 0.05116308232491065, + "grad_norm": 2.559535026550293, + "learning_rate": 1.997659118590331e-05, + "loss": 1.3096, + "step": 816 + }, + { + "epoch": 0.051225782180700986, + "grad_norm": 2.4309756755828857, + "learning_rate": 1.9976452109700436e-05, + "loss": 1.4722, + "step": 817 + }, + { + "epoch": 0.05128848203649131, + "grad_norm": 2.602914333343506, + "learning_rate": 1.997631262206824e-05, + "loss": 1.4361, + "step": 818 + }, + { + "epoch": 0.05135118189228165, + "grad_norm": 2.2998552322387695, + "learning_rate": 1.997617272301248e-05, + "loss": 1.4323, + "step": 819 + }, + { + "epoch": 0.05141388174807198, + "grad_norm": 2.5676658153533936, + "learning_rate": 1.9976032412538914e-05, + "loss": 1.3606, + "step": 820 + }, + { + "epoch": 0.05147658160386231, + "grad_norm": 2.7907283306121826, + "learning_rate": 1.997589169065334e-05, + "loss": 1.3767, + "step": 821 + }, + { + "epoch": 0.05153928145965264, + "grad_norm": 2.44295072555542, + "learning_rate": 1.9975750557361556e-05, + "loss": 1.3453, + "step": 822 + }, + { + "epoch": 0.051601981315442975, + "grad_norm": 2.397301197052002, + "learning_rate": 
1.9975609012669387e-05, + "loss": 1.5276, + "step": 823 + }, + { + "epoch": 0.05166468117123331, + "grad_norm": 2.421433925628662, + "learning_rate": 1.997546705658266e-05, + "loss": 1.2473, + "step": 824 + }, + { + "epoch": 0.051727381027023636, + "grad_norm": 2.227354049682617, + "learning_rate": 1.997532468910724e-05, + "loss": 1.3483, + "step": 825 + }, + { + "epoch": 0.05179008088281397, + "grad_norm": 2.529763698577881, + "learning_rate": 1.9975181910248994e-05, + "loss": 1.3699, + "step": 826 + }, + { + "epoch": 0.051852780738604304, + "grad_norm": 2.544827699661255, + "learning_rate": 1.997503872001381e-05, + "loss": 1.4523, + "step": 827 + }, + { + "epoch": 0.05191548059439463, + "grad_norm": 2.835536003112793, + "learning_rate": 1.9974895118407593e-05, + "loss": 1.1587, + "step": 828 + }, + { + "epoch": 0.051978180450184965, + "grad_norm": 2.5233607292175293, + "learning_rate": 1.9974751105436266e-05, + "loss": 1.3468, + "step": 829 + }, + { + "epoch": 0.0520408803059753, + "grad_norm": 2.6870508193969727, + "learning_rate": 1.997460668110577e-05, + "loss": 1.3733, + "step": 830 + }, + { + "epoch": 0.052103580161765625, + "grad_norm": 2.5915393829345703, + "learning_rate": 1.9974461845422057e-05, + "loss": 1.3207, + "step": 831 + }, + { + "epoch": 0.05216628001755596, + "grad_norm": 2.618290901184082, + "learning_rate": 1.9974316598391098e-05, + "loss": 1.2816, + "step": 832 + }, + { + "epoch": 0.05222897987334629, + "grad_norm": 2.357779026031494, + "learning_rate": 1.9974170940018893e-05, + "loss": 1.2824, + "step": 833 + }, + { + "epoch": 0.05229167972913662, + "grad_norm": 2.351287841796875, + "learning_rate": 1.997402487031144e-05, + "loss": 1.3624, + "step": 834 + }, + { + "epoch": 0.052354379584926954, + "grad_norm": 2.6121628284454346, + "learning_rate": 1.997387838927477e-05, + "loss": 1.413, + "step": 835 + }, + { + "epoch": 0.05241707944071729, + "grad_norm": 2.628267288208008, + "learning_rate": 1.9973731496914914e-05, + "loss": 1.4037, + "step": 836 + }, + { + "epoch": 0.05247977929650762, + "grad_norm": 2.689682722091675, + "learning_rate": 1.9973584193237937e-05, + "loss": 1.3643, + "step": 837 + }, + { + "epoch": 0.05254247915229795, + "grad_norm": 2.615882396697998, + "learning_rate": 1.9973436478249912e-05, + "loss": 1.1925, + "step": 838 + }, + { + "epoch": 0.05260517900808828, + "grad_norm": 2.307039260864258, + "learning_rate": 1.9973288351956933e-05, + "loss": 1.4801, + "step": 839 + }, + { + "epoch": 0.052667878863878616, + "grad_norm": 2.3024814128875732, + "learning_rate": 1.9973139814365106e-05, + "loss": 1.448, + "step": 840 + }, + { + "epoch": 0.05273057871966894, + "grad_norm": 2.3254010677337646, + "learning_rate": 1.9972990865480555e-05, + "loss": 1.5266, + "step": 841 + }, + { + "epoch": 0.052793278575459276, + "grad_norm": 2.640583038330078, + "learning_rate": 1.997284150530943e-05, + "loss": 1.3436, + "step": 842 + }, + { + "epoch": 0.05285597843124961, + "grad_norm": 2.676729679107666, + "learning_rate": 1.997269173385788e-05, + "loss": 1.2707, + "step": 843 + }, + { + "epoch": 0.05291867828703994, + "grad_norm": 2.696059465408325, + "learning_rate": 1.9972541551132095e-05, + "loss": 1.3383, + "step": 844 + }, + { + "epoch": 0.05298137814283027, + "grad_norm": 2.322068691253662, + "learning_rate": 1.9972390957138253e-05, + "loss": 1.2994, + "step": 845 + }, + { + "epoch": 0.053044077998620605, + "grad_norm": 2.47586727142334, + "learning_rate": 1.9972239951882577e-05, + "loss": 1.2565, + "step": 846 + }, + { + "epoch": 0.05310677785441093, + 
"grad_norm": 2.460615396499634, + "learning_rate": 1.997208853537129e-05, + "loss": 1.2968, + "step": 847 + }, + { + "epoch": 0.053169477710201266, + "grad_norm": 2.7346270084381104, + "learning_rate": 1.9971936707610636e-05, + "loss": 1.4115, + "step": 848 + }, + { + "epoch": 0.0532321775659916, + "grad_norm": 2.476101875305176, + "learning_rate": 1.9971784468606877e-05, + "loss": 1.2766, + "step": 849 + }, + { + "epoch": 0.05329487742178193, + "grad_norm": 2.794489622116089, + "learning_rate": 1.9971631818366288e-05, + "loss": 1.3693, + "step": 850 + }, + { + "epoch": 0.05335757727757226, + "grad_norm": 2.8534739017486572, + "learning_rate": 1.997147875689517e-05, + "loss": 1.3378, + "step": 851 + }, + { + "epoch": 0.053420277133362594, + "grad_norm": 2.9984018802642822, + "learning_rate": 1.9971325284199833e-05, + "loss": 1.348, + "step": 852 + }, + { + "epoch": 0.05348297698915293, + "grad_norm": 2.9481723308563232, + "learning_rate": 1.9971171400286602e-05, + "loss": 1.2671, + "step": 853 + }, + { + "epoch": 0.053545676844943255, + "grad_norm": 2.307497024536133, + "learning_rate": 1.9971017105161833e-05, + "loss": 1.3072, + "step": 854 + }, + { + "epoch": 0.05360837670073359, + "grad_norm": 2.585742473602295, + "learning_rate": 1.9970862398831875e-05, + "loss": 1.1865, + "step": 855 + }, + { + "epoch": 0.05367107655652392, + "grad_norm": 2.6186087131500244, + "learning_rate": 1.9970707281303126e-05, + "loss": 1.0296, + "step": 856 + }, + { + "epoch": 0.05373377641231425, + "grad_norm": 2.90307879447937, + "learning_rate": 1.9970551752581964e-05, + "loss": 1.3072, + "step": 857 + }, + { + "epoch": 0.05379647626810458, + "grad_norm": 2.3025949001312256, + "learning_rate": 1.997039581267482e-05, + "loss": 1.5501, + "step": 858 + }, + { + "epoch": 0.05385917612389492, + "grad_norm": 2.794062614440918, + "learning_rate": 1.9970239461588115e-05, + "loss": 1.244, + "step": 859 + }, + { + "epoch": 0.053921875979685244, + "grad_norm": 2.6304771900177, + "learning_rate": 1.99700826993283e-05, + "loss": 1.3144, + "step": 860 + }, + { + "epoch": 0.05398457583547558, + "grad_norm": 2.4890964031219482, + "learning_rate": 1.9969925525901834e-05, + "loss": 1.1498, + "step": 861 + }, + { + "epoch": 0.05404727569126591, + "grad_norm": 1.9616060256958008, + "learning_rate": 1.996976794131521e-05, + "loss": 1.4049, + "step": 862 + }, + { + "epoch": 0.054109975547056245, + "grad_norm": 2.3030712604522705, + "learning_rate": 1.9969609945574914e-05, + "loss": 1.2757, + "step": 863 + }, + { + "epoch": 0.05417267540284657, + "grad_norm": 2.374952793121338, + "learning_rate": 1.9969451538687474e-05, + "loss": 1.4319, + "step": 864 + }, + { + "epoch": 0.054235375258636906, + "grad_norm": 2.285092353820801, + "learning_rate": 1.9969292720659416e-05, + "loss": 1.2622, + "step": 865 + }, + { + "epoch": 0.05429807511442724, + "grad_norm": 2.562075614929199, + "learning_rate": 1.996913349149729e-05, + "loss": 1.2253, + "step": 866 + }, + { + "epoch": 0.05436077497021757, + "grad_norm": 2.660186290740967, + "learning_rate": 1.9968973851207662e-05, + "loss": 1.3969, + "step": 867 + }, + { + "epoch": 0.0544234748260079, + "grad_norm": 2.4297733306884766, + "learning_rate": 1.996881379979712e-05, + "loss": 1.3791, + "step": 868 + }, + { + "epoch": 0.054486174681798234, + "grad_norm": 2.5998642444610596, + "learning_rate": 1.9968653337272262e-05, + "loss": 1.2967, + "step": 869 + }, + { + "epoch": 0.05454887453758856, + "grad_norm": 2.592989683151245, + "learning_rate": 1.9968492463639704e-05, + "loss": 1.3907, + 
"step": 870 + }, + { + "epoch": 0.054611574393378895, + "grad_norm": 3.010082721710205, + "learning_rate": 1.9968331178906082e-05, + "loss": 1.444, + "step": 871 + }, + { + "epoch": 0.05467427424916923, + "grad_norm": 2.5760438442230225, + "learning_rate": 1.9968169483078043e-05, + "loss": 1.4071, + "step": 872 + }, + { + "epoch": 0.054736974104959556, + "grad_norm": 3.113914966583252, + "learning_rate": 1.9968007376162265e-05, + "loss": 1.191, + "step": 873 + }, + { + "epoch": 0.05479967396074989, + "grad_norm": 2.5330982208251953, + "learning_rate": 1.9967844858165423e-05, + "loss": 1.4178, + "step": 874 + }, + { + "epoch": 0.05486237381654022, + "grad_norm": 2.8806002140045166, + "learning_rate": 1.9967681929094228e-05, + "loss": 1.2648, + "step": 875 + }, + { + "epoch": 0.05492507367233055, + "grad_norm": 2.7232344150543213, + "learning_rate": 1.996751858895539e-05, + "loss": 1.2239, + "step": 876 + }, + { + "epoch": 0.054987773528120884, + "grad_norm": 2.535862922668457, + "learning_rate": 1.996735483775565e-05, + "loss": 1.404, + "step": 877 + }, + { + "epoch": 0.05505047338391122, + "grad_norm": 2.466148614883423, + "learning_rate": 1.9967190675501766e-05, + "loss": 1.2565, + "step": 878 + }, + { + "epoch": 0.05511317323970155, + "grad_norm": 2.419408082962036, + "learning_rate": 1.9967026102200503e-05, + "loss": 1.4343, + "step": 879 + }, + { + "epoch": 0.05517587309549188, + "grad_norm": 2.497140645980835, + "learning_rate": 1.996686111785864e-05, + "loss": 1.3379, + "step": 880 + }, + { + "epoch": 0.05523857295128221, + "grad_norm": 2.681546449661255, + "learning_rate": 1.9966695722483e-05, + "loss": 1.3624, + "step": 881 + }, + { + "epoch": 0.055301272807072546, + "grad_norm": 2.4991652965545654, + "learning_rate": 1.996652991608039e-05, + "loss": 1.2844, + "step": 882 + }, + { + "epoch": 0.05536397266286287, + "grad_norm": 2.614464521408081, + "learning_rate": 1.996636369865765e-05, + "loss": 1.3426, + "step": 883 + }, + { + "epoch": 0.05542667251865321, + "grad_norm": 2.6509597301483154, + "learning_rate": 1.9966197070221634e-05, + "loss": 1.3885, + "step": 884 + }, + { + "epoch": 0.05548937237444354, + "grad_norm": 2.5003342628479004, + "learning_rate": 1.9966030030779216e-05, + "loss": 1.3511, + "step": 885 + }, + { + "epoch": 0.05555207223023387, + "grad_norm": 2.465104579925537, + "learning_rate": 1.9965862580337286e-05, + "loss": 1.4505, + "step": 886 + }, + { + "epoch": 0.0556147720860242, + "grad_norm": 2.297727346420288, + "learning_rate": 1.9965694718902745e-05, + "loss": 1.2247, + "step": 887 + }, + { + "epoch": 0.055677471941814535, + "grad_norm": 2.480191230773926, + "learning_rate": 1.996552644648252e-05, + "loss": 1.2516, + "step": 888 + }, + { + "epoch": 0.05574017179760486, + "grad_norm": 2.484630823135376, + "learning_rate": 1.9965357763083552e-05, + "loss": 1.3418, + "step": 889 + }, + { + "epoch": 0.055802871653395196, + "grad_norm": 2.4757261276245117, + "learning_rate": 1.9965188668712793e-05, + "loss": 1.3328, + "step": 890 + }, + { + "epoch": 0.05586557150918553, + "grad_norm": 2.599947929382324, + "learning_rate": 1.996501916337722e-05, + "loss": 1.3596, + "step": 891 + }, + { + "epoch": 0.055928271364975864, + "grad_norm": 2.4501922130584717, + "learning_rate": 1.9964849247083817e-05, + "loss": 1.3062, + "step": 892 + }, + { + "epoch": 0.05599097122076619, + "grad_norm": 2.725106716156006, + "learning_rate": 1.99646789198396e-05, + "loss": 1.3807, + "step": 893 + }, + { + "epoch": 0.056053671076556524, + "grad_norm": 2.766155242919922, + 
"learning_rate": 1.9964508181651586e-05, + "loss": 1.3833, + "step": 894 + }, + { + "epoch": 0.05611637093234686, + "grad_norm": 3.011690139770508, + "learning_rate": 1.996433703252682e-05, + "loss": 1.2742, + "step": 895 + }, + { + "epoch": 0.056179070788137185, + "grad_norm": 2.642080307006836, + "learning_rate": 1.9964165472472364e-05, + "loss": 1.2465, + "step": 896 + }, + { + "epoch": 0.05624177064392752, + "grad_norm": 3.0767345428466797, + "learning_rate": 1.9963993501495284e-05, + "loss": 1.2312, + "step": 897 + }, + { + "epoch": 0.05630447049971785, + "grad_norm": 2.5653083324432373, + "learning_rate": 1.996382111960268e-05, + "loss": 1.2068, + "step": 898 + }, + { + "epoch": 0.05636717035550818, + "grad_norm": 2.733785629272461, + "learning_rate": 1.9963648326801653e-05, + "loss": 1.2485, + "step": 899 + }, + { + "epoch": 0.05642987021129851, + "grad_norm": 2.585791826248169, + "learning_rate": 1.996347512309934e-05, + "loss": 1.2287, + "step": 900 + }, + { + "epoch": 0.05649257006708885, + "grad_norm": 2.8484606742858887, + "learning_rate": 1.9963301508502876e-05, + "loss": 1.4082, + "step": 901 + }, + { + "epoch": 0.056555269922879174, + "grad_norm": 2.7703535556793213, + "learning_rate": 1.996312748301942e-05, + "loss": 1.2625, + "step": 902 + }, + { + "epoch": 0.05661796977866951, + "grad_norm": 2.5547525882720947, + "learning_rate": 1.996295304665615e-05, + "loss": 1.1756, + "step": 903 + }, + { + "epoch": 0.05668066963445984, + "grad_norm": 2.646908760070801, + "learning_rate": 1.9962778199420265e-05, + "loss": 1.3052, + "step": 904 + }, + { + "epoch": 0.056743369490250176, + "grad_norm": 2.6104736328125, + "learning_rate": 1.9962602941318973e-05, + "loss": 1.2628, + "step": 905 + }, + { + "epoch": 0.0568060693460405, + "grad_norm": 2.601595878601074, + "learning_rate": 1.9962427272359498e-05, + "loss": 1.3578, + "step": 906 + }, + { + "epoch": 0.056868769201830836, + "grad_norm": 2.540867567062378, + "learning_rate": 1.9962251192549088e-05, + "loss": 1.2037, + "step": 907 + }, + { + "epoch": 0.05693146905762117, + "grad_norm": 2.836087226867676, + "learning_rate": 1.9962074701895006e-05, + "loss": 1.2038, + "step": 908 + }, + { + "epoch": 0.0569941689134115, + "grad_norm": 2.5596938133239746, + "learning_rate": 1.9961897800404527e-05, + "loss": 1.3949, + "step": 909 + }, + { + "epoch": 0.05705686876920183, + "grad_norm": 2.6723711490631104, + "learning_rate": 1.9961720488084945e-05, + "loss": 1.4159, + "step": 910 + }, + { + "epoch": 0.057119568624992165, + "grad_norm": 2.9663219451904297, + "learning_rate": 1.9961542764943576e-05, + "loss": 1.2353, + "step": 911 + }, + { + "epoch": 0.05718226848078249, + "grad_norm": 2.5667338371276855, + "learning_rate": 1.996136463098775e-05, + "loss": 1.3429, + "step": 912 + }, + { + "epoch": 0.057244968336572825, + "grad_norm": 2.4332594871520996, + "learning_rate": 1.996118608622481e-05, + "loss": 1.2953, + "step": 913 + }, + { + "epoch": 0.05730766819236316, + "grad_norm": 2.665006637573242, + "learning_rate": 1.9961007130662123e-05, + "loss": 1.1688, + "step": 914 + }, + { + "epoch": 0.057370368048153486, + "grad_norm": 2.331287384033203, + "learning_rate": 1.9960827764307065e-05, + "loss": 1.2175, + "step": 915 + }, + { + "epoch": 0.05743306790394382, + "grad_norm": 2.5570809841156006, + "learning_rate": 1.9960647987167035e-05, + "loss": 1.3983, + "step": 916 + }, + { + "epoch": 0.057495767759734154, + "grad_norm": 2.430894374847412, + "learning_rate": 1.9960467799249446e-05, + "loss": 1.3961, + "step": 917 + }, + { + "epoch": 
0.05755846761552449, + "grad_norm": 2.779379367828369, + "learning_rate": 1.9960287200561736e-05, + "loss": 1.0504, + "step": 918 + }, + { + "epoch": 0.057621167471314814, + "grad_norm": 2.440476417541504, + "learning_rate": 1.996010619111134e-05, + "loss": 1.3213, + "step": 919 + }, + { + "epoch": 0.05768386732710515, + "grad_norm": 2.956186532974243, + "learning_rate": 1.9959924770905733e-05, + "loss": 1.4191, + "step": 920 + }, + { + "epoch": 0.05774656718289548, + "grad_norm": 2.8288896083831787, + "learning_rate": 1.9959742939952393e-05, + "loss": 1.418, + "step": 921 + }, + { + "epoch": 0.05780926703868581, + "grad_norm": 2.525137186050415, + "learning_rate": 1.995956069825882e-05, + "loss": 1.271, + "step": 922 + }, + { + "epoch": 0.05787196689447614, + "grad_norm": 2.7856364250183105, + "learning_rate": 1.9959378045832526e-05, + "loss": 1.276, + "step": 923 + }, + { + "epoch": 0.05793466675026648, + "grad_norm": 2.193737030029297, + "learning_rate": 1.995919498268105e-05, + "loss": 1.4654, + "step": 924 + }, + { + "epoch": 0.057997366606056804, + "grad_norm": 2.505089044570923, + "learning_rate": 1.9959011508811932e-05, + "loss": 1.4424, + "step": 925 + }, + { + "epoch": 0.05806006646184714, + "grad_norm": 2.6274335384368896, + "learning_rate": 1.995882762423275e-05, + "loss": 1.3512, + "step": 926 + }, + { + "epoch": 0.05812276631763747, + "grad_norm": 2.4132328033447266, + "learning_rate": 1.9958643328951083e-05, + "loss": 1.2621, + "step": 927 + }, + { + "epoch": 0.0581854661734278, + "grad_norm": 2.7425177097320557, + "learning_rate": 1.9958458622974525e-05, + "loss": 1.222, + "step": 928 + }, + { + "epoch": 0.05824816602921813, + "grad_norm": 2.9089114665985107, + "learning_rate": 1.9958273506310703e-05, + "loss": 1.2491, + "step": 929 + }, + { + "epoch": 0.058310865885008466, + "grad_norm": 2.422116756439209, + "learning_rate": 1.9958087978967245e-05, + "loss": 1.153, + "step": 930 + }, + { + "epoch": 0.0583735657407988, + "grad_norm": 2.225642442703247, + "learning_rate": 1.9957902040951803e-05, + "loss": 1.3609, + "step": 931 + }, + { + "epoch": 0.058436265596589126, + "grad_norm": 2.567652463912964, + "learning_rate": 1.9957715692272047e-05, + "loss": 1.5107, + "step": 932 + }, + { + "epoch": 0.05849896545237946, + "grad_norm": 2.788205862045288, + "learning_rate": 1.995752893293566e-05, + "loss": 1.2893, + "step": 933 + }, + { + "epoch": 0.058561665308169794, + "grad_norm": 2.824765682220459, + "learning_rate": 1.9957341762950346e-05, + "loss": 1.2986, + "step": 934 + }, + { + "epoch": 0.05862436516396012, + "grad_norm": 2.879718065261841, + "learning_rate": 1.9957154182323825e-05, + "loss": 1.3037, + "step": 935 + }, + { + "epoch": 0.058687065019750455, + "grad_norm": 2.7131617069244385, + "learning_rate": 1.9956966191063825e-05, + "loss": 1.2179, + "step": 936 + }, + { + "epoch": 0.05874976487554079, + "grad_norm": 2.2003540992736816, + "learning_rate": 1.995677778917811e-05, + "loss": 1.3354, + "step": 937 + }, + { + "epoch": 0.058812464731331116, + "grad_norm": 2.7957353591918945, + "learning_rate": 1.9956588976674442e-05, + "loss": 1.2338, + "step": 938 + }, + { + "epoch": 0.05887516458712145, + "grad_norm": 2.677849054336548, + "learning_rate": 1.995639975356061e-05, + "loss": 1.1815, + "step": 939 + }, + { + "epoch": 0.05893786444291178, + "grad_norm": 2.6023590564727783, + "learning_rate": 1.9956210119844416e-05, + "loss": 1.152, + "step": 940 + }, + { + "epoch": 0.05900056429870211, + "grad_norm": 2.4914019107818604, + "learning_rate": 1.9956020075533683e-05, + 
"loss": 1.2309, + "step": 941 + }, + { + "epoch": 0.059063264154492444, + "grad_norm": 2.614649534225464, + "learning_rate": 1.9955829620636245e-05, + "loss": 1.3295, + "step": 942 + }, + { + "epoch": 0.05912596401028278, + "grad_norm": 2.553225517272949, + "learning_rate": 1.9955638755159966e-05, + "loss": 1.3329, + "step": 943 + }, + { + "epoch": 0.059188663866073105, + "grad_norm": 2.8196825981140137, + "learning_rate": 1.99554474791127e-05, + "loss": 1.2634, + "step": 944 + }, + { + "epoch": 0.05925136372186344, + "grad_norm": 2.3634018898010254, + "learning_rate": 1.9955255792502354e-05, + "loss": 1.3548, + "step": 945 + }, + { + "epoch": 0.05931406357765377, + "grad_norm": 2.565140724182129, + "learning_rate": 1.9955063695336817e-05, + "loss": 1.3733, + "step": 946 + }, + { + "epoch": 0.059376763433444106, + "grad_norm": 2.4933676719665527, + "learning_rate": 1.9954871187624027e-05, + "loss": 1.3481, + "step": 947 + }, + { + "epoch": 0.05943946328923443, + "grad_norm": 2.940674304962158, + "learning_rate": 1.9954678269371904e-05, + "loss": 1.2445, + "step": 948 + }, + { + "epoch": 0.05950216314502477, + "grad_norm": 2.410270929336548, + "learning_rate": 1.9954484940588424e-05, + "loss": 1.4497, + "step": 949 + }, + { + "epoch": 0.0595648630008151, + "grad_norm": 2.831864833831787, + "learning_rate": 1.9954291201281542e-05, + "loss": 1.4629, + "step": 950 + }, + { + "epoch": 0.05962756285660543, + "grad_norm": 2.4343385696411133, + "learning_rate": 1.9954097051459262e-05, + "loss": 1.24, + "step": 951 + }, + { + "epoch": 0.05969026271239576, + "grad_norm": 2.530163049697876, + "learning_rate": 1.9953902491129585e-05, + "loss": 1.353, + "step": 952 + }, + { + "epoch": 0.059752962568186095, + "grad_norm": 2.745349168777466, + "learning_rate": 1.9953707520300533e-05, + "loss": 1.3173, + "step": 953 + }, + { + "epoch": 0.05981566242397642, + "grad_norm": 2.5273821353912354, + "learning_rate": 1.995351213898015e-05, + "loss": 1.4804, + "step": 954 + }, + { + "epoch": 0.059878362279766756, + "grad_norm": 2.572007656097412, + "learning_rate": 1.995331634717649e-05, + "loss": 1.1834, + "step": 955 + }, + { + "epoch": 0.05994106213555709, + "grad_norm": 2.782041311264038, + "learning_rate": 1.9953120144897625e-05, + "loss": 1.1754, + "step": 956 + }, + { + "epoch": 0.06000376199134742, + "grad_norm": 2.6412558555603027, + "learning_rate": 1.9952923532151657e-05, + "loss": 1.2161, + "step": 957 + }, + { + "epoch": 0.06006646184713775, + "grad_norm": 2.2318115234375, + "learning_rate": 1.9952726508946686e-05, + "loss": 1.4401, + "step": 958 + }, + { + "epoch": 0.060129161702928084, + "grad_norm": 2.439096212387085, + "learning_rate": 1.995252907529084e-05, + "loss": 1.3489, + "step": 959 + }, + { + "epoch": 0.06019186155871842, + "grad_norm": 2.8339836597442627, + "learning_rate": 1.9952331231192256e-05, + "loss": 1.3381, + "step": 960 + }, + { + "epoch": 0.060254561414508745, + "grad_norm": 2.467017650604248, + "learning_rate": 1.99521329766591e-05, + "loss": 1.388, + "step": 961 + }, + { + "epoch": 0.06031726127029908, + "grad_norm": 2.709259510040283, + "learning_rate": 1.9951934311699547e-05, + "loss": 1.2505, + "step": 962 + }, + { + "epoch": 0.06037996112608941, + "grad_norm": 2.5285723209381104, + "learning_rate": 1.9951735236321786e-05, + "loss": 1.1143, + "step": 963 + }, + { + "epoch": 0.06044266098187974, + "grad_norm": 2.967592239379883, + "learning_rate": 1.995153575053403e-05, + "loss": 1.2401, + "step": 964 + }, + { + "epoch": 0.06050536083767007, + "grad_norm": 
2.5401153564453125, + "learning_rate": 1.9951335854344507e-05, + "loss": 1.3473, + "step": 965 + }, + { + "epoch": 0.06056806069346041, + "grad_norm": 2.588465452194214, + "learning_rate": 1.9951135547761457e-05, + "loss": 1.1861, + "step": 966 + }, + { + "epoch": 0.060630760549250734, + "grad_norm": 2.755138635635376, + "learning_rate": 1.9950934830793142e-05, + "loss": 1.1292, + "step": 967 + }, + { + "epoch": 0.06069346040504107, + "grad_norm": 2.707343339920044, + "learning_rate": 1.995073370344784e-05, + "loss": 1.2355, + "step": 968 + }, + { + "epoch": 0.0607561602608314, + "grad_norm": 2.853106737136841, + "learning_rate": 1.9950532165733847e-05, + "loss": 1.1657, + "step": 969 + }, + { + "epoch": 0.06081886011662173, + "grad_norm": 2.669879198074341, + "learning_rate": 1.995033021765947e-05, + "loss": 1.3195, + "step": 970 + }, + { + "epoch": 0.06088155997241206, + "grad_norm": 2.8072361946105957, + "learning_rate": 1.9950127859233045e-05, + "loss": 1.1507, + "step": 971 + }, + { + "epoch": 0.060944259828202396, + "grad_norm": 2.7528889179229736, + "learning_rate": 1.994992509046291e-05, + "loss": 1.3536, + "step": 972 + }, + { + "epoch": 0.06100695968399273, + "grad_norm": 2.5252506732940674, + "learning_rate": 1.994972191135743e-05, + "loss": 1.3348, + "step": 973 + }, + { + "epoch": 0.06106965953978306, + "grad_norm": 2.601257801055908, + "learning_rate": 1.9949518321924984e-05, + "loss": 1.3089, + "step": 974 + }, + { + "epoch": 0.06113235939557339, + "grad_norm": 2.6625735759735107, + "learning_rate": 1.9949314322173966e-05, + "loss": 1.1837, + "step": 975 + }, + { + "epoch": 0.061195059251363725, + "grad_norm": 2.510457754135132, + "learning_rate": 1.9949109912112796e-05, + "loss": 1.2546, + "step": 976 + }, + { + "epoch": 0.06125775910715405, + "grad_norm": 2.6652119159698486, + "learning_rate": 1.9948905091749892e-05, + "loss": 1.2889, + "step": 977 + }, + { + "epoch": 0.061320458962944385, + "grad_norm": 2.5552403926849365, + "learning_rate": 1.9948699861093713e-05, + "loss": 1.2951, + "step": 978 + }, + { + "epoch": 0.06138315881873472, + "grad_norm": 2.446385383605957, + "learning_rate": 1.9948494220152714e-05, + "loss": 1.3208, + "step": 979 + }, + { + "epoch": 0.061445858674525046, + "grad_norm": 2.5162973403930664, + "learning_rate": 1.994828816893538e-05, + "loss": 1.3285, + "step": 980 + }, + { + "epoch": 0.06150855853031538, + "grad_norm": 2.7106072902679443, + "learning_rate": 1.9948081707450206e-05, + "loss": 1.344, + "step": 981 + }, + { + "epoch": 0.061571258386105714, + "grad_norm": 2.435622453689575, + "learning_rate": 1.994787483570571e-05, + "loss": 1.3533, + "step": 982 + }, + { + "epoch": 0.06163395824189604, + "grad_norm": 2.5035622119903564, + "learning_rate": 1.994766755371042e-05, + "loss": 1.378, + "step": 983 + }, + { + "epoch": 0.061696658097686374, + "grad_norm": 2.1993558406829834, + "learning_rate": 1.9947459861472887e-05, + "loss": 1.3226, + "step": 984 + }, + { + "epoch": 0.06175935795347671, + "grad_norm": 2.6636440753936768, + "learning_rate": 1.9947251759001674e-05, + "loss": 1.2257, + "step": 985 + }, + { + "epoch": 0.06182205780926704, + "grad_norm": 2.4166393280029297, + "learning_rate": 1.9947043246305365e-05, + "loss": 1.3238, + "step": 986 + }, + { + "epoch": 0.06188475766505737, + "grad_norm": 2.7195870876312256, + "learning_rate": 1.9946834323392558e-05, + "loss": 1.1271, + "step": 987 + }, + { + "epoch": 0.0619474575208477, + "grad_norm": 2.3717780113220215, + "learning_rate": 1.994662499027187e-05, + "loss": 1.1955, + "step": 988 
+ }, + { + "epoch": 0.062010157376638037, + "grad_norm": 2.662379503250122, + "learning_rate": 1.9946415246951928e-05, + "loss": 1.3203, + "step": 989 + }, + { + "epoch": 0.06207285723242836, + "grad_norm": 2.739652633666992, + "learning_rate": 1.994620509344139e-05, + "loss": 1.4265, + "step": 990 + }, + { + "epoch": 0.0621355570882187, + "grad_norm": 2.4186697006225586, + "learning_rate": 1.994599452974892e-05, + "loss": 1.2864, + "step": 991 + }, + { + "epoch": 0.06219825694400903, + "grad_norm": 2.4762825965881348, + "learning_rate": 1.99457835558832e-05, + "loss": 1.1769, + "step": 992 + }, + { + "epoch": 0.06226095679979936, + "grad_norm": 2.6037216186523438, + "learning_rate": 1.9945572171852933e-05, + "loss": 1.2387, + "step": 993 + }, + { + "epoch": 0.06232365665558969, + "grad_norm": 2.890687942504883, + "learning_rate": 1.994536037766684e-05, + "loss": 1.0989, + "step": 994 + }, + { + "epoch": 0.062386356511380026, + "grad_norm": 2.815784215927124, + "learning_rate": 1.9945148173333645e-05, + "loss": 1.2452, + "step": 995 + }, + { + "epoch": 0.06244905636717035, + "grad_norm": 3.164156436920166, + "learning_rate": 1.9944935558862105e-05, + "loss": 1.1564, + "step": 996 + }, + { + "epoch": 0.0625117562229607, + "grad_norm": 2.7624754905700684, + "learning_rate": 1.994472253426099e-05, + "loss": 1.2815, + "step": 997 + }, + { + "epoch": 0.06257445607875102, + "grad_norm": 2.800910472869873, + "learning_rate": 1.9944509099539083e-05, + "loss": 1.1211, + "step": 998 + }, + { + "epoch": 0.06263715593454135, + "grad_norm": 2.657743453979492, + "learning_rate": 1.9944295254705187e-05, + "loss": 1.5254, + "step": 999 + }, + { + "epoch": 0.06269985579033169, + "grad_norm": 2.8070170879364014, + "learning_rate": 1.994408099976812e-05, + "loss": 1.259, + "step": 1000 + }, + { + "epoch": 0.06269985579033169, + "eval_loss": 1.3050593137741089, + "eval_runtime": 144.2757, + "eval_samples_per_second": 4.367, + "eval_steps_per_second": 1.095, + "step": 1000 + }, + { + "epoch": 0.06276255564612201, + "grad_norm": 2.4529707431793213, + "learning_rate": 1.9943866334736715e-05, + "loss": 1.1678, + "step": 1001 + }, + { + "epoch": 0.06282525550191234, + "grad_norm": 2.3521525859832764, + "learning_rate": 1.9943651259619833e-05, + "loss": 1.3993, + "step": 1002 + }, + { + "epoch": 0.06288795535770268, + "grad_norm": 2.3541526794433594, + "learning_rate": 1.9943435774426335e-05, + "loss": 1.3972, + "step": 1003 + }, + { + "epoch": 0.06295065521349301, + "grad_norm": 2.451450824737549, + "learning_rate": 1.9943219879165113e-05, + "loss": 1.2488, + "step": 1004 + }, + { + "epoch": 0.06301335506928334, + "grad_norm": 2.5069398880004883, + "learning_rate": 1.994300357384507e-05, + "loss": 1.2896, + "step": 1005 + }, + { + "epoch": 0.06307605492507368, + "grad_norm": 2.603461742401123, + "learning_rate": 1.9942786858475126e-05, + "loss": 1.3972, + "step": 1006 + }, + { + "epoch": 0.063138754780864, + "grad_norm": 2.504265308380127, + "learning_rate": 1.9942569733064217e-05, + "loss": 1.1633, + "step": 1007 + }, + { + "epoch": 0.06320145463665433, + "grad_norm": 2.534980297088623, + "learning_rate": 1.9942352197621297e-05, + "loss": 1.2823, + "step": 1008 + }, + { + "epoch": 0.06326415449244467, + "grad_norm": 2.62241530418396, + "learning_rate": 1.994213425215534e-05, + "loss": 1.3481, + "step": 1009 + }, + { + "epoch": 0.063326854348235, + "grad_norm": 2.7118077278137207, + "learning_rate": 1.994191589667533e-05, + "loss": 1.3524, + "step": 1010 + }, + { + "epoch": 0.06338955420402533, + "grad_norm": 
2.79935359954834, + "learning_rate": 1.9941697131190273e-05, + "loss": 1.2099, + "step": 1011 + }, + { + "epoch": 0.06345225405981567, + "grad_norm": 2.7085633277893066, + "learning_rate": 1.9941477955709195e-05, + "loss": 1.2793, + "step": 1012 + }, + { + "epoch": 0.06351495391560599, + "grad_norm": 2.607023239135742, + "learning_rate": 1.9941258370241134e-05, + "loss": 1.2207, + "step": 1013 + }, + { + "epoch": 0.06357765377139632, + "grad_norm": 2.4730610847473145, + "learning_rate": 1.994103837479514e-05, + "loss": 1.2459, + "step": 1014 + }, + { + "epoch": 0.06364035362718666, + "grad_norm": 2.6164045333862305, + "learning_rate": 1.994081796938029e-05, + "loss": 1.1142, + "step": 1015 + }, + { + "epoch": 0.06370305348297699, + "grad_norm": 2.763126850128174, + "learning_rate": 1.9940597154005672e-05, + "loss": 1.3695, + "step": 1016 + }, + { + "epoch": 0.06376575333876731, + "grad_norm": 2.808368444442749, + "learning_rate": 1.9940375928680392e-05, + "loss": 1.3211, + "step": 1017 + }, + { + "epoch": 0.06382845319455765, + "grad_norm": 2.9052734375, + "learning_rate": 1.994015429341358e-05, + "loss": 1.374, + "step": 1018 + }, + { + "epoch": 0.06389115305034798, + "grad_norm": 2.6592564582824707, + "learning_rate": 1.993993224821437e-05, + "loss": 1.2116, + "step": 1019 + }, + { + "epoch": 0.06395385290613832, + "grad_norm": 2.5633091926574707, + "learning_rate": 1.9939709793091913e-05, + "loss": 1.4597, + "step": 1020 + }, + { + "epoch": 0.06401655276192865, + "grad_norm": 2.629833698272705, + "learning_rate": 1.99394869280554e-05, + "loss": 1.3876, + "step": 1021 + }, + { + "epoch": 0.06407925261771898, + "grad_norm": 2.914766788482666, + "learning_rate": 1.9939263653114006e-05, + "loss": 1.3882, + "step": 1022 + }, + { + "epoch": 0.06414195247350932, + "grad_norm": 2.5606493949890137, + "learning_rate": 1.9939039968276942e-05, + "loss": 1.3549, + "step": 1023 + }, + { + "epoch": 0.06420465232929964, + "grad_norm": 2.4540116786956787, + "learning_rate": 1.993881587355344e-05, + "loss": 1.2577, + "step": 1024 + }, + { + "epoch": 0.06426735218508997, + "grad_norm": 2.5270473957061768, + "learning_rate": 1.993859136895274e-05, + "loss": 1.273, + "step": 1025 + }, + { + "epoch": 0.06433005204088031, + "grad_norm": 2.767155885696411, + "learning_rate": 1.9938366454484092e-05, + "loss": 1.2472, + "step": 1026 + }, + { + "epoch": 0.06439275189667064, + "grad_norm": 2.527635335922241, + "learning_rate": 1.9938141130156784e-05, + "loss": 1.2339, + "step": 1027 + }, + { + "epoch": 0.06445545175246097, + "grad_norm": 2.8579154014587402, + "learning_rate": 1.9937915395980095e-05, + "loss": 1.3892, + "step": 1028 + }, + { + "epoch": 0.0645181516082513, + "grad_norm": 2.5441009998321533, + "learning_rate": 1.9937689251963347e-05, + "loss": 1.2711, + "step": 1029 + }, + { + "epoch": 0.06458085146404163, + "grad_norm": 2.5281691551208496, + "learning_rate": 1.9937462698115854e-05, + "loss": 1.231, + "step": 1030 + }, + { + "epoch": 0.06464355131983196, + "grad_norm": 2.60624361038208, + "learning_rate": 1.993723573444697e-05, + "loss": 1.27, + "step": 1031 + }, + { + "epoch": 0.0647062511756223, + "grad_norm": 2.4841368198394775, + "learning_rate": 1.993700836096605e-05, + "loss": 1.2816, + "step": 1032 + }, + { + "epoch": 0.06476895103141263, + "grad_norm": 2.701402187347412, + "learning_rate": 1.9936780577682468e-05, + "loss": 1.3391, + "step": 1033 + }, + { + "epoch": 0.06483165088720295, + "grad_norm": 2.481954336166382, + "learning_rate": 1.9936552384605627e-05, + "loss": 1.0809, + "step": 
1034 + }, + { + "epoch": 0.0648943507429933, + "grad_norm": 3.067180871963501, + "learning_rate": 1.9936323781744925e-05, + "loss": 1.2385, + "step": 1035 + }, + { + "epoch": 0.06495705059878362, + "grad_norm": 3.0508737564086914, + "learning_rate": 1.99360947691098e-05, + "loss": 1.1502, + "step": 1036 + }, + { + "epoch": 0.06501975045457395, + "grad_norm": 2.9366233348846436, + "learning_rate": 1.9935865346709692e-05, + "loss": 1.2119, + "step": 1037 + }, + { + "epoch": 0.06508245031036429, + "grad_norm": 2.364657402038574, + "learning_rate": 1.9935635514554062e-05, + "loss": 1.2332, + "step": 1038 + }, + { + "epoch": 0.06514515016615462, + "grad_norm": 2.402892589569092, + "learning_rate": 1.993540527265239e-05, + "loss": 1.3388, + "step": 1039 + }, + { + "epoch": 0.06520785002194494, + "grad_norm": 2.293461322784424, + "learning_rate": 1.9935174621014173e-05, + "loss": 1.3554, + "step": 1040 + }, + { + "epoch": 0.06527054987773528, + "grad_norm": 2.6049554347991943, + "learning_rate": 1.993494355964892e-05, + "loss": 1.1394, + "step": 1041 + }, + { + "epoch": 0.06533324973352561, + "grad_norm": 3.0106053352355957, + "learning_rate": 1.9934712088566158e-05, + "loss": 1.2337, + "step": 1042 + }, + { + "epoch": 0.06539594958931594, + "grad_norm": 2.738032341003418, + "learning_rate": 1.9934480207775436e-05, + "loss": 1.3038, + "step": 1043 + }, + { + "epoch": 0.06545864944510628, + "grad_norm": 2.5054397583007812, + "learning_rate": 1.9934247917286322e-05, + "loss": 1.2016, + "step": 1044 + }, + { + "epoch": 0.0655213493008966, + "grad_norm": 2.5377964973449707, + "learning_rate": 1.9934015217108385e-05, + "loss": 1.2735, + "step": 1045 + }, + { + "epoch": 0.06558404915668693, + "grad_norm": 2.4546477794647217, + "learning_rate": 1.9933782107251227e-05, + "loss": 1.3288, + "step": 1046 + }, + { + "epoch": 0.06564674901247727, + "grad_norm": 2.5963754653930664, + "learning_rate": 1.9933548587724462e-05, + "loss": 1.3612, + "step": 1047 + }, + { + "epoch": 0.0657094488682676, + "grad_norm": 2.751113176345825, + "learning_rate": 1.993331465853772e-05, + "loss": 1.3391, + "step": 1048 + }, + { + "epoch": 0.06577214872405794, + "grad_norm": 2.797670841217041, + "learning_rate": 1.993308031970065e-05, + "loss": 1.3133, + "step": 1049 + }, + { + "epoch": 0.06583484857984827, + "grad_norm": 2.4644668102264404, + "learning_rate": 1.993284557122291e-05, + "loss": 1.269, + "step": 1050 + }, + { + "epoch": 0.0658975484356386, + "grad_norm": 2.4036829471588135, + "learning_rate": 1.9932610413114184e-05, + "loss": 1.2286, + "step": 1051 + }, + { + "epoch": 0.06596024829142894, + "grad_norm": 2.661212682723999, + "learning_rate": 1.9932374845384172e-05, + "loss": 1.2197, + "step": 1052 + }, + { + "epoch": 0.06602294814721926, + "grad_norm": 2.565155267715454, + "learning_rate": 1.993213886804259e-05, + "loss": 1.1135, + "step": 1053 + }, + { + "epoch": 0.06608564800300959, + "grad_norm": 2.7220401763916016, + "learning_rate": 1.9931902481099163e-05, + "loss": 1.3054, + "step": 1054 + }, + { + "epoch": 0.06614834785879993, + "grad_norm": 2.8867852687835693, + "learning_rate": 1.9931665684563648e-05, + "loss": 1.3326, + "step": 1055 + }, + { + "epoch": 0.06621104771459026, + "grad_norm": 2.7576324939727783, + "learning_rate": 1.99314284784458e-05, + "loss": 1.3595, + "step": 1056 + }, + { + "epoch": 0.06627374757038058, + "grad_norm": 2.6046857833862305, + "learning_rate": 1.9931190862755416e-05, + "loss": 1.1293, + "step": 1057 + }, + { + "epoch": 0.06633644742617092, + "grad_norm": 2.6726512908935547, + 
"learning_rate": 1.9930952837502283e-05, + "loss": 1.3501, + "step": 1058 + }, + { + "epoch": 0.06639914728196125, + "grad_norm": 2.3795716762542725, + "learning_rate": 1.993071440269622e-05, + "loss": 1.3966, + "step": 1059 + }, + { + "epoch": 0.06646184713775158, + "grad_norm": 2.448014259338379, + "learning_rate": 1.9930475558347066e-05, + "loss": 1.3677, + "step": 1060 + }, + { + "epoch": 0.06652454699354192, + "grad_norm": 2.9704740047454834, + "learning_rate": 1.9930236304464664e-05, + "loss": 1.2107, + "step": 1061 + }, + { + "epoch": 0.06658724684933225, + "grad_norm": 2.7938365936279297, + "learning_rate": 1.9929996641058885e-05, + "loss": 1.203, + "step": 1062 + }, + { + "epoch": 0.06664994670512257, + "grad_norm": 2.5977535247802734, + "learning_rate": 1.9929756568139607e-05, + "loss": 1.3679, + "step": 1063 + }, + { + "epoch": 0.06671264656091291, + "grad_norm": 2.634629011154175, + "learning_rate": 1.9929516085716736e-05, + "loss": 1.4971, + "step": 1064 + }, + { + "epoch": 0.06677534641670324, + "grad_norm": 2.4875330924987793, + "learning_rate": 1.9929275193800187e-05, + "loss": 1.34, + "step": 1065 + }, + { + "epoch": 0.06683804627249357, + "grad_norm": 2.424628973007202, + "learning_rate": 1.99290338923999e-05, + "loss": 1.5288, + "step": 1066 + }, + { + "epoch": 0.06690074612828391, + "grad_norm": 2.489971160888672, + "learning_rate": 1.9928792181525818e-05, + "loss": 1.4101, + "step": 1067 + }, + { + "epoch": 0.06696344598407424, + "grad_norm": 2.657792568206787, + "learning_rate": 1.992855006118791e-05, + "loss": 1.4398, + "step": 1068 + }, + { + "epoch": 0.06702614583986456, + "grad_norm": 2.4586451053619385, + "learning_rate": 1.9928307531396168e-05, + "loss": 1.4141, + "step": 1069 + }, + { + "epoch": 0.0670888456956549, + "grad_norm": 2.6875617504119873, + "learning_rate": 1.992806459216059e-05, + "loss": 1.2872, + "step": 1070 + }, + { + "epoch": 0.06715154555144523, + "grad_norm": 3.0958075523376465, + "learning_rate": 1.992782124349119e-05, + "loss": 1.2444, + "step": 1071 + }, + { + "epoch": 0.06721424540723556, + "grad_norm": 2.5168449878692627, + "learning_rate": 1.9927577485398017e-05, + "loss": 1.2986, + "step": 1072 + }, + { + "epoch": 0.0672769452630259, + "grad_norm": 2.643244743347168, + "learning_rate": 1.9927333317891108e-05, + "loss": 1.2287, + "step": 1073 + }, + { + "epoch": 0.06733964511881622, + "grad_norm": 2.3760416507720947, + "learning_rate": 1.992708874098054e-05, + "loss": 1.237, + "step": 1074 + }, + { + "epoch": 0.06740234497460657, + "grad_norm": 2.8958091735839844, + "learning_rate": 1.9926843754676403e-05, + "loss": 1.1052, + "step": 1075 + }, + { + "epoch": 0.06746504483039689, + "grad_norm": 3.0938808917999268, + "learning_rate": 1.9926598358988793e-05, + "loss": 1.3348, + "step": 1076 + }, + { + "epoch": 0.06752774468618722, + "grad_norm": 2.460859775543213, + "learning_rate": 1.9926352553927833e-05, + "loss": 1.3898, + "step": 1077 + }, + { + "epoch": 0.06759044454197756, + "grad_norm": 2.650350332260132, + "learning_rate": 1.9926106339503662e-05, + "loss": 1.155, + "step": 1078 + }, + { + "epoch": 0.06765314439776789, + "grad_norm": 2.6144907474517822, + "learning_rate": 1.992585971572643e-05, + "loss": 1.3277, + "step": 1079 + }, + { + "epoch": 0.06771584425355821, + "grad_norm": 2.861947774887085, + "learning_rate": 1.992561268260631e-05, + "loss": 1.3009, + "step": 1080 + }, + { + "epoch": 0.06777854410934855, + "grad_norm": 3.0963521003723145, + "learning_rate": 1.992536524015349e-05, + "loss": 1.2933, + "step": 1081 + }, + { + 
"epoch": 0.06784124396513888, + "grad_norm": 2.431779146194458, + "learning_rate": 1.9925117388378172e-05, + "loss": 1.2065, + "step": 1082 + }, + { + "epoch": 0.06790394382092921, + "grad_norm": 2.586637020111084, + "learning_rate": 1.992486912729058e-05, + "loss": 1.4198, + "step": 1083 + }, + { + "epoch": 0.06796664367671955, + "grad_norm": 2.675132989883423, + "learning_rate": 1.9924620456900956e-05, + "loss": 1.1666, + "step": 1084 + }, + { + "epoch": 0.06802934353250988, + "grad_norm": 2.7031986713409424, + "learning_rate": 1.992437137721955e-05, + "loss": 1.3517, + "step": 1085 + }, + { + "epoch": 0.0680920433883002, + "grad_norm": 2.316678285598755, + "learning_rate": 1.992412188825663e-05, + "loss": 1.3105, + "step": 1086 + }, + { + "epoch": 0.06815474324409054, + "grad_norm": 2.43615460395813, + "learning_rate": 1.9923871990022495e-05, + "loss": 1.3441, + "step": 1087 + }, + { + "epoch": 0.06821744309988087, + "grad_norm": 2.797114610671997, + "learning_rate": 1.9923621682527444e-05, + "loss": 1.2706, + "step": 1088 + }, + { + "epoch": 0.0682801429556712, + "grad_norm": 2.5704028606414795, + "learning_rate": 1.9923370965781804e-05, + "loss": 1.4919, + "step": 1089 + }, + { + "epoch": 0.06834284281146154, + "grad_norm": 2.50311279296875, + "learning_rate": 1.992311983979591e-05, + "loss": 1.1514, + "step": 1090 + }, + { + "epoch": 0.06840554266725186, + "grad_norm": 2.4062628746032715, + "learning_rate": 1.992286830458012e-05, + "loss": 1.385, + "step": 1091 + }, + { + "epoch": 0.06846824252304219, + "grad_norm": 2.8741657733917236, + "learning_rate": 1.9922616360144804e-05, + "loss": 1.4175, + "step": 1092 + }, + { + "epoch": 0.06853094237883253, + "grad_norm": 2.734461784362793, + "learning_rate": 1.992236400650036e-05, + "loss": 1.4763, + "step": 1093 + }, + { + "epoch": 0.06859364223462286, + "grad_norm": 2.7093467712402344, + "learning_rate": 1.992211124365719e-05, + "loss": 1.3731, + "step": 1094 + }, + { + "epoch": 0.06865634209041319, + "grad_norm": 2.502335786819458, + "learning_rate": 1.992185807162572e-05, + "loss": 1.3356, + "step": 1095 + }, + { + "epoch": 0.06871904194620353, + "grad_norm": 2.3775200843811035, + "learning_rate": 1.992160449041639e-05, + "loss": 1.1052, + "step": 1096 + }, + { + "epoch": 0.06878174180199385, + "grad_norm": 2.293473243713379, + "learning_rate": 1.9921350500039658e-05, + "loss": 1.2523, + "step": 1097 + }, + { + "epoch": 0.06884444165778418, + "grad_norm": 2.6763148307800293, + "learning_rate": 1.9921096100505996e-05, + "loss": 1.2284, + "step": 1098 + }, + { + "epoch": 0.06890714151357452, + "grad_norm": 2.661794424057007, + "learning_rate": 1.99208412918259e-05, + "loss": 1.4321, + "step": 1099 + }, + { + "epoch": 0.06896984136936485, + "grad_norm": 2.513867139816284, + "learning_rate": 1.9920586074009875e-05, + "loss": 1.1488, + "step": 1100 + }, + { + "epoch": 0.06903254122515519, + "grad_norm": 2.8906190395355225, + "learning_rate": 1.9920330447068447e-05, + "loss": 1.3004, + "step": 1101 + }, + { + "epoch": 0.06909524108094552, + "grad_norm": 2.673896074295044, + "learning_rate": 1.9920074411012158e-05, + "loss": 1.3618, + "step": 1102 + }, + { + "epoch": 0.06915794093673584, + "grad_norm": 2.675889492034912, + "learning_rate": 1.9919817965851566e-05, + "loss": 1.0969, + "step": 1103 + }, + { + "epoch": 0.06922064079252618, + "grad_norm": 2.4992141723632812, + "learning_rate": 1.9919561111597246e-05, + "loss": 1.4151, + "step": 1104 + }, + { + "epoch": 0.06928334064831651, + "grad_norm": 2.6800265312194824, + "learning_rate": 
1.99193038482598e-05, + "loss": 1.4267, + "step": 1105 + }, + { + "epoch": 0.06934604050410684, + "grad_norm": 2.505100727081299, + "learning_rate": 1.9919046175849823e-05, + "loss": 1.3367, + "step": 1106 + }, + { + "epoch": 0.06940874035989718, + "grad_norm": 2.6033682823181152, + "learning_rate": 1.9918788094377953e-05, + "loss": 1.2378, + "step": 1107 + }, + { + "epoch": 0.0694714402156875, + "grad_norm": 2.602632761001587, + "learning_rate": 1.9918529603854825e-05, + "loss": 1.2855, + "step": 1108 + }, + { + "epoch": 0.06953414007147783, + "grad_norm": 2.794849157333374, + "learning_rate": 1.9918270704291104e-05, + "loss": 1.3, + "step": 1109 + }, + { + "epoch": 0.06959683992726817, + "grad_norm": 2.8201539516448975, + "learning_rate": 1.9918011395697467e-05, + "loss": 1.2962, + "step": 1110 + }, + { + "epoch": 0.0696595397830585, + "grad_norm": 2.6906023025512695, + "learning_rate": 1.991775167808461e-05, + "loss": 1.2846, + "step": 1111 + }, + { + "epoch": 0.06972223963884883, + "grad_norm": 2.4628896713256836, + "learning_rate": 1.9917491551463235e-05, + "loss": 1.073, + "step": 1112 + }, + { + "epoch": 0.06978493949463917, + "grad_norm": 2.6996710300445557, + "learning_rate": 1.991723101584408e-05, + "loss": 1.3232, + "step": 1113 + }, + { + "epoch": 0.0698476393504295, + "grad_norm": 2.860462188720703, + "learning_rate": 1.9916970071237884e-05, + "loss": 1.2207, + "step": 1114 + }, + { + "epoch": 0.06991033920621982, + "grad_norm": 2.9283385276794434, + "learning_rate": 1.9916708717655407e-05, + "loss": 1.3186, + "step": 1115 + }, + { + "epoch": 0.06997303906201016, + "grad_norm": 2.799539566040039, + "learning_rate": 1.991644695510743e-05, + "loss": 1.2742, + "step": 1116 + }, + { + "epoch": 0.07003573891780049, + "grad_norm": 2.7540481090545654, + "learning_rate": 1.991618478360475e-05, + "loss": 1.1838, + "step": 1117 + }, + { + "epoch": 0.07009843877359082, + "grad_norm": 2.559326648712158, + "learning_rate": 1.9915922203158173e-05, + "loss": 1.264, + "step": 1118 + }, + { + "epoch": 0.07016113862938116, + "grad_norm": 2.6823854446411133, + "learning_rate": 1.991565921377853e-05, + "loss": 1.3377, + "step": 1119 + }, + { + "epoch": 0.07022383848517148, + "grad_norm": 3.299346446990967, + "learning_rate": 1.991539581547667e-05, + "loss": 1.1162, + "step": 1120 + }, + { + "epoch": 0.07028653834096181, + "grad_norm": 3.019859790802002, + "learning_rate": 1.9915132008263456e-05, + "loss": 1.1178, + "step": 1121 + }, + { + "epoch": 0.07034923819675215, + "grad_norm": 2.6361048221588135, + "learning_rate": 1.9914867792149764e-05, + "loss": 1.1787, + "step": 1122 + }, + { + "epoch": 0.07041193805254248, + "grad_norm": 2.359501838684082, + "learning_rate": 1.9914603167146488e-05, + "loss": 1.2694, + "step": 1123 + }, + { + "epoch": 0.0704746379083328, + "grad_norm": 3.042206287384033, + "learning_rate": 1.9914338133264548e-05, + "loss": 1.2401, + "step": 1124 + }, + { + "epoch": 0.07053733776412315, + "grad_norm": 2.6799185276031494, + "learning_rate": 1.991407269051487e-05, + "loss": 1.352, + "step": 1125 + }, + { + "epoch": 0.07060003761991347, + "grad_norm": 2.6121292114257812, + "learning_rate": 1.99138068389084e-05, + "loss": 1.1663, + "step": 1126 + }, + { + "epoch": 0.0706627374757038, + "grad_norm": 2.7583847045898438, + "learning_rate": 1.9913540578456104e-05, + "loss": 1.3515, + "step": 1127 + }, + { + "epoch": 0.07072543733149414, + "grad_norm": 2.46124005317688, + "learning_rate": 1.991327390916896e-05, + "loss": 1.2637, + "step": 1128 + }, + { + "epoch": 
0.07078813718728447, + "grad_norm": 2.965609550476074, + "learning_rate": 1.9913006831057967e-05, + "loss": 1.3106, + "step": 1129 + }, + { + "epoch": 0.07085083704307481, + "grad_norm": 2.6929032802581787, + "learning_rate": 1.9912739344134145e-05, + "loss": 1.1993, + "step": 1130 + }, + { + "epoch": 0.07091353689886513, + "grad_norm": 2.8868000507354736, + "learning_rate": 1.9912471448408516e-05, + "loss": 1.3094, + "step": 1131 + }, + { + "epoch": 0.07097623675465546, + "grad_norm": 2.8845388889312744, + "learning_rate": 1.9912203143892128e-05, + "loss": 1.1641, + "step": 1132 + }, + { + "epoch": 0.0710389366104458, + "grad_norm": 2.422670364379883, + "learning_rate": 1.991193443059605e-05, + "loss": 1.2375, + "step": 1133 + }, + { + "epoch": 0.07110163646623613, + "grad_norm": 2.616375684738159, + "learning_rate": 1.991166530853137e-05, + "loss": 1.2384, + "step": 1134 + }, + { + "epoch": 0.07116433632202646, + "grad_norm": 2.6820600032806396, + "learning_rate": 1.9911395777709176e-05, + "loss": 1.3822, + "step": 1135 + }, + { + "epoch": 0.0712270361778168, + "grad_norm": 2.5307939052581787, + "learning_rate": 1.9911125838140583e-05, + "loss": 1.345, + "step": 1136 + }, + { + "epoch": 0.07128973603360712, + "grad_norm": 2.911944627761841, + "learning_rate": 1.9910855489836734e-05, + "loss": 1.2393, + "step": 1137 + }, + { + "epoch": 0.07135243588939745, + "grad_norm": 2.490511417388916, + "learning_rate": 1.9910584732808767e-05, + "loss": 1.1823, + "step": 1138 + }, + { + "epoch": 0.07141513574518779, + "grad_norm": 2.525264263153076, + "learning_rate": 1.9910313567067857e-05, + "loss": 1.3272, + "step": 1139 + }, + { + "epoch": 0.07147783560097812, + "grad_norm": 2.648602247238159, + "learning_rate": 1.991004199262518e-05, + "loss": 1.2977, + "step": 1140 + }, + { + "epoch": 0.07154053545676844, + "grad_norm": 2.4049055576324463, + "learning_rate": 1.9909770009491938e-05, + "loss": 1.2116, + "step": 1141 + }, + { + "epoch": 0.07160323531255879, + "grad_norm": 2.5991053581237793, + "learning_rate": 1.990949761767935e-05, + "loss": 1.4205, + "step": 1142 + }, + { + "epoch": 0.07166593516834911, + "grad_norm": 2.624498128890991, + "learning_rate": 1.9909224817198646e-05, + "loss": 1.279, + "step": 1143 + }, + { + "epoch": 0.07172863502413944, + "grad_norm": 2.454963207244873, + "learning_rate": 1.9908951608061078e-05, + "loss": 1.3095, + "step": 1144 + }, + { + "epoch": 0.07179133487992978, + "grad_norm": 2.4225757122039795, + "learning_rate": 1.9908677990277913e-05, + "loss": 1.1993, + "step": 1145 + }, + { + "epoch": 0.07185403473572011, + "grad_norm": 2.766308307647705, + "learning_rate": 1.9908403963860436e-05, + "loss": 1.1217, + "step": 1146 + }, + { + "epoch": 0.07191673459151043, + "grad_norm": 2.7144546508789062, + "learning_rate": 1.9908129528819948e-05, + "loss": 1.1568, + "step": 1147 + }, + { + "epoch": 0.07197943444730077, + "grad_norm": 3.3820974826812744, + "learning_rate": 1.9907854685167764e-05, + "loss": 1.1739, + "step": 1148 + }, + { + "epoch": 0.0720421343030911, + "grad_norm": 2.777233362197876, + "learning_rate": 1.9907579432915217e-05, + "loss": 1.4357, + "step": 1149 + }, + { + "epoch": 0.07210483415888143, + "grad_norm": 2.7564473152160645, + "learning_rate": 1.9907303772073665e-05, + "loss": 1.4387, + "step": 1150 + }, + { + "epoch": 0.07216753401467177, + "grad_norm": 2.8691258430480957, + "learning_rate": 1.9907027702654472e-05, + "loss": 1.3331, + "step": 1151 + }, + { + "epoch": 0.0722302338704621, + "grad_norm": 2.410794973373413, + "learning_rate": 
1.9906751224669025e-05, + "loss": 1.3223, + "step": 1152 + }, + { + "epoch": 0.07229293372625242, + "grad_norm": 2.7182421684265137, + "learning_rate": 1.9906474338128724e-05, + "loss": 1.2747, + "step": 1153 + }, + { + "epoch": 0.07235563358204276, + "grad_norm": 2.6301796436309814, + "learning_rate": 1.990619704304499e-05, + "loss": 1.1418, + "step": 1154 + }, + { + "epoch": 0.07241833343783309, + "grad_norm": 2.63419246673584, + "learning_rate": 1.9905919339429254e-05, + "loss": 1.3423, + "step": 1155 + }, + { + "epoch": 0.07248103329362343, + "grad_norm": 2.378706693649292, + "learning_rate": 1.9905641227292974e-05, + "loss": 1.3093, + "step": 1156 + }, + { + "epoch": 0.07254373314941376, + "grad_norm": 2.5741770267486572, + "learning_rate": 1.9905362706647614e-05, + "loss": 1.2817, + "step": 1157 + }, + { + "epoch": 0.07260643300520409, + "grad_norm": 2.842294216156006, + "learning_rate": 1.9905083777504668e-05, + "loss": 1.0758, + "step": 1158 + }, + { + "epoch": 0.07266913286099443, + "grad_norm": 2.9450066089630127, + "learning_rate": 1.9904804439875635e-05, + "loss": 1.133, + "step": 1159 + }, + { + "epoch": 0.07273183271678475, + "grad_norm": 2.95989727973938, + "learning_rate": 1.990452469377203e-05, + "loss": 1.1393, + "step": 1160 + }, + { + "epoch": 0.07279453257257508, + "grad_norm": 2.838106632232666, + "learning_rate": 1.9904244539205398e-05, + "loss": 1.2858, + "step": 1161 + }, + { + "epoch": 0.07285723242836542, + "grad_norm": 2.5263009071350098, + "learning_rate": 1.9903963976187288e-05, + "loss": 1.3588, + "step": 1162 + }, + { + "epoch": 0.07291993228415575, + "grad_norm": 2.419846296310425, + "learning_rate": 1.9903683004729267e-05, + "loss": 1.1692, + "step": 1163 + }, + { + "epoch": 0.07298263213994607, + "grad_norm": 2.784393787384033, + "learning_rate": 1.990340162484293e-05, + "loss": 1.3707, + "step": 1164 + }, + { + "epoch": 0.07304533199573641, + "grad_norm": 2.4548470973968506, + "learning_rate": 1.9903119836539877e-05, + "loss": 1.2653, + "step": 1165 + }, + { + "epoch": 0.07310803185152674, + "grad_norm": 3.005660057067871, + "learning_rate": 1.990283763983173e-05, + "loss": 1.371, + "step": 1166 + }, + { + "epoch": 0.07317073170731707, + "grad_norm": 2.884389638900757, + "learning_rate": 1.9902555034730128e-05, + "loss": 1.2634, + "step": 1167 + }, + { + "epoch": 0.07323343156310741, + "grad_norm": 2.292057514190674, + "learning_rate": 1.9902272021246718e-05, + "loss": 1.324, + "step": 1168 + }, + { + "epoch": 0.07329613141889774, + "grad_norm": 2.5987980365753174, + "learning_rate": 1.9901988599393183e-05, + "loss": 1.2486, + "step": 1169 + }, + { + "epoch": 0.07335883127468806, + "grad_norm": 2.6289074420928955, + "learning_rate": 1.9901704769181204e-05, + "loss": 1.3666, + "step": 1170 + }, + { + "epoch": 0.0734215311304784, + "grad_norm": 2.31500244140625, + "learning_rate": 1.9901420530622486e-05, + "loss": 1.1701, + "step": 1171 + }, + { + "epoch": 0.07348423098626873, + "grad_norm": 3.111057758331299, + "learning_rate": 1.9901135883728756e-05, + "loss": 1.3766, + "step": 1172 + }, + { + "epoch": 0.07354693084205906, + "grad_norm": 2.5827934741973877, + "learning_rate": 1.9900850828511746e-05, + "loss": 1.2499, + "step": 1173 + }, + { + "epoch": 0.0736096306978494, + "grad_norm": 2.694986343383789, + "learning_rate": 1.990056536498322e-05, + "loss": 1.3788, + "step": 1174 + }, + { + "epoch": 0.07367233055363973, + "grad_norm": 2.7963054180145264, + "learning_rate": 1.9900279493154944e-05, + "loss": 1.3614, + "step": 1175 + }, + { + "epoch": 
0.07373503040943005, + "grad_norm": 2.8119959831237793, + "learning_rate": 1.989999321303871e-05, + "loss": 1.2048, + "step": 1176 + }, + { + "epoch": 0.0737977302652204, + "grad_norm": 3.2149550914764404, + "learning_rate": 1.9899706524646324e-05, + "loss": 1.3991, + "step": 1177 + }, + { + "epoch": 0.07386043012101072, + "grad_norm": 3.0142743587493896, + "learning_rate": 1.9899419427989604e-05, + "loss": 1.1956, + "step": 1178 + }, + { + "epoch": 0.07392312997680105, + "grad_norm": 2.880871534347534, + "learning_rate": 1.98991319230804e-05, + "loss": 1.2578, + "step": 1179 + }, + { + "epoch": 0.07398582983259139, + "grad_norm": 2.5125691890716553, + "learning_rate": 1.989884400993056e-05, + "loss": 1.2424, + "step": 1180 + }, + { + "epoch": 0.07404852968838171, + "grad_norm": 2.6721243858337402, + "learning_rate": 1.989855568855196e-05, + "loss": 1.2658, + "step": 1181 + }, + { + "epoch": 0.07411122954417206, + "grad_norm": 2.5460455417633057, + "learning_rate": 1.9898266958956495e-05, + "loss": 1.3926, + "step": 1182 + }, + { + "epoch": 0.07417392939996238, + "grad_norm": 2.7202022075653076, + "learning_rate": 1.9897977821156066e-05, + "loss": 1.3314, + "step": 1183 + }, + { + "epoch": 0.07423662925575271, + "grad_norm": 3.1856517791748047, + "learning_rate": 1.9897688275162598e-05, + "loss": 1.2178, + "step": 1184 + }, + { + "epoch": 0.07429932911154305, + "grad_norm": 2.7908058166503906, + "learning_rate": 1.9897398320988036e-05, + "loss": 1.2469, + "step": 1185 + }, + { + "epoch": 0.07436202896733338, + "grad_norm": 2.7151057720184326, + "learning_rate": 1.9897107958644336e-05, + "loss": 1.2328, + "step": 1186 + }, + { + "epoch": 0.0744247288231237, + "grad_norm": 3.056649923324585, + "learning_rate": 1.989681718814347e-05, + "loss": 1.352, + "step": 1187 + }, + { + "epoch": 0.07448742867891404, + "grad_norm": 2.9558475017547607, + "learning_rate": 1.989652600949743e-05, + "loss": 1.3386, + "step": 1188 + }, + { + "epoch": 0.07455012853470437, + "grad_norm": 2.703521966934204, + "learning_rate": 1.9896234422718224e-05, + "loss": 1.3559, + "step": 1189 + }, + { + "epoch": 0.0746128283904947, + "grad_norm": 2.66271710395813, + "learning_rate": 1.989594242781788e-05, + "loss": 1.2275, + "step": 1190 + }, + { + "epoch": 0.07467552824628504, + "grad_norm": 2.406252861022949, + "learning_rate": 1.989565002480844e-05, + "loss": 1.2389, + "step": 1191 + }, + { + "epoch": 0.07473822810207537, + "grad_norm": 2.686614513397217, + "learning_rate": 1.989535721370196e-05, + "loss": 1.2156, + "step": 1192 + }, + { + "epoch": 0.07480092795786569, + "grad_norm": 2.5925941467285156, + "learning_rate": 1.9895063994510512e-05, + "loss": 1.3677, + "step": 1193 + }, + { + "epoch": 0.07486362781365603, + "grad_norm": 2.518115520477295, + "learning_rate": 1.9894770367246197e-05, + "loss": 1.4449, + "step": 1194 + }, + { + "epoch": 0.07492632766944636, + "grad_norm": 2.642206907272339, + "learning_rate": 1.989447633192112e-05, + "loss": 1.288, + "step": 1195 + }, + { + "epoch": 0.07498902752523669, + "grad_norm": 2.7056891918182373, + "learning_rate": 1.9894181888547406e-05, + "loss": 1.3162, + "step": 1196 + }, + { + "epoch": 0.07505172738102703, + "grad_norm": 2.6376023292541504, + "learning_rate": 1.9893887037137198e-05, + "loss": 1.1263, + "step": 1197 + }, + { + "epoch": 0.07511442723681735, + "grad_norm": 3.2062206268310547, + "learning_rate": 1.9893591777702658e-05, + "loss": 1.2362, + "step": 1198 + }, + { + "epoch": 0.07517712709260768, + "grad_norm": 2.713937759399414, + "learning_rate": 
1.989329611025596e-05, + "loss": 1.3875, + "step": 1199 + }, + { + "epoch": 0.07523982694839802, + "grad_norm": 2.317399740219116, + "learning_rate": 1.9893000034809297e-05, + "loss": 1.3135, + "step": 1200 + }, + { + "epoch": 0.07530252680418835, + "grad_norm": 2.5050179958343506, + "learning_rate": 1.9892703551374882e-05, + "loss": 1.2595, + "step": 1201 + }, + { + "epoch": 0.07536522665997868, + "grad_norm": 2.5205514430999756, + "learning_rate": 1.989240665996494e-05, + "loss": 1.3196, + "step": 1202 + }, + { + "epoch": 0.07542792651576902, + "grad_norm": 2.657735824584961, + "learning_rate": 1.9892109360591717e-05, + "loss": 1.2559, + "step": 1203 + }, + { + "epoch": 0.07549062637155934, + "grad_norm": 2.664823293685913, + "learning_rate": 1.989181165326747e-05, + "loss": 1.2616, + "step": 1204 + }, + { + "epoch": 0.07555332622734967, + "grad_norm": 2.9471499919891357, + "learning_rate": 1.9891513538004482e-05, + "loss": 1.2053, + "step": 1205 + }, + { + "epoch": 0.07561602608314001, + "grad_norm": 2.9576709270477295, + "learning_rate": 1.989121501481504e-05, + "loss": 1.2963, + "step": 1206 + }, + { + "epoch": 0.07567872593893034, + "grad_norm": 2.8049426078796387, + "learning_rate": 1.9890916083711463e-05, + "loss": 1.2121, + "step": 1207 + }, + { + "epoch": 0.07574142579472067, + "grad_norm": 2.737863063812256, + "learning_rate": 1.989061674470607e-05, + "loss": 1.2722, + "step": 1208 + }, + { + "epoch": 0.075804125650511, + "grad_norm": 2.720446825027466, + "learning_rate": 1.9890316997811214e-05, + "loss": 1.2593, + "step": 1209 + }, + { + "epoch": 0.07586682550630133, + "grad_norm": 2.8650496006011963, + "learning_rate": 1.989001684303925e-05, + "loss": 1.2185, + "step": 1210 + }, + { + "epoch": 0.07592952536209167, + "grad_norm": 2.4774272441864014, + "learning_rate": 1.9889716280402564e-05, + "loss": 1.216, + "step": 1211 + }, + { + "epoch": 0.075992225217882, + "grad_norm": 3.109574317932129, + "learning_rate": 1.9889415309913544e-05, + "loss": 1.0747, + "step": 1212 + }, + { + "epoch": 0.07605492507367233, + "grad_norm": 2.750098943710327, + "learning_rate": 1.98891139315846e-05, + "loss": 1.3083, + "step": 1213 + }, + { + "epoch": 0.07611762492946267, + "grad_norm": 3.045126438140869, + "learning_rate": 1.9888812145428172e-05, + "loss": 1.2234, + "step": 1214 + }, + { + "epoch": 0.076180324785253, + "grad_norm": 2.9821770191192627, + "learning_rate": 1.98885099514567e-05, + "loss": 1.2704, + "step": 1215 + }, + { + "epoch": 0.07624302464104332, + "grad_norm": 2.7034225463867188, + "learning_rate": 1.988820734968264e-05, + "loss": 1.2705, + "step": 1216 + }, + { + "epoch": 0.07630572449683366, + "grad_norm": 2.6261520385742188, + "learning_rate": 1.988790434011848e-05, + "loss": 1.2977, + "step": 1217 + }, + { + "epoch": 0.07636842435262399, + "grad_norm": 2.518874168395996, + "learning_rate": 1.9887600922776716e-05, + "loss": 1.341, + "step": 1218 + }, + { + "epoch": 0.07643112420841432, + "grad_norm": 2.6119627952575684, + "learning_rate": 1.988729709766985e-05, + "loss": 1.3091, + "step": 1219 + }, + { + "epoch": 0.07649382406420466, + "grad_norm": 2.6994469165802, + "learning_rate": 1.9886992864810427e-05, + "loss": 1.1343, + "step": 1220 + }, + { + "epoch": 0.07655652391999498, + "grad_norm": 2.815887451171875, + "learning_rate": 1.9886688224210988e-05, + "loss": 1.3344, + "step": 1221 + }, + { + "epoch": 0.07661922377578531, + "grad_norm": 2.4275174140930176, + "learning_rate": 1.988638317588409e-05, + "loss": 1.3151, + "step": 1222 + }, + { + "epoch": 
0.07668192363157565, + "grad_norm": 2.518137216567993, + "learning_rate": 1.988607771984232e-05, + "loss": 1.1979, + "step": 1223 + }, + { + "epoch": 0.07674462348736598, + "grad_norm": 2.8350582122802734, + "learning_rate": 1.9885771856098276e-05, + "loss": 1.3932, + "step": 1224 + }, + { + "epoch": 0.0768073233431563, + "grad_norm": 2.823108673095703, + "learning_rate": 1.9885465584664562e-05, + "loss": 1.2284, + "step": 1225 + }, + { + "epoch": 0.07687002319894665, + "grad_norm": 2.88366961479187, + "learning_rate": 1.9885158905553822e-05, + "loss": 1.3477, + "step": 1226 + }, + { + "epoch": 0.07693272305473697, + "grad_norm": 2.6124038696289062, + "learning_rate": 1.9884851818778695e-05, + "loss": 1.3454, + "step": 1227 + }, + { + "epoch": 0.0769954229105273, + "grad_norm": 2.697432518005371, + "learning_rate": 1.9884544324351846e-05, + "loss": 1.2851, + "step": 1228 + }, + { + "epoch": 0.07705812276631764, + "grad_norm": 2.9594221115112305, + "learning_rate": 1.988423642228596e-05, + "loss": 1.2085, + "step": 1229 + }, + { + "epoch": 0.07712082262210797, + "grad_norm": 2.166137456893921, + "learning_rate": 1.988392811259373e-05, + "loss": 1.4228, + "step": 1230 + }, + { + "epoch": 0.0771835224778983, + "grad_norm": 2.7097458839416504, + "learning_rate": 1.9883619395287873e-05, + "loss": 1.4548, + "step": 1231 + }, + { + "epoch": 0.07724622233368864, + "grad_norm": 2.3480563163757324, + "learning_rate": 1.988331027038112e-05, + "loss": 1.3076, + "step": 1232 + }, + { + "epoch": 0.07730892218947896, + "grad_norm": 2.609158515930176, + "learning_rate": 1.9883000737886222e-05, + "loss": 1.2186, + "step": 1233 + }, + { + "epoch": 0.07737162204526929, + "grad_norm": 2.630594491958618, + "learning_rate": 1.9882690797815943e-05, + "loss": 1.1949, + "step": 1234 + }, + { + "epoch": 0.07743432190105963, + "grad_norm": 2.5416440963745117, + "learning_rate": 1.988238045018306e-05, + "loss": 1.2576, + "step": 1235 + }, + { + "epoch": 0.07749702175684996, + "grad_norm": 2.787503480911255, + "learning_rate": 1.9882069695000378e-05, + "loss": 1.3379, + "step": 1236 + }, + { + "epoch": 0.0775597216126403, + "grad_norm": 2.6087281703948975, + "learning_rate": 1.988175853228071e-05, + "loss": 1.34, + "step": 1237 + }, + { + "epoch": 0.07762242146843062, + "grad_norm": 2.677251100540161, + "learning_rate": 1.988144696203689e-05, + "loss": 1.2317, + "step": 1238 + }, + { + "epoch": 0.07768512132422095, + "grad_norm": 2.543107032775879, + "learning_rate": 1.988113498428176e-05, + "loss": 1.2827, + "step": 1239 + }, + { + "epoch": 0.07774782118001129, + "grad_norm": 2.469230890274048, + "learning_rate": 1.9880822599028197e-05, + "loss": 1.2725, + "step": 1240 + }, + { + "epoch": 0.07781052103580162, + "grad_norm": 2.745326280593872, + "learning_rate": 1.988050980628908e-05, + "loss": 1.1887, + "step": 1241 + }, + { + "epoch": 0.07787322089159195, + "grad_norm": 2.8567423820495605, + "learning_rate": 1.98801966060773e-05, + "loss": 1.0894, + "step": 1242 + }, + { + "epoch": 0.07793592074738229, + "grad_norm": 2.70544695854187, + "learning_rate": 1.987988299840579e-05, + "loss": 1.2364, + "step": 1243 + }, + { + "epoch": 0.07799862060317261, + "grad_norm": 2.7286384105682373, + "learning_rate": 1.9879568983287468e-05, + "loss": 1.1476, + "step": 1244 + }, + { + "epoch": 0.07806132045896294, + "grad_norm": 2.84879994392395, + "learning_rate": 1.9879254560735288e-05, + "loss": 1.3011, + "step": 1245 + }, + { + "epoch": 0.07812402031475328, + "grad_norm": 2.952376365661621, + "learning_rate": 
1.9878939730762222e-05, + "loss": 1.2251, + "step": 1246 + }, + { + "epoch": 0.07818672017054361, + "grad_norm": 2.8027617931365967, + "learning_rate": 1.9878624493381247e-05, + "loss": 1.1182, + "step": 1247 + }, + { + "epoch": 0.07824942002633394, + "grad_norm": 2.495382785797119, + "learning_rate": 1.987830884860537e-05, + "loss": 1.3415, + "step": 1248 + }, + { + "epoch": 0.07831211988212428, + "grad_norm": 2.9798924922943115, + "learning_rate": 1.9877992796447604e-05, + "loss": 1.1685, + "step": 1249 + }, + { + "epoch": 0.0783748197379146, + "grad_norm": 2.7214107513427734, + "learning_rate": 1.9877676336920984e-05, + "loss": 1.0658, + "step": 1250 + }, + { + "epoch": 0.07843751959370493, + "grad_norm": 2.649357795715332, + "learning_rate": 1.9877359470038557e-05, + "loss": 1.3359, + "step": 1251 + }, + { + "epoch": 0.07850021944949527, + "grad_norm": 2.625134229660034, + "learning_rate": 1.9877042195813395e-05, + "loss": 1.242, + "step": 1252 + }, + { + "epoch": 0.0785629193052856, + "grad_norm": 2.6236841678619385, + "learning_rate": 1.9876724514258586e-05, + "loss": 1.2004, + "step": 1253 + }, + { + "epoch": 0.07862561916107592, + "grad_norm": 2.735640048980713, + "learning_rate": 1.9876406425387222e-05, + "loss": 1.3712, + "step": 1254 + }, + { + "epoch": 0.07868831901686626, + "grad_norm": 2.687265157699585, + "learning_rate": 1.9876087929212428e-05, + "loss": 1.2992, + "step": 1255 + }, + { + "epoch": 0.07875101887265659, + "grad_norm": 2.7477805614471436, + "learning_rate": 1.9875769025747337e-05, + "loss": 1.1382, + "step": 1256 + }, + { + "epoch": 0.07881371872844692, + "grad_norm": 2.853605031967163, + "learning_rate": 1.98754497150051e-05, + "loss": 1.2232, + "step": 1257 + }, + { + "epoch": 0.07887641858423726, + "grad_norm": 2.7245590686798096, + "learning_rate": 1.9875129996998882e-05, + "loss": 1.2502, + "step": 1258 + }, + { + "epoch": 0.07893911844002759, + "grad_norm": 2.8478920459747314, + "learning_rate": 1.9874809871741877e-05, + "loss": 1.2064, + "step": 1259 + }, + { + "epoch": 0.07900181829581791, + "grad_norm": 2.7323238849639893, + "learning_rate": 1.9874489339247277e-05, + "loss": 1.477, + "step": 1260 + }, + { + "epoch": 0.07906451815160825, + "grad_norm": 2.454375982284546, + "learning_rate": 1.9874168399528307e-05, + "loss": 1.1763, + "step": 1261 + }, + { + "epoch": 0.07912721800739858, + "grad_norm": 2.710110664367676, + "learning_rate": 1.98738470525982e-05, + "loss": 1.1609, + "step": 1262 + }, + { + "epoch": 0.07918991786318891, + "grad_norm": 2.5799171924591064, + "learning_rate": 1.987352529847021e-05, + "loss": 1.357, + "step": 1263 + }, + { + "epoch": 0.07925261771897925, + "grad_norm": 2.8160431385040283, + "learning_rate": 1.987320313715761e-05, + "loss": 1.2631, + "step": 1264 + }, + { + "epoch": 0.07931531757476958, + "grad_norm": 2.579690456390381, + "learning_rate": 1.9872880568673675e-05, + "loss": 1.3478, + "step": 1265 + }, + { + "epoch": 0.07937801743055992, + "grad_norm": 2.945063829421997, + "learning_rate": 1.987255759303172e-05, + "loss": 1.234, + "step": 1266 + }, + { + "epoch": 0.07944071728635024, + "grad_norm": 2.443359613418579, + "learning_rate": 1.9872234210245055e-05, + "loss": 1.3376, + "step": 1267 + }, + { + "epoch": 0.07950341714214057, + "grad_norm": 2.6438097953796387, + "learning_rate": 1.9871910420327018e-05, + "loss": 1.4033, + "step": 1268 + }, + { + "epoch": 0.07956611699793091, + "grad_norm": 2.8579742908477783, + "learning_rate": 1.987158622329097e-05, + "loss": 1.195, + "step": 1269 + }, + { + "epoch": 
0.07962881685372124, + "grad_norm": 2.6206023693084717, + "learning_rate": 1.9871261619150267e-05, + "loss": 1.1069, + "step": 1270 + }, + { + "epoch": 0.07969151670951156, + "grad_norm": 2.5386998653411865, + "learning_rate": 1.987093660791831e-05, + "loss": 1.2099, + "step": 1271 + }, + { + "epoch": 0.0797542165653019, + "grad_norm": 2.5458946228027344, + "learning_rate": 1.9870611189608494e-05, + "loss": 1.175, + "step": 1272 + }, + { + "epoch": 0.07981691642109223, + "grad_norm": 3.1842119693756104, + "learning_rate": 1.9870285364234244e-05, + "loss": 1.0706, + "step": 1273 + }, + { + "epoch": 0.07987961627688256, + "grad_norm": 2.6858630180358887, + "learning_rate": 1.9869959131808995e-05, + "loss": 1.2549, + "step": 1274 + }, + { + "epoch": 0.0799423161326729, + "grad_norm": 2.823439359664917, + "learning_rate": 1.9869632492346196e-05, + "loss": 1.2336, + "step": 1275 + }, + { + "epoch": 0.08000501598846323, + "grad_norm": 2.768827438354492, + "learning_rate": 1.9869305445859322e-05, + "loss": 1.3364, + "step": 1276 + }, + { + "epoch": 0.08006771584425355, + "grad_norm": 2.905043840408325, + "learning_rate": 1.9868977992361866e-05, + "loss": 1.2565, + "step": 1277 + }, + { + "epoch": 0.0801304157000439, + "grad_norm": 2.7227118015289307, + "learning_rate": 1.986865013186732e-05, + "loss": 1.3372, + "step": 1278 + }, + { + "epoch": 0.08019311555583422, + "grad_norm": 2.876739025115967, + "learning_rate": 1.9868321864389216e-05, + "loss": 1.3002, + "step": 1279 + }, + { + "epoch": 0.08025581541162455, + "grad_norm": 2.934328079223633, + "learning_rate": 1.9867993189941085e-05, + "loss": 1.3796, + "step": 1280 + }, + { + "epoch": 0.08031851526741489, + "grad_norm": 3.078748941421509, + "learning_rate": 1.986766410853649e-05, + "loss": 1.2124, + "step": 1281 + }, + { + "epoch": 0.08038121512320522, + "grad_norm": 2.7110280990600586, + "learning_rate": 1.986733462018899e-05, + "loss": 1.4494, + "step": 1282 + }, + { + "epoch": 0.08044391497899554, + "grad_norm": 2.6764707565307617, + "learning_rate": 1.9867004724912178e-05, + "loss": 1.1829, + "step": 1283 + }, + { + "epoch": 0.08050661483478588, + "grad_norm": 2.739830255508423, + "learning_rate": 1.9866674422719666e-05, + "loss": 1.2868, + "step": 1284 + }, + { + "epoch": 0.08056931469057621, + "grad_norm": 2.823793411254883, + "learning_rate": 1.9866343713625066e-05, + "loss": 1.1706, + "step": 1285 + }, + { + "epoch": 0.08063201454636654, + "grad_norm": 2.736750364303589, + "learning_rate": 1.986601259764202e-05, + "loss": 1.2652, + "step": 1286 + }, + { + "epoch": 0.08069471440215688, + "grad_norm": 2.7968523502349854, + "learning_rate": 1.9865681074784183e-05, + "loss": 1.3144, + "step": 1287 + }, + { + "epoch": 0.0807574142579472, + "grad_norm": 2.63802170753479, + "learning_rate": 1.9865349145065233e-05, + "loss": 1.1561, + "step": 1288 + }, + { + "epoch": 0.08082011411373753, + "grad_norm": 2.6819794178009033, + "learning_rate": 1.9865016808498848e-05, + "loss": 1.1805, + "step": 1289 + }, + { + "epoch": 0.08088281396952787, + "grad_norm": 2.5093612670898438, + "learning_rate": 1.9864684065098744e-05, + "loss": 1.2918, + "step": 1290 + }, + { + "epoch": 0.0809455138253182, + "grad_norm": 2.6123626232147217, + "learning_rate": 1.9864350914878635e-05, + "loss": 1.3919, + "step": 1291 + }, + { + "epoch": 0.08100821368110854, + "grad_norm": 2.7592933177948, + "learning_rate": 1.9864017357852262e-05, + "loss": 1.3135, + "step": 1292 + }, + { + "epoch": 0.08107091353689887, + "grad_norm": 3.0334367752075195, + "learning_rate": 
1.9863683394033386e-05, + "loss": 1.065, + "step": 1293 + }, + { + "epoch": 0.0811336133926892, + "grad_norm": 2.796496629714966, + "learning_rate": 1.9863349023435773e-05, + "loss": 1.2578, + "step": 1294 + }, + { + "epoch": 0.08119631324847953, + "grad_norm": 2.8122432231903076, + "learning_rate": 1.9863014246073216e-05, + "loss": 1.1045, + "step": 1295 + }, + { + "epoch": 0.08125901310426986, + "grad_norm": 2.691667318344116, + "learning_rate": 1.9862679061959523e-05, + "loss": 1.2189, + "step": 1296 + }, + { + "epoch": 0.08132171296006019, + "grad_norm": 2.6731770038604736, + "learning_rate": 1.9862343471108513e-05, + "loss": 1.3975, + "step": 1297 + }, + { + "epoch": 0.08138441281585053, + "grad_norm": 2.5327913761138916, + "learning_rate": 1.9862007473534026e-05, + "loss": 1.3244, + "step": 1298 + }, + { + "epoch": 0.08144711267164086, + "grad_norm": 2.8924202919006348, + "learning_rate": 1.9861671069249925e-05, + "loss": 1.2208, + "step": 1299 + }, + { + "epoch": 0.08150981252743118, + "grad_norm": 3.00374698638916, + "learning_rate": 1.986133425827007e-05, + "loss": 1.2157, + "step": 1300 + }, + { + "epoch": 0.08157251238322152, + "grad_norm": 2.8342292308807373, + "learning_rate": 1.986099704060837e-05, + "loss": 1.2018, + "step": 1301 + }, + { + "epoch": 0.08163521223901185, + "grad_norm": 2.798806667327881, + "learning_rate": 1.9860659416278715e-05, + "loss": 1.195, + "step": 1302 + }, + { + "epoch": 0.08169791209480218, + "grad_norm": 2.594348430633545, + "learning_rate": 1.9860321385295038e-05, + "loss": 1.1881, + "step": 1303 + }, + { + "epoch": 0.08176061195059252, + "grad_norm": 2.785688877105713, + "learning_rate": 1.9859982947671273e-05, + "loss": 1.1909, + "step": 1304 + }, + { + "epoch": 0.08182331180638285, + "grad_norm": 2.6540842056274414, + "learning_rate": 1.9859644103421384e-05, + "loss": 1.2295, + "step": 1305 + }, + { + "epoch": 0.08188601166217317, + "grad_norm": 2.650623321533203, + "learning_rate": 1.985930485255934e-05, + "loss": 1.2723, + "step": 1306 + }, + { + "epoch": 0.08194871151796351, + "grad_norm": 2.919755697250366, + "learning_rate": 1.9858965195099133e-05, + "loss": 1.2451, + "step": 1307 + }, + { + "epoch": 0.08201141137375384, + "grad_norm": 2.729658842086792, + "learning_rate": 1.9858625131054772e-05, + "loss": 1.2648, + "step": 1308 + }, + { + "epoch": 0.08207411122954417, + "grad_norm": 2.8196041584014893, + "learning_rate": 1.9858284660440282e-05, + "loss": 1.0445, + "step": 1309 + }, + { + "epoch": 0.08213681108533451, + "grad_norm": 2.476365566253662, + "learning_rate": 1.98579437832697e-05, + "loss": 1.3971, + "step": 1310 + }, + { + "epoch": 0.08219951094112483, + "grad_norm": 2.554795742034912, + "learning_rate": 1.985760249955708e-05, + "loss": 1.121, + "step": 1311 + }, + { + "epoch": 0.08226221079691516, + "grad_norm": 2.5871002674102783, + "learning_rate": 1.985726080931651e-05, + "loss": 1.2531, + "step": 1312 + }, + { + "epoch": 0.0823249106527055, + "grad_norm": 3.069575309753418, + "learning_rate": 1.985691871256207e-05, + "loss": 1.2677, + "step": 1313 + }, + { + "epoch": 0.08238761050849583, + "grad_norm": 2.283582925796509, + "learning_rate": 1.9856576209307875e-05, + "loss": 1.3708, + "step": 1314 + }, + { + "epoch": 0.08245031036428616, + "grad_norm": 2.6969730854034424, + "learning_rate": 1.9856233299568047e-05, + "loss": 1.2427, + "step": 1315 + }, + { + "epoch": 0.0825130102200765, + "grad_norm": 2.737898588180542, + "learning_rate": 1.9855889983356724e-05, + "loss": 1.029, + "step": 1316 + }, + { + "epoch": 
0.08257571007586682, + "grad_norm": 2.7518866062164307, + "learning_rate": 1.9855546260688066e-05, + "loss": 1.1934, + "step": 1317 + }, + { + "epoch": 0.08263840993165716, + "grad_norm": 2.453906536102295, + "learning_rate": 1.9855202131576256e-05, + "loss": 1.3354, + "step": 1318 + }, + { + "epoch": 0.08270110978744749, + "grad_norm": 2.7782344818115234, + "learning_rate": 1.9854857596035476e-05, + "loss": 1.1168, + "step": 1319 + }, + { + "epoch": 0.08276380964323782, + "grad_norm": 2.6979691982269287, + "learning_rate": 1.985451265407994e-05, + "loss": 1.2552, + "step": 1320 + }, + { + "epoch": 0.08282650949902816, + "grad_norm": 2.4545280933380127, + "learning_rate": 1.985416730572387e-05, + "loss": 1.2616, + "step": 1321 + }, + { + "epoch": 0.08288920935481849, + "grad_norm": 2.5163140296936035, + "learning_rate": 1.9853821550981512e-05, + "loss": 1.2415, + "step": 1322 + }, + { + "epoch": 0.08295190921060881, + "grad_norm": 2.981084108352661, + "learning_rate": 1.985347538986712e-05, + "loss": 1.2031, + "step": 1323 + }, + { + "epoch": 0.08301460906639915, + "grad_norm": 2.435689687728882, + "learning_rate": 1.9853128822394976e-05, + "loss": 1.3307, + "step": 1324 + }, + { + "epoch": 0.08307730892218948, + "grad_norm": 2.8322956562042236, + "learning_rate": 1.9852781848579367e-05, + "loss": 1.1711, + "step": 1325 + }, + { + "epoch": 0.0831400087779798, + "grad_norm": 2.7071914672851562, + "learning_rate": 1.9852434468434603e-05, + "loss": 1.265, + "step": 1326 + }, + { + "epoch": 0.08320270863377015, + "grad_norm": 2.9382688999176025, + "learning_rate": 1.9852086681975016e-05, + "loss": 1.2432, + "step": 1327 + }, + { + "epoch": 0.08326540848956047, + "grad_norm": 2.550605297088623, + "learning_rate": 1.985173848921494e-05, + "loss": 1.2746, + "step": 1328 + }, + { + "epoch": 0.0833281083453508, + "grad_norm": 2.7576193809509277, + "learning_rate": 1.9851389890168738e-05, + "loss": 1.3171, + "step": 1329 + }, + { + "epoch": 0.08339080820114114, + "grad_norm": 3.0757932662963867, + "learning_rate": 1.985104088485079e-05, + "loss": 1.1377, + "step": 1330 + }, + { + "epoch": 0.08345350805693147, + "grad_norm": 2.5999927520751953, + "learning_rate": 1.9850691473275483e-05, + "loss": 1.3316, + "step": 1331 + }, + { + "epoch": 0.0835162079127218, + "grad_norm": 2.925724744796753, + "learning_rate": 1.9850341655457233e-05, + "loss": 1.3052, + "step": 1332 + }, + { + "epoch": 0.08357890776851214, + "grad_norm": 2.5194084644317627, + "learning_rate": 1.984999143141046e-05, + "loss": 1.3082, + "step": 1333 + }, + { + "epoch": 0.08364160762430246, + "grad_norm": 2.878697633743286, + "learning_rate": 1.9849640801149606e-05, + "loss": 1.3515, + "step": 1334 + }, + { + "epoch": 0.08370430748009279, + "grad_norm": 3.063779830932617, + "learning_rate": 1.9849289764689144e-05, + "loss": 1.2472, + "step": 1335 + }, + { + "epoch": 0.08376700733588313, + "grad_norm": 2.700253486633301, + "learning_rate": 1.9848938322043536e-05, + "loss": 1.317, + "step": 1336 + }, + { + "epoch": 0.08382970719167346, + "grad_norm": 2.6942949295043945, + "learning_rate": 1.9848586473227286e-05, + "loss": 1.3641, + "step": 1337 + }, + { + "epoch": 0.08389240704746379, + "grad_norm": 2.578930377960205, + "learning_rate": 1.9848234218254895e-05, + "loss": 1.3096, + "step": 1338 + }, + { + "epoch": 0.08395510690325413, + "grad_norm": 3.025020122528076, + "learning_rate": 1.9847881557140896e-05, + "loss": 0.9916, + "step": 1339 + }, + { + "epoch": 0.08401780675904445, + "grad_norm": 2.7581655979156494, + "learning_rate": 
1.984752848989983e-05, + "loss": 1.1438, + "step": 1340 + }, + { + "epoch": 0.08408050661483478, + "grad_norm": 2.7883896827697754, + "learning_rate": 1.9847175016546264e-05, + "loss": 1.3082, + "step": 1341 + }, + { + "epoch": 0.08414320647062512, + "grad_norm": 2.680837392807007, + "learning_rate": 1.9846821137094768e-05, + "loss": 1.2603, + "step": 1342 + }, + { + "epoch": 0.08420590632641545, + "grad_norm": 2.825000524520874, + "learning_rate": 1.984646685155994e-05, + "loss": 1.1517, + "step": 1343 + }, + { + "epoch": 0.08426860618220577, + "grad_norm": 2.577331066131592, + "learning_rate": 1.9846112159956384e-05, + "loss": 1.227, + "step": 1344 + }, + { + "epoch": 0.08433130603799611, + "grad_norm": 2.364866256713867, + "learning_rate": 1.9845757062298736e-05, + "loss": 1.1748, + "step": 1345 + }, + { + "epoch": 0.08439400589378644, + "grad_norm": 2.778247594833374, + "learning_rate": 1.9845401558601634e-05, + "loss": 1.2042, + "step": 1346 + }, + { + "epoch": 0.08445670574957678, + "grad_norm": 3.316617012023926, + "learning_rate": 1.9845045648879747e-05, + "loss": 1.3286, + "step": 1347 + }, + { + "epoch": 0.08451940560536711, + "grad_norm": 2.6467018127441406, + "learning_rate": 1.9844689333147745e-05, + "loss": 1.3833, + "step": 1348 + }, + { + "epoch": 0.08458210546115744, + "grad_norm": 2.593684434890747, + "learning_rate": 1.984433261142032e-05, + "loss": 1.2192, + "step": 1349 + }, + { + "epoch": 0.08464480531694778, + "grad_norm": 2.729100465774536, + "learning_rate": 1.9843975483712198e-05, + "loss": 1.1897, + "step": 1350 + }, + { + "epoch": 0.0847075051727381, + "grad_norm": 2.6096115112304688, + "learning_rate": 1.984361795003809e-05, + "loss": 1.3151, + "step": 1351 + }, + { + "epoch": 0.08477020502852843, + "grad_norm": 2.5198731422424316, + "learning_rate": 1.984326001041275e-05, + "loss": 1.2194, + "step": 1352 + }, + { + "epoch": 0.08483290488431877, + "grad_norm": 2.936073064804077, + "learning_rate": 1.9842901664850938e-05, + "loss": 1.2295, + "step": 1353 + }, + { + "epoch": 0.0848956047401091, + "grad_norm": 2.6048877239227295, + "learning_rate": 1.984254291336743e-05, + "loss": 1.3077, + "step": 1354 + }, + { + "epoch": 0.08495830459589943, + "grad_norm": 2.6064412593841553, + "learning_rate": 1.9842183755977022e-05, + "loss": 1.2506, + "step": 1355 + }, + { + "epoch": 0.08502100445168977, + "grad_norm": 2.74106502532959, + "learning_rate": 1.9841824192694526e-05, + "loss": 1.2754, + "step": 1356 + }, + { + "epoch": 0.08508370430748009, + "grad_norm": 2.6705849170684814, + "learning_rate": 1.9841464223534774e-05, + "loss": 1.2792, + "step": 1357 + }, + { + "epoch": 0.08514640416327042, + "grad_norm": 2.4380173683166504, + "learning_rate": 1.9841103848512606e-05, + "loss": 1.2259, + "step": 1358 + }, + { + "epoch": 0.08520910401906076, + "grad_norm": 2.9990460872650146, + "learning_rate": 1.9840743067642885e-05, + "loss": 1.2251, + "step": 1359 + }, + { + "epoch": 0.08527180387485109, + "grad_norm": 3.008645534515381, + "learning_rate": 1.9840381880940488e-05, + "loss": 1.1816, + "step": 1360 + }, + { + "epoch": 0.08533450373064141, + "grad_norm": 2.69325852394104, + "learning_rate": 1.9840020288420314e-05, + "loss": 1.3717, + "step": 1361 + }, + { + "epoch": 0.08539720358643176, + "grad_norm": 2.6082913875579834, + "learning_rate": 1.9839658290097276e-05, + "loss": 1.1682, + "step": 1362 + }, + { + "epoch": 0.08545990344222208, + "grad_norm": 2.5351686477661133, + "learning_rate": 1.98392958859863e-05, + "loss": 1.5105, + "step": 1363 + }, + { + "epoch": 
0.08552260329801241, + "grad_norm": 2.665637731552124, + "learning_rate": 1.983893307610233e-05, + "loss": 1.1969, + "step": 1364 + }, + { + "epoch": 0.08558530315380275, + "grad_norm": 2.6634914875030518, + "learning_rate": 1.983856986046033e-05, + "loss": 1.2432, + "step": 1365 + }, + { + "epoch": 0.08564800300959308, + "grad_norm": 2.7587814331054688, + "learning_rate": 1.983820623907528e-05, + "loss": 1.2912, + "step": 1366 + }, + { + "epoch": 0.0857107028653834, + "grad_norm": 2.5042364597320557, + "learning_rate": 1.9837842211962175e-05, + "loss": 1.2172, + "step": 1367 + }, + { + "epoch": 0.08577340272117374, + "grad_norm": 2.744389057159424, + "learning_rate": 1.9837477779136028e-05, + "loss": 1.2187, + "step": 1368 + }, + { + "epoch": 0.08583610257696407, + "grad_norm": 3.0363094806671143, + "learning_rate": 1.9837112940611866e-05, + "loss": 1.2384, + "step": 1369 + }, + { + "epoch": 0.0858988024327544, + "grad_norm": 2.9972379207611084, + "learning_rate": 1.9836747696404737e-05, + "loss": 1.2763, + "step": 1370 + }, + { + "epoch": 0.08596150228854474, + "grad_norm": 2.5058088302612305, + "learning_rate": 1.9836382046529708e-05, + "loss": 1.1884, + "step": 1371 + }, + { + "epoch": 0.08602420214433507, + "grad_norm": 2.7410523891448975, + "learning_rate": 1.983601599100185e-05, + "loss": 1.2354, + "step": 1372 + }, + { + "epoch": 0.0860869020001254, + "grad_norm": 2.375929117202759, + "learning_rate": 1.9835649529836264e-05, + "loss": 1.3468, + "step": 1373 + }, + { + "epoch": 0.08614960185591573, + "grad_norm": 2.758585214614868, + "learning_rate": 1.983528266304806e-05, + "loss": 1.4353, + "step": 1374 + }, + { + "epoch": 0.08621230171170606, + "grad_norm": 2.4965972900390625, + "learning_rate": 1.983491539065237e-05, + "loss": 1.3497, + "step": 1375 + }, + { + "epoch": 0.0862750015674964, + "grad_norm": 2.4747118949890137, + "learning_rate": 1.983454771266434e-05, + "loss": 1.1642, + "step": 1376 + }, + { + "epoch": 0.08633770142328673, + "grad_norm": 2.6417577266693115, + "learning_rate": 1.9834179629099135e-05, + "loss": 1.523, + "step": 1377 + }, + { + "epoch": 0.08640040127907705, + "grad_norm": 2.67887544631958, + "learning_rate": 1.983381113997193e-05, + "loss": 1.2104, + "step": 1378 + }, + { + "epoch": 0.0864631011348674, + "grad_norm": 2.460981607437134, + "learning_rate": 1.9833442245297923e-05, + "loss": 1.1368, + "step": 1379 + }, + { + "epoch": 0.08652580099065772, + "grad_norm": 2.455939292907715, + "learning_rate": 1.9833072945092334e-05, + "loss": 1.0947, + "step": 1380 + }, + { + "epoch": 0.08658850084644805, + "grad_norm": 2.358366012573242, + "learning_rate": 1.9832703239370385e-05, + "loss": 1.3171, + "step": 1381 + }, + { + "epoch": 0.08665120070223839, + "grad_norm": 3.046525001525879, + "learning_rate": 1.9832333128147323e-05, + "loss": 1.1442, + "step": 1382 + }, + { + "epoch": 0.08671390055802872, + "grad_norm": 2.6797382831573486, + "learning_rate": 1.9831962611438412e-05, + "loss": 1.2054, + "step": 1383 + }, + { + "epoch": 0.08677660041381904, + "grad_norm": 2.6812055110931396, + "learning_rate": 1.9831591689258936e-05, + "loss": 1.358, + "step": 1384 + }, + { + "epoch": 0.08683930026960938, + "grad_norm": 2.738280773162842, + "learning_rate": 1.9831220361624192e-05, + "loss": 1.3609, + "step": 1385 + }, + { + "epoch": 0.08690200012539971, + "grad_norm": 2.834893226623535, + "learning_rate": 1.9830848628549487e-05, + "loss": 1.2512, + "step": 1386 + }, + { + "epoch": 0.08696469998119004, + "grad_norm": 2.7571065425872803, + "learning_rate": 
1.9830476490050156e-05, + "loss": 1.135, + "step": 1387 + }, + { + "epoch": 0.08702739983698038, + "grad_norm": 2.6347649097442627, + "learning_rate": 1.9830103946141545e-05, + "loss": 1.2046, + "step": 1388 + }, + { + "epoch": 0.0870900996927707, + "grad_norm": 2.8580994606018066, + "learning_rate": 1.982973099683902e-05, + "loss": 1.2113, + "step": 1389 + }, + { + "epoch": 0.08715279954856103, + "grad_norm": 2.5019478797912598, + "learning_rate": 1.9829357642157957e-05, + "loss": 1.3026, + "step": 1390 + }, + { + "epoch": 0.08721549940435137, + "grad_norm": 2.679407835006714, + "learning_rate": 1.9828983882113758e-05, + "loss": 1.4131, + "step": 1391 + }, + { + "epoch": 0.0872781992601417, + "grad_norm": 3.0774121284484863, + "learning_rate": 1.9828609716721834e-05, + "loss": 1.2012, + "step": 1392 + }, + { + "epoch": 0.08734089911593203, + "grad_norm": 3.072697162628174, + "learning_rate": 1.9828235145997614e-05, + "loss": 1.2441, + "step": 1393 + }, + { + "epoch": 0.08740359897172237, + "grad_norm": 2.6595516204833984, + "learning_rate": 1.982786016995655e-05, + "loss": 1.3461, + "step": 1394 + }, + { + "epoch": 0.0874662988275127, + "grad_norm": 2.546985626220703, + "learning_rate": 1.98274847886141e-05, + "loss": 1.2266, + "step": 1395 + }, + { + "epoch": 0.08752899868330302, + "grad_norm": 2.8480827808380127, + "learning_rate": 1.982710900198575e-05, + "loss": 1.216, + "step": 1396 + }, + { + "epoch": 0.08759169853909336, + "grad_norm": 2.857326030731201, + "learning_rate": 1.9826732810087e-05, + "loss": 1.253, + "step": 1397 + }, + { + "epoch": 0.08765439839488369, + "grad_norm": 2.9043195247650146, + "learning_rate": 1.9826356212933353e-05, + "loss": 1.0474, + "step": 1398 + }, + { + "epoch": 0.08771709825067402, + "grad_norm": 3.0661776065826416, + "learning_rate": 1.9825979210540348e-05, + "loss": 1.3819, + "step": 1399 + }, + { + "epoch": 0.08777979810646436, + "grad_norm": 2.7739059925079346, + "learning_rate": 1.9825601802923532e-05, + "loss": 1.3019, + "step": 1400 + }, + { + "epoch": 0.08784249796225468, + "grad_norm": 2.8987908363342285, + "learning_rate": 1.982522399009847e-05, + "loss": 1.1748, + "step": 1401 + }, + { + "epoch": 0.08790519781804502, + "grad_norm": 2.821255683898926, + "learning_rate": 1.982484577208074e-05, + "loss": 1.3914, + "step": 1402 + }, + { + "epoch": 0.08796789767383535, + "grad_norm": 2.8112640380859375, + "learning_rate": 1.9824467148885942e-05, + "loss": 1.212, + "step": 1403 + }, + { + "epoch": 0.08803059752962568, + "grad_norm": 2.8358311653137207, + "learning_rate": 1.982408812052969e-05, + "loss": 1.3075, + "step": 1404 + }, + { + "epoch": 0.08809329738541602, + "grad_norm": 2.7072231769561768, + "learning_rate": 1.9823708687027614e-05, + "loss": 1.1793, + "step": 1405 + }, + { + "epoch": 0.08815599724120635, + "grad_norm": 2.4970836639404297, + "learning_rate": 1.9823328848395366e-05, + "loss": 1.099, + "step": 1406 + }, + { + "epoch": 0.08821869709699667, + "grad_norm": 3.1183087825775146, + "learning_rate": 1.9822948604648602e-05, + "loss": 1.2318, + "step": 1407 + }, + { + "epoch": 0.08828139695278701, + "grad_norm": 2.4535374641418457, + "learning_rate": 1.982256795580301e-05, + "loss": 1.2898, + "step": 1408 + }, + { + "epoch": 0.08834409680857734, + "grad_norm": 2.715289354324341, + "learning_rate": 1.9822186901874288e-05, + "loss": 1.0675, + "step": 1409 + }, + { + "epoch": 0.08840679666436767, + "grad_norm": 2.4666051864624023, + "learning_rate": 1.982180544287815e-05, + "loss": 1.3202, + "step": 1410 + }, + { + "epoch": 
0.08846949652015801, + "grad_norm": 2.5199086666107178, + "learning_rate": 1.9821423578830325e-05, + "loss": 1.1212, + "step": 1411 + }, + { + "epoch": 0.08853219637594834, + "grad_norm": 2.6699328422546387, + "learning_rate": 1.9821041309746563e-05, + "loss": 1.3383, + "step": 1412 + }, + { + "epoch": 0.08859489623173866, + "grad_norm": 2.819005250930786, + "learning_rate": 1.9820658635642628e-05, + "loss": 1.2799, + "step": 1413 + }, + { + "epoch": 0.088657596087529, + "grad_norm": 2.5930488109588623, + "learning_rate": 1.9820275556534306e-05, + "loss": 1.3076, + "step": 1414 + }, + { + "epoch": 0.08872029594331933, + "grad_norm": 2.757100820541382, + "learning_rate": 1.9819892072437388e-05, + "loss": 1.2332, + "step": 1415 + }, + { + "epoch": 0.08878299579910966, + "grad_norm": 2.659529209136963, + "learning_rate": 1.9819508183367692e-05, + "loss": 1.1915, + "step": 1416 + }, + { + "epoch": 0.0888456956549, + "grad_norm": 2.523775339126587, + "learning_rate": 1.981912388934105e-05, + "loss": 1.2721, + "step": 1417 + }, + { + "epoch": 0.08890839551069032, + "grad_norm": 3.238464593887329, + "learning_rate": 1.981873919037331e-05, + "loss": 1.1624, + "step": 1418 + }, + { + "epoch": 0.08897109536648065, + "grad_norm": 2.833627939224243, + "learning_rate": 1.9818354086480337e-05, + "loss": 1.1932, + "step": 1419 + }, + { + "epoch": 0.08903379522227099, + "grad_norm": 2.4628078937530518, + "learning_rate": 1.981796857767801e-05, + "loss": 1.3781, + "step": 1420 + }, + { + "epoch": 0.08909649507806132, + "grad_norm": 2.414083957672119, + "learning_rate": 1.9817582663982235e-05, + "loss": 1.2932, + "step": 1421 + }, + { + "epoch": 0.08915919493385165, + "grad_norm": 2.887523651123047, + "learning_rate": 1.981719634540892e-05, + "loss": 1.3533, + "step": 1422 + }, + { + "epoch": 0.08922189478964199, + "grad_norm": 2.393618106842041, + "learning_rate": 1.9816809621973997e-05, + "loss": 1.203, + "step": 1423 + }, + { + "epoch": 0.08928459464543231, + "grad_norm": 2.6079211235046387, + "learning_rate": 1.9816422493693417e-05, + "loss": 1.3866, + "step": 1424 + }, + { + "epoch": 0.08934729450122264, + "grad_norm": 2.9038569927215576, + "learning_rate": 1.981603496058315e-05, + "loss": 1.1764, + "step": 1425 + }, + { + "epoch": 0.08940999435701298, + "grad_norm": 2.636793375015259, + "learning_rate": 1.9815647022659167e-05, + "loss": 1.1768, + "step": 1426 + }, + { + "epoch": 0.08947269421280331, + "grad_norm": 3.131718873977661, + "learning_rate": 1.9815258679937472e-05, + "loss": 1.3033, + "step": 1427 + }, + { + "epoch": 0.08953539406859365, + "grad_norm": 2.580310106277466, + "learning_rate": 1.9814869932434083e-05, + "loss": 1.2304, + "step": 1428 + }, + { + "epoch": 0.08959809392438398, + "grad_norm": 2.7645533084869385, + "learning_rate": 1.9814480780165026e-05, + "loss": 1.1934, + "step": 1429 + }, + { + "epoch": 0.0896607937801743, + "grad_norm": 2.563730001449585, + "learning_rate": 1.9814091223146357e-05, + "loss": 1.2736, + "step": 1430 + }, + { + "epoch": 0.08972349363596464, + "grad_norm": 2.7871320247650146, + "learning_rate": 1.9813701261394136e-05, + "loss": 1.2596, + "step": 1431 + }, + { + "epoch": 0.08978619349175497, + "grad_norm": 2.725473642349243, + "learning_rate": 1.9813310894924447e-05, + "loss": 1.3201, + "step": 1432 + }, + { + "epoch": 0.0898488933475453, + "grad_norm": 2.646885395050049, + "learning_rate": 1.9812920123753387e-05, + "loss": 1.4, + "step": 1433 + }, + { + "epoch": 0.08991159320333564, + "grad_norm": 2.675640821456909, + "learning_rate": 
1.9812528947897075e-05, + "loss": 1.1655, + "step": 1434 + }, + { + "epoch": 0.08997429305912596, + "grad_norm": 2.7104313373565674, + "learning_rate": 1.9812137367371638e-05, + "loss": 1.4441, + "step": 1435 + }, + { + "epoch": 0.09003699291491629, + "grad_norm": 3.167902708053589, + "learning_rate": 1.9811745382193224e-05, + "loss": 1.307, + "step": 1436 + }, + { + "epoch": 0.09009969277070663, + "grad_norm": 2.3958351612091064, + "learning_rate": 1.9811352992378007e-05, + "loss": 1.2579, + "step": 1437 + }, + { + "epoch": 0.09016239262649696, + "grad_norm": 2.969587564468384, + "learning_rate": 1.9810960197942165e-05, + "loss": 1.1055, + "step": 1438 + }, + { + "epoch": 0.09022509248228729, + "grad_norm": 2.6010584831237793, + "learning_rate": 1.9810566998901892e-05, + "loss": 1.0987, + "step": 1439 + }, + { + "epoch": 0.09028779233807763, + "grad_norm": 2.7416203022003174, + "learning_rate": 1.9810173395273412e-05, + "loss": 1.2199, + "step": 1440 + }, + { + "epoch": 0.09035049219386795, + "grad_norm": 2.5738096237182617, + "learning_rate": 1.980977938707295e-05, + "loss": 1.4312, + "step": 1441 + }, + { + "epoch": 0.09041319204965828, + "grad_norm": 2.514317035675049, + "learning_rate": 1.9809384974316758e-05, + "loss": 1.165, + "step": 1442 + }, + { + "epoch": 0.09047589190544862, + "grad_norm": 2.805903911590576, + "learning_rate": 1.9808990157021103e-05, + "loss": 1.2332, + "step": 1443 + }, + { + "epoch": 0.09053859176123895, + "grad_norm": 3.068471670150757, + "learning_rate": 1.9808594935202262e-05, + "loss": 1.1539, + "step": 1444 + }, + { + "epoch": 0.09060129161702928, + "grad_norm": 2.526785135269165, + "learning_rate": 1.9808199308876543e-05, + "loss": 1.1426, + "step": 1445 + }, + { + "epoch": 0.09066399147281962, + "grad_norm": 2.9580936431884766, + "learning_rate": 1.9807803278060247e-05, + "loss": 1.1427, + "step": 1446 + }, + { + "epoch": 0.09072669132860994, + "grad_norm": 2.6596641540527344, + "learning_rate": 1.9807406842769725e-05, + "loss": 1.1044, + "step": 1447 + }, + { + "epoch": 0.09078939118440027, + "grad_norm": 3.1014626026153564, + "learning_rate": 1.980701000302131e-05, + "loss": 1.3275, + "step": 1448 + }, + { + "epoch": 0.09085209104019061, + "grad_norm": 2.6227200031280518, + "learning_rate": 1.980661275883138e-05, + "loss": 1.4626, + "step": 1449 + }, + { + "epoch": 0.09091479089598094, + "grad_norm": 3.1233670711517334, + "learning_rate": 1.9806215110216308e-05, + "loss": 1.1717, + "step": 1450 + }, + { + "epoch": 0.09097749075177126, + "grad_norm": 2.564004421234131, + "learning_rate": 1.9805817057192497e-05, + "loss": 1.1121, + "step": 1451 + }, + { + "epoch": 0.0910401906075616, + "grad_norm": 2.795647382736206, + "learning_rate": 1.980541859977636e-05, + "loss": 1.3428, + "step": 1452 + }, + { + "epoch": 0.09110289046335193, + "grad_norm": 2.624648332595825, + "learning_rate": 1.9805019737984336e-05, + "loss": 1.2161, + "step": 1453 + }, + { + "epoch": 0.09116559031914227, + "grad_norm": 2.744771957397461, + "learning_rate": 1.980462047183287e-05, + "loss": 1.2886, + "step": 1454 + }, + { + "epoch": 0.0912282901749326, + "grad_norm": 2.8825128078460693, + "learning_rate": 1.9804220801338424e-05, + "loss": 1.2462, + "step": 1455 + }, + { + "epoch": 0.09129099003072293, + "grad_norm": 2.324662685394287, + "learning_rate": 1.9803820726517485e-05, + "loss": 1.4386, + "step": 1456 + }, + { + "epoch": 0.09135368988651327, + "grad_norm": 2.718273878097534, + "learning_rate": 1.9803420247386552e-05, + "loss": 1.3089, + "step": 1457 + }, + { + "epoch": 
0.0914163897423036, + "grad_norm": 2.5878074169158936, + "learning_rate": 1.9803019363962138e-05, + "loss": 1.3398, + "step": 1458 + }, + { + "epoch": 0.09147908959809392, + "grad_norm": 2.6416311264038086, + "learning_rate": 1.9802618076260784e-05, + "loss": 1.3523, + "step": 1459 + }, + { + "epoch": 0.09154178945388426, + "grad_norm": 2.8559048175811768, + "learning_rate": 1.9802216384299023e-05, + "loss": 1.2599, + "step": 1460 + }, + { + "epoch": 0.09160448930967459, + "grad_norm": 2.5532071590423584, + "learning_rate": 1.9801814288093438e-05, + "loss": 1.2618, + "step": 1461 + }, + { + "epoch": 0.09166718916546492, + "grad_norm": 2.781878709793091, + "learning_rate": 1.98014117876606e-05, + "loss": 1.1914, + "step": 1462 + }, + { + "epoch": 0.09172988902125526, + "grad_norm": 2.624030828475952, + "learning_rate": 1.980100888301711e-05, + "loss": 1.3742, + "step": 1463 + }, + { + "epoch": 0.09179258887704558, + "grad_norm": 2.5942561626434326, + "learning_rate": 1.9800605574179588e-05, + "loss": 1.3086, + "step": 1464 + }, + { + "epoch": 0.09185528873283591, + "grad_norm": 2.6381802558898926, + "learning_rate": 1.9800201861164665e-05, + "loss": 1.2857, + "step": 1465 + }, + { + "epoch": 0.09191798858862625, + "grad_norm": 2.6587975025177, + "learning_rate": 1.9799797743988985e-05, + "loss": 1.246, + "step": 1466 + }, + { + "epoch": 0.09198068844441658, + "grad_norm": 2.588510751724243, + "learning_rate": 1.9799393222669224e-05, + "loss": 1.313, + "step": 1467 + }, + { + "epoch": 0.0920433883002069, + "grad_norm": 2.892062187194824, + "learning_rate": 1.9798988297222057e-05, + "loss": 1.1671, + "step": 1468 + }, + { + "epoch": 0.09210608815599725, + "grad_norm": 2.5659425258636475, + "learning_rate": 1.9798582967664182e-05, + "loss": 1.1353, + "step": 1469 + }, + { + "epoch": 0.09216878801178757, + "grad_norm": 3.125676393508911, + "learning_rate": 1.979817723401232e-05, + "loss": 1.2898, + "step": 1470 + }, + { + "epoch": 0.0922314878675779, + "grad_norm": 2.9794394969940186, + "learning_rate": 1.9797771096283197e-05, + "loss": 1.447, + "step": 1471 + }, + { + "epoch": 0.09229418772336824, + "grad_norm": 2.7525291442871094, + "learning_rate": 1.979736455449357e-05, + "loss": 1.3192, + "step": 1472 + }, + { + "epoch": 0.09235688757915857, + "grad_norm": 2.6821529865264893, + "learning_rate": 1.9796957608660203e-05, + "loss": 1.3427, + "step": 1473 + }, + { + "epoch": 0.0924195874349489, + "grad_norm": 2.6564624309539795, + "learning_rate": 1.9796550258799874e-05, + "loss": 1.1887, + "step": 1474 + }, + { + "epoch": 0.09248228729073923, + "grad_norm": 2.781777858734131, + "learning_rate": 1.9796142504929384e-05, + "loss": 1.3218, + "step": 1475 + }, + { + "epoch": 0.09254498714652956, + "grad_norm": 2.9324300289154053, + "learning_rate": 1.9795734347065547e-05, + "loss": 1.2415, + "step": 1476 + }, + { + "epoch": 0.09260768700231989, + "grad_norm": 2.706868886947632, + "learning_rate": 1.9795325785225202e-05, + "loss": 1.3775, + "step": 1477 + }, + { + "epoch": 0.09267038685811023, + "grad_norm": 2.8032147884368896, + "learning_rate": 1.979491681942519e-05, + "loss": 1.2955, + "step": 1478 + }, + { + "epoch": 0.09273308671390056, + "grad_norm": 2.7941219806671143, + "learning_rate": 1.9794507449682383e-05, + "loss": 1.1957, + "step": 1479 + }, + { + "epoch": 0.09279578656969088, + "grad_norm": 2.656782627105713, + "learning_rate": 1.979409767601366e-05, + "loss": 1.1778, + "step": 1480 + }, + { + "epoch": 0.09285848642548122, + "grad_norm": 2.363661527633667, + "learning_rate": 
1.9793687498435922e-05, + "loss": 1.1214, + "step": 1481 + }, + { + "epoch": 0.09292118628127155, + "grad_norm": 2.6979634761810303, + "learning_rate": 1.979327691696608e-05, + "loss": 1.2456, + "step": 1482 + }, + { + "epoch": 0.09298388613706189, + "grad_norm": 2.8846402168273926, + "learning_rate": 1.979286593162108e-05, + "loss": 1.3592, + "step": 1483 + }, + { + "epoch": 0.09304658599285222, + "grad_norm": 2.596449375152588, + "learning_rate": 1.9792454542417854e-05, + "loss": 1.2503, + "step": 1484 + }, + { + "epoch": 0.09310928584864255, + "grad_norm": 2.9625988006591797, + "learning_rate": 1.9792042749373373e-05, + "loss": 1.3215, + "step": 1485 + }, + { + "epoch": 0.09317198570443289, + "grad_norm": 2.8382158279418945, + "learning_rate": 1.9791630552504627e-05, + "loss": 1.3864, + "step": 1486 + }, + { + "epoch": 0.09323468556022321, + "grad_norm": 2.6150314807891846, + "learning_rate": 1.9791217951828607e-05, + "loss": 1.3844, + "step": 1487 + }, + { + "epoch": 0.09329738541601354, + "grad_norm": 2.923497438430786, + "learning_rate": 1.979080494736233e-05, + "loss": 1.267, + "step": 1488 + }, + { + "epoch": 0.09336008527180388, + "grad_norm": 2.6177875995635986, + "learning_rate": 1.9790391539122832e-05, + "loss": 1.1404, + "step": 1489 + }, + { + "epoch": 0.09342278512759421, + "grad_norm": 2.8375937938690186, + "learning_rate": 1.9789977727127155e-05, + "loss": 1.0232, + "step": 1490 + }, + { + "epoch": 0.09348548498338453, + "grad_norm": 2.962240695953369, + "learning_rate": 1.9789563511392374e-05, + "loss": 1.2758, + "step": 1491 + }, + { + "epoch": 0.09354818483917487, + "grad_norm": 2.762779951095581, + "learning_rate": 1.9789148891935564e-05, + "loss": 1.4298, + "step": 1492 + }, + { + "epoch": 0.0936108846949652, + "grad_norm": 2.5157151222229004, + "learning_rate": 1.9788733868773826e-05, + "loss": 1.2015, + "step": 1493 + }, + { + "epoch": 0.09367358455075553, + "grad_norm": 2.6066272258758545, + "learning_rate": 1.9788318441924276e-05, + "loss": 1.3154, + "step": 1494 + }, + { + "epoch": 0.09373628440654587, + "grad_norm": 2.640362501144409, + "learning_rate": 1.9787902611404048e-05, + "loss": 1.2416, + "step": 1495 + }, + { + "epoch": 0.0937989842623362, + "grad_norm": 2.84089994430542, + "learning_rate": 1.9787486377230285e-05, + "loss": 1.2491, + "step": 1496 + }, + { + "epoch": 0.09386168411812652, + "grad_norm": 2.728041648864746, + "learning_rate": 1.978706973942016e-05, + "loss": 1.2244, + "step": 1497 + }, + { + "epoch": 0.09392438397391686, + "grad_norm": 2.6402523517608643, + "learning_rate": 1.978665269799085e-05, + "loss": 1.237, + "step": 1498 + }, + { + "epoch": 0.09398708382970719, + "grad_norm": 2.7456958293914795, + "learning_rate": 1.9786235252959555e-05, + "loss": 1.1761, + "step": 1499 + }, + { + "epoch": 0.09404978368549752, + "grad_norm": 3.0355119705200195, + "learning_rate": 1.978581740434349e-05, + "loss": 1.4087, + "step": 1500 + }, + { + "epoch": 0.09411248354128786, + "grad_norm": 2.6296749114990234, + "learning_rate": 1.978539915215989e-05, + "loss": 1.3487, + "step": 1501 + }, + { + "epoch": 0.09417518339707819, + "grad_norm": 2.560939311981201, + "learning_rate": 1.9784980496426e-05, + "loss": 1.2175, + "step": 1502 + }, + { + "epoch": 0.09423788325286851, + "grad_norm": 2.846165657043457, + "learning_rate": 1.9784561437159088e-05, + "loss": 1.2925, + "step": 1503 + }, + { + "epoch": 0.09430058310865885, + "grad_norm": 2.4781715869903564, + "learning_rate": 1.9784141974376434e-05, + "loss": 1.1537, + "step": 1504 + }, + { + "epoch": 
0.09436328296444918, + "grad_norm": 2.8001914024353027, + "learning_rate": 1.9783722108095338e-05, + "loss": 1.233, + "step": 1505 + }, + { + "epoch": 0.0944259828202395, + "grad_norm": 2.7152762413024902, + "learning_rate": 1.9783301838333116e-05, + "loss": 1.1657, + "step": 1506 + }, + { + "epoch": 0.09448868267602985, + "grad_norm": 2.9732282161712646, + "learning_rate": 1.97828811651071e-05, + "loss": 1.3022, + "step": 1507 + }, + { + "epoch": 0.09455138253182017, + "grad_norm": 2.8084194660186768, + "learning_rate": 1.9782460088434634e-05, + "loss": 1.2021, + "step": 1508 + }, + { + "epoch": 0.09461408238761052, + "grad_norm": 2.5715174674987793, + "learning_rate": 1.9782038608333088e-05, + "loss": 1.3698, + "step": 1509 + }, + { + "epoch": 0.09467678224340084, + "grad_norm": 2.600553512573242, + "learning_rate": 1.9781616724819846e-05, + "loss": 1.24, + "step": 1510 + }, + { + "epoch": 0.09473948209919117, + "grad_norm": 3.071939706802368, + "learning_rate": 1.9781194437912296e-05, + "loss": 1.2461, + "step": 1511 + }, + { + "epoch": 0.09480218195498151, + "grad_norm": 3.365741014480591, + "learning_rate": 1.9780771747627867e-05, + "loss": 0.985, + "step": 1512 + }, + { + "epoch": 0.09486488181077184, + "grad_norm": 2.6893062591552734, + "learning_rate": 1.978034865398398e-05, + "loss": 1.2985, + "step": 1513 + }, + { + "epoch": 0.09492758166656216, + "grad_norm": 2.6972970962524414, + "learning_rate": 1.977992515699809e-05, + "loss": 1.185, + "step": 1514 + }, + { + "epoch": 0.0949902815223525, + "grad_norm": 2.80254864692688, + "learning_rate": 1.9779501256687658e-05, + "loss": 1.121, + "step": 1515 + }, + { + "epoch": 0.09505298137814283, + "grad_norm": 2.654633045196533, + "learning_rate": 1.977907695307017e-05, + "loss": 1.3987, + "step": 1516 + }, + { + "epoch": 0.09511568123393316, + "grad_norm": 2.7771341800689697, + "learning_rate": 1.9778652246163115e-05, + "loss": 1.2748, + "step": 1517 + }, + { + "epoch": 0.0951783810897235, + "grad_norm": 2.7835640907287598, + "learning_rate": 1.977822713598402e-05, + "loss": 1.2597, + "step": 1518 + }, + { + "epoch": 0.09524108094551383, + "grad_norm": 2.8588428497314453, + "learning_rate": 1.977780162255041e-05, + "loss": 1.1863, + "step": 1519 + }, + { + "epoch": 0.09530378080130415, + "grad_norm": 2.619840621948242, + "learning_rate": 1.9777375705879833e-05, + "loss": 1.2812, + "step": 1520 + }, + { + "epoch": 0.0953664806570945, + "grad_norm": 2.661597728729248, + "learning_rate": 1.9776949385989856e-05, + "loss": 1.1531, + "step": 1521 + }, + { + "epoch": 0.09542918051288482, + "grad_norm": 2.558854579925537, + "learning_rate": 1.977652266289806e-05, + "loss": 1.2024, + "step": 1522 + }, + { + "epoch": 0.09549188036867515, + "grad_norm": 2.702543020248413, + "learning_rate": 1.9776095536622038e-05, + "loss": 1.2174, + "step": 1523 + }, + { + "epoch": 0.09555458022446549, + "grad_norm": 3.026150941848755, + "learning_rate": 1.9775668007179414e-05, + "loss": 1.0792, + "step": 1524 + }, + { + "epoch": 0.09561728008025581, + "grad_norm": 3.1097288131713867, + "learning_rate": 1.977524007458781e-05, + "loss": 1.101, + "step": 1525 + }, + { + "epoch": 0.09567997993604614, + "grad_norm": 2.733109712600708, + "learning_rate": 1.977481173886488e-05, + "loss": 1.1469, + "step": 1526 + }, + { + "epoch": 0.09574267979183648, + "grad_norm": 2.7386679649353027, + "learning_rate": 1.977438300002829e-05, + "loss": 1.3093, + "step": 1527 + }, + { + "epoch": 0.09580537964762681, + "grad_norm": 2.7877254486083984, + "learning_rate": 
1.9773953858095715e-05, + "loss": 1.195, + "step": 1528 + }, + { + "epoch": 0.09586807950341714, + "grad_norm": 3.204130172729492, + "learning_rate": 1.9773524313084857e-05, + "loss": 1.2732, + "step": 1529 + }, + { + "epoch": 0.09593077935920748, + "grad_norm": 3.001516819000244, + "learning_rate": 1.977309436501343e-05, + "loss": 1.3579, + "step": 1530 + }, + { + "epoch": 0.0959934792149978, + "grad_norm": 2.7684690952301025, + "learning_rate": 1.9772664013899166e-05, + "loss": 1.1052, + "step": 1531 + }, + { + "epoch": 0.09605617907078813, + "grad_norm": 2.6532318592071533, + "learning_rate": 1.977223325975981e-05, + "loss": 1.2707, + "step": 1532 + }, + { + "epoch": 0.09611887892657847, + "grad_norm": 2.8167693614959717, + "learning_rate": 1.9771802102613127e-05, + "loss": 1.1512, + "step": 1533 + }, + { + "epoch": 0.0961815787823688, + "grad_norm": 2.6880528926849365, + "learning_rate": 1.97713705424769e-05, + "loss": 1.0851, + "step": 1534 + }, + { + "epoch": 0.09624427863815913, + "grad_norm": 2.739461898803711, + "learning_rate": 1.9770938579368925e-05, + "loss": 1.2224, + "step": 1535 + }, + { + "epoch": 0.09630697849394947, + "grad_norm": 2.8189263343811035, + "learning_rate": 1.9770506213307012e-05, + "loss": 1.3089, + "step": 1536 + }, + { + "epoch": 0.09636967834973979, + "grad_norm": 2.5900466442108154, + "learning_rate": 1.9770073444309e-05, + "loss": 1.2714, + "step": 1537 + }, + { + "epoch": 0.09643237820553013, + "grad_norm": 2.9018375873565674, + "learning_rate": 1.9769640272392732e-05, + "loss": 1.1561, + "step": 1538 + }, + { + "epoch": 0.09649507806132046, + "grad_norm": 2.5549910068511963, + "learning_rate": 1.976920669757607e-05, + "loss": 1.3325, + "step": 1539 + }, + { + "epoch": 0.09655777791711079, + "grad_norm": 2.7235348224639893, + "learning_rate": 1.9768772719876903e-05, + "loss": 1.2882, + "step": 1540 + }, + { + "epoch": 0.09662047777290113, + "grad_norm": 2.694756031036377, + "learning_rate": 1.9768338339313117e-05, + "loss": 1.2178, + "step": 1541 + }, + { + "epoch": 0.09668317762869146, + "grad_norm": 2.930111885070801, + "learning_rate": 1.9767903555902633e-05, + "loss": 1.0747, + "step": 1542 + }, + { + "epoch": 0.09674587748448178, + "grad_norm": 2.5628817081451416, + "learning_rate": 1.9767468369663382e-05, + "loss": 1.2331, + "step": 1543 + }, + { + "epoch": 0.09680857734027212, + "grad_norm": 2.894416570663452, + "learning_rate": 1.9767032780613306e-05, + "loss": 1.2472, + "step": 1544 + }, + { + "epoch": 0.09687127719606245, + "grad_norm": 2.877049207687378, + "learning_rate": 1.9766596788770373e-05, + "loss": 1.3495, + "step": 1545 + }, + { + "epoch": 0.09693397705185278, + "grad_norm": 2.6378097534179688, + "learning_rate": 1.976616039415256e-05, + "loss": 1.1831, + "step": 1546 + }, + { + "epoch": 0.09699667690764312, + "grad_norm": 2.9535984992980957, + "learning_rate": 1.976572359677787e-05, + "loss": 1.1682, + "step": 1547 + }, + { + "epoch": 0.09705937676343344, + "grad_norm": 2.767835855484009, + "learning_rate": 1.9765286396664306e-05, + "loss": 1.3288, + "step": 1548 + }, + { + "epoch": 0.09712207661922377, + "grad_norm": 2.5437047481536865, + "learning_rate": 1.9764848793829912e-05, + "loss": 1.2698, + "step": 1549 + }, + { + "epoch": 0.09718477647501411, + "grad_norm": 2.9183361530303955, + "learning_rate": 1.9764410788292724e-05, + "loss": 1.0332, + "step": 1550 + }, + { + "epoch": 0.09724747633080444, + "grad_norm": 2.505079746246338, + "learning_rate": 1.9763972380070805e-05, + "loss": 1.2202, + "step": 1551 + }, + { + "epoch": 
0.09731017618659477, + "grad_norm": 2.7373573780059814, + "learning_rate": 1.9763533569182246e-05, + "loss": 1.072, + "step": 1552 + }, + { + "epoch": 0.0973728760423851, + "grad_norm": 2.7370121479034424, + "learning_rate": 1.976309435564513e-05, + "loss": 1.0877, + "step": 1553 + }, + { + "epoch": 0.09743557589817543, + "grad_norm": 2.7487246990203857, + "learning_rate": 1.9762654739477578e-05, + "loss": 1.2493, + "step": 1554 + }, + { + "epoch": 0.09749827575396576, + "grad_norm": 2.5896878242492676, + "learning_rate": 1.976221472069772e-05, + "loss": 1.3089, + "step": 1555 + }, + { + "epoch": 0.0975609756097561, + "grad_norm": 2.7908031940460205, + "learning_rate": 1.9761774299323696e-05, + "loss": 1.3573, + "step": 1556 + }, + { + "epoch": 0.09762367546554643, + "grad_norm": 2.587153196334839, + "learning_rate": 1.9761333475373677e-05, + "loss": 1.2444, + "step": 1557 + }, + { + "epoch": 0.09768637532133675, + "grad_norm": 2.9151766300201416, + "learning_rate": 1.9760892248865843e-05, + "loss": 1.1671, + "step": 1558 + }, + { + "epoch": 0.0977490751771271, + "grad_norm": 3.1204841136932373, + "learning_rate": 1.976045061981838e-05, + "loss": 1.2715, + "step": 1559 + }, + { + "epoch": 0.09781177503291742, + "grad_norm": 2.940019130706787, + "learning_rate": 1.976000858824951e-05, + "loss": 1.204, + "step": 1560 + }, + { + "epoch": 0.09787447488870775, + "grad_norm": 2.4367268085479736, + "learning_rate": 1.9759566154177462e-05, + "loss": 1.2755, + "step": 1561 + }, + { + "epoch": 0.09793717474449809, + "grad_norm": 2.4063847064971924, + "learning_rate": 1.9759123317620474e-05, + "loss": 1.4276, + "step": 1562 + }, + { + "epoch": 0.09799987460028842, + "grad_norm": 2.737002372741699, + "learning_rate": 1.975868007859682e-05, + "loss": 1.3191, + "step": 1563 + }, + { + "epoch": 0.09806257445607876, + "grad_norm": 2.7423157691955566, + "learning_rate": 1.9758236437124768e-05, + "loss": 1.1917, + "step": 1564 + }, + { + "epoch": 0.09812527431186908, + "grad_norm": 2.50583815574646, + "learning_rate": 1.9757792393222622e-05, + "loss": 1.3143, + "step": 1565 + }, + { + "epoch": 0.09818797416765941, + "grad_norm": 3.1019210815429688, + "learning_rate": 1.9757347946908692e-05, + "loss": 1.0374, + "step": 1566 + }, + { + "epoch": 0.09825067402344975, + "grad_norm": 2.8861401081085205, + "learning_rate": 1.975690309820131e-05, + "loss": 1.2621, + "step": 1567 + }, + { + "epoch": 0.09831337387924008, + "grad_norm": 2.723938226699829, + "learning_rate": 1.975645784711881e-05, + "loss": 1.3032, + "step": 1568 + }, + { + "epoch": 0.0983760737350304, + "grad_norm": 2.4586801528930664, + "learning_rate": 1.9756012193679567e-05, + "loss": 1.3352, + "step": 1569 + }, + { + "epoch": 0.09843877359082075, + "grad_norm": 2.493682622909546, + "learning_rate": 1.9755566137901953e-05, + "loss": 1.3001, + "step": 1570 + }, + { + "epoch": 0.09850147344661107, + "grad_norm": 2.6108407974243164, + "learning_rate": 1.975511967980437e-05, + "loss": 1.1257, + "step": 1571 + }, + { + "epoch": 0.0985641733024014, + "grad_norm": 2.9376046657562256, + "learning_rate": 1.9754672819405222e-05, + "loss": 1.1098, + "step": 1572 + }, + { + "epoch": 0.09862687315819174, + "grad_norm": 2.775693416595459, + "learning_rate": 1.9754225556722938e-05, + "loss": 1.1639, + "step": 1573 + }, + { + "epoch": 0.09868957301398207, + "grad_norm": 2.6261632442474365, + "learning_rate": 1.9753777891775973e-05, + "loss": 1.3013, + "step": 1574 + }, + { + "epoch": 0.0987522728697724, + "grad_norm": 2.694429397583008, + "learning_rate": 
1.9753329824582778e-05, + "loss": 1.1425, + "step": 1575 + }, + { + "epoch": 0.09881497272556274, + "grad_norm": 2.675666093826294, + "learning_rate": 1.9752881355161833e-05, + "loss": 1.2527, + "step": 1576 + }, + { + "epoch": 0.09887767258135306, + "grad_norm": 2.7810113430023193, + "learning_rate": 1.975243248353164e-05, + "loss": 1.4294, + "step": 1577 + }, + { + "epoch": 0.09894037243714339, + "grad_norm": 2.4274239540100098, + "learning_rate": 1.97519832097107e-05, + "loss": 1.2875, + "step": 1578 + }, + { + "epoch": 0.09900307229293373, + "grad_norm": 2.676206588745117, + "learning_rate": 1.975153353371755e-05, + "loss": 1.2689, + "step": 1579 + }, + { + "epoch": 0.09906577214872406, + "grad_norm": 2.7519147396087646, + "learning_rate": 1.975108345557073e-05, + "loss": 1.1906, + "step": 1580 + }, + { + "epoch": 0.09912847200451438, + "grad_norm": 2.5836923122406006, + "learning_rate": 1.9750632975288804e-05, + "loss": 1.2544, + "step": 1581 + }, + { + "epoch": 0.09919117186030472, + "grad_norm": 2.8128936290740967, + "learning_rate": 1.9750182092890346e-05, + "loss": 1.2158, + "step": 1582 + }, + { + "epoch": 0.09925387171609505, + "grad_norm": 2.6585779190063477, + "learning_rate": 1.9749730808393954e-05, + "loss": 1.1739, + "step": 1583 + }, + { + "epoch": 0.09931657157188538, + "grad_norm": 2.998366594314575, + "learning_rate": 1.9749279121818235e-05, + "loss": 1.1777, + "step": 1584 + }, + { + "epoch": 0.09937927142767572, + "grad_norm": 2.6667823791503906, + "learning_rate": 1.9748827033181825e-05, + "loss": 1.2926, + "step": 1585 + }, + { + "epoch": 0.09944197128346605, + "grad_norm": 2.8039743900299072, + "learning_rate": 1.974837454250336e-05, + "loss": 1.1405, + "step": 1586 + }, + { + "epoch": 0.09950467113925637, + "grad_norm": 2.7022907733917236, + "learning_rate": 1.97479216498015e-05, + "loss": 1.2008, + "step": 1587 + }, + { + "epoch": 0.09956737099504671, + "grad_norm": 2.877394914627075, + "learning_rate": 1.974746835509493e-05, + "loss": 1.322, + "step": 1588 + }, + { + "epoch": 0.09963007085083704, + "grad_norm": 3.104189157485962, + "learning_rate": 1.9747014658402336e-05, + "loss": 1.4125, + "step": 1589 + }, + { + "epoch": 0.09969277070662738, + "grad_norm": 3.145656108856201, + "learning_rate": 1.9746560559742433e-05, + "loss": 1.0586, + "step": 1590 + }, + { + "epoch": 0.09975547056241771, + "grad_norm": 2.8391902446746826, + "learning_rate": 1.974610605913395e-05, + "loss": 1.1874, + "step": 1591 + }, + { + "epoch": 0.09981817041820804, + "grad_norm": 2.9018492698669434, + "learning_rate": 1.974565115659562e-05, + "loss": 1.3189, + "step": 1592 + }, + { + "epoch": 0.09988087027399838, + "grad_norm": 2.652791976928711, + "learning_rate": 1.9745195852146216e-05, + "loss": 1.2778, + "step": 1593 + }, + { + "epoch": 0.0999435701297887, + "grad_norm": 2.8423564434051514, + "learning_rate": 1.974474014580451e-05, + "loss": 1.4273, + "step": 1594 + }, + { + "epoch": 0.10000626998557903, + "grad_norm": 2.7947731018066406, + "learning_rate": 1.9744284037589296e-05, + "loss": 1.2726, + "step": 1595 + }, + { + "epoch": 0.10006896984136937, + "grad_norm": 2.771385908126831, + "learning_rate": 1.9743827527519376e-05, + "loss": 1.34, + "step": 1596 + }, + { + "epoch": 0.1001316696971597, + "grad_norm": 2.8333005905151367, + "learning_rate": 1.974337061561359e-05, + "loss": 1.1338, + "step": 1597 + }, + { + "epoch": 0.10019436955295002, + "grad_norm": 2.625159740447998, + "learning_rate": 1.974291330189077e-05, + "loss": 1.2149, + "step": 1598 + }, + { + "epoch": 
0.10025706940874037, + "grad_norm": 2.558821678161621, + "learning_rate": 1.9742455586369786e-05, + "loss": 1.2343, + "step": 1599 + }, + { + "epoch": 0.10031976926453069, + "grad_norm": 2.7036871910095215, + "learning_rate": 1.9741997469069503e-05, + "loss": 1.3594, + "step": 1600 + }, + { + "epoch": 0.10038246912032102, + "grad_norm": 3.268862724304199, + "learning_rate": 1.9741538950008817e-05, + "loss": 1.3096, + "step": 1601 + }, + { + "epoch": 0.10044516897611136, + "grad_norm": 3.3234870433807373, + "learning_rate": 1.9741080029206646e-05, + "loss": 1.2374, + "step": 1602 + }, + { + "epoch": 0.10050786883190169, + "grad_norm": 2.6918065547943115, + "learning_rate": 1.9740620706681902e-05, + "loss": 1.2796, + "step": 1603 + }, + { + "epoch": 0.10057056868769201, + "grad_norm": 2.9625356197357178, + "learning_rate": 1.974016098245354e-05, + "loss": 1.2385, + "step": 1604 + }, + { + "epoch": 0.10063326854348235, + "grad_norm": 2.733334541320801, + "learning_rate": 1.9739700856540513e-05, + "loss": 1.1387, + "step": 1605 + }, + { + "epoch": 0.10069596839927268, + "grad_norm": 2.5298163890838623, + "learning_rate": 1.9739240328961794e-05, + "loss": 1.2248, + "step": 1606 + }, + { + "epoch": 0.10075866825506301, + "grad_norm": 2.6020255088806152, + "learning_rate": 1.973877939973638e-05, + "loss": 1.1025, + "step": 1607 + }, + { + "epoch": 0.10082136811085335, + "grad_norm": 3.0281710624694824, + "learning_rate": 1.973831806888328e-05, + "loss": 1.1738, + "step": 1608 + }, + { + "epoch": 0.10088406796664368, + "grad_norm": 2.946829080581665, + "learning_rate": 1.9737856336421516e-05, + "loss": 1.26, + "step": 1609 + }, + { + "epoch": 0.100946767822434, + "grad_norm": 2.733370780944824, + "learning_rate": 1.9737394202370132e-05, + "loss": 1.1214, + "step": 1610 + }, + { + "epoch": 0.10100946767822434, + "grad_norm": 2.7204952239990234, + "learning_rate": 1.9736931666748184e-05, + "loss": 1.1289, + "step": 1611 + }, + { + "epoch": 0.10107216753401467, + "grad_norm": 2.9305827617645264, + "learning_rate": 1.9736468729574755e-05, + "loss": 1.355, + "step": 1612 + }, + { + "epoch": 0.101134867389805, + "grad_norm": 2.724308490753174, + "learning_rate": 1.9736005390868923e-05, + "loss": 1.1955, + "step": 1613 + }, + { + "epoch": 0.10119756724559534, + "grad_norm": 2.986973762512207, + "learning_rate": 1.973554165064981e-05, + "loss": 1.2244, + "step": 1614 + }, + { + "epoch": 0.10126026710138566, + "grad_norm": 2.812016487121582, + "learning_rate": 1.9735077508936528e-05, + "loss": 1.1588, + "step": 1615 + }, + { + "epoch": 0.10132296695717599, + "grad_norm": 2.766767978668213, + "learning_rate": 1.973461296574823e-05, + "loss": 1.1365, + "step": 1616 + }, + { + "epoch": 0.10138566681296633, + "grad_norm": 3.0275230407714844, + "learning_rate": 1.9734148021104062e-05, + "loss": 1.1425, + "step": 1617 + }, + { + "epoch": 0.10144836666875666, + "grad_norm": 2.9274024963378906, + "learning_rate": 1.9733682675023207e-05, + "loss": 1.2606, + "step": 1618 + }, + { + "epoch": 0.101511066524547, + "grad_norm": 2.7344982624053955, + "learning_rate": 1.9733216927524855e-05, + "loss": 1.4751, + "step": 1619 + }, + { + "epoch": 0.10157376638033733, + "grad_norm": 2.777595043182373, + "learning_rate": 1.973275077862821e-05, + "loss": 1.1064, + "step": 1620 + }, + { + "epoch": 0.10163646623612765, + "grad_norm": 2.8037877082824707, + "learning_rate": 1.97322842283525e-05, + "loss": 1.1309, + "step": 1621 + }, + { + "epoch": 0.101699166091918, + "grad_norm": 2.928142547607422, + "learning_rate": 
1.9731817276716963e-05, + "loss": 1.1033, + "step": 1622 + }, + { + "epoch": 0.10176186594770832, + "grad_norm": 3.137070894241333, + "learning_rate": 1.9731349923740857e-05, + "loss": 1.1485, + "step": 1623 + }, + { + "epoch": 0.10182456580349865, + "grad_norm": 2.890127658843994, + "learning_rate": 1.9730882169443454e-05, + "loss": 1.3264, + "step": 1624 + }, + { + "epoch": 0.10188726565928899, + "grad_norm": 2.9901444911956787, + "learning_rate": 1.9730414013844044e-05, + "loss": 1.1404, + "step": 1625 + }, + { + "epoch": 0.10194996551507932, + "grad_norm": 2.6203951835632324, + "learning_rate": 1.9729945456961938e-05, + "loss": 1.2538, + "step": 1626 + }, + { + "epoch": 0.10201266537086964, + "grad_norm": 2.694471836090088, + "learning_rate": 1.9729476498816455e-05, + "loss": 1.2093, + "step": 1627 + }, + { + "epoch": 0.10207536522665998, + "grad_norm": 3.1988890171051025, + "learning_rate": 1.9729007139426935e-05, + "loss": 1.3538, + "step": 1628 + }, + { + "epoch": 0.10213806508245031, + "grad_norm": 2.533277988433838, + "learning_rate": 1.9728537378812738e-05, + "loss": 1.3266, + "step": 1629 + }, + { + "epoch": 0.10220076493824064, + "grad_norm": 2.808100700378418, + "learning_rate": 1.9728067216993234e-05, + "loss": 1.159, + "step": 1630 + }, + { + "epoch": 0.10226346479403098, + "grad_norm": 3.1164088249206543, + "learning_rate": 1.9727596653987813e-05, + "loss": 1.2812, + "step": 1631 + }, + { + "epoch": 0.1023261646498213, + "grad_norm": 2.809007167816162, + "learning_rate": 1.972712568981588e-05, + "loss": 1.0693, + "step": 1632 + }, + { + "epoch": 0.10238886450561163, + "grad_norm": 2.8748574256896973, + "learning_rate": 1.972665432449686e-05, + "loss": 1.1748, + "step": 1633 + }, + { + "epoch": 0.10245156436140197, + "grad_norm": 2.8818490505218506, + "learning_rate": 1.972618255805019e-05, + "loss": 1.1762, + "step": 1634 + }, + { + "epoch": 0.1025142642171923, + "grad_norm": 3.0112862586975098, + "learning_rate": 1.972571039049533e-05, + "loss": 1.2218, + "step": 1635 + }, + { + "epoch": 0.10257696407298263, + "grad_norm": 2.9098660945892334, + "learning_rate": 1.9725237821851745e-05, + "loss": 1.3107, + "step": 1636 + }, + { + "epoch": 0.10263966392877297, + "grad_norm": 2.914000988006592, + "learning_rate": 1.9724764852138926e-05, + "loss": 1.1978, + "step": 1637 + }, + { + "epoch": 0.1027023637845633, + "grad_norm": 3.0526537895202637, + "learning_rate": 1.9724291481376382e-05, + "loss": 1.2534, + "step": 1638 + }, + { + "epoch": 0.10276506364035362, + "grad_norm": 2.838385820388794, + "learning_rate": 1.9723817709583636e-05, + "loss": 1.2051, + "step": 1639 + }, + { + "epoch": 0.10282776349614396, + "grad_norm": 2.9168176651000977, + "learning_rate": 1.972334353678022e-05, + "loss": 1.1726, + "step": 1640 + }, + { + "epoch": 0.10289046335193429, + "grad_norm": 2.9031245708465576, + "learning_rate": 1.9722868962985693e-05, + "loss": 1.4055, + "step": 1641 + }, + { + "epoch": 0.10295316320772462, + "grad_norm": 2.930222272872925, + "learning_rate": 1.9722393988219625e-05, + "loss": 1.1278, + "step": 1642 + }, + { + "epoch": 0.10301586306351496, + "grad_norm": 2.8838579654693604, + "learning_rate": 1.9721918612501604e-05, + "loss": 1.0115, + "step": 1643 + }, + { + "epoch": 0.10307856291930528, + "grad_norm": 2.7734904289245605, + "learning_rate": 1.9721442835851234e-05, + "loss": 1.2634, + "step": 1644 + }, + { + "epoch": 0.10314126277509562, + "grad_norm": 2.612926483154297, + "learning_rate": 1.9720966658288135e-05, + "loss": 1.17, + "step": 1645 + }, + { + "epoch": 
0.10320396263088595, + "grad_norm": 2.9198052883148193, + "learning_rate": 1.972049007983195e-05, + "loss": 1.1544, + "step": 1646 + }, + { + "epoch": 0.10326666248667628, + "grad_norm": 2.8225622177124023, + "learning_rate": 1.972001310050233e-05, + "loss": 1.1769, + "step": 1647 + }, + { + "epoch": 0.10332936234246662, + "grad_norm": 2.95141863822937, + "learning_rate": 1.971953572031894e-05, + "loss": 1.3438, + "step": 1648 + }, + { + "epoch": 0.10339206219825695, + "grad_norm": 2.726926565170288, + "learning_rate": 1.9719057939301477e-05, + "loss": 1.171, + "step": 1649 + }, + { + "epoch": 0.10345476205404727, + "grad_norm": 2.9953489303588867, + "learning_rate": 1.971857975746964e-05, + "loss": 1.282, + "step": 1650 + }, + { + "epoch": 0.10351746190983761, + "grad_norm": 2.9687373638153076, + "learning_rate": 1.9718101174843148e-05, + "loss": 1.3112, + "step": 1651 + }, + { + "epoch": 0.10358016176562794, + "grad_norm": 2.661818265914917, + "learning_rate": 1.971762219144174e-05, + "loss": 1.1708, + "step": 1652 + }, + { + "epoch": 0.10364286162141827, + "grad_norm": 2.7536115646362305, + "learning_rate": 1.971714280728517e-05, + "loss": 1.3055, + "step": 1653 + }, + { + "epoch": 0.10370556147720861, + "grad_norm": 2.8205511569976807, + "learning_rate": 1.9716663022393202e-05, + "loss": 1.2024, + "step": 1654 + }, + { + "epoch": 0.10376826133299893, + "grad_norm": 2.713190793991089, + "learning_rate": 1.971618283678563e-05, + "loss": 1.0731, + "step": 1655 + }, + { + "epoch": 0.10383096118878926, + "grad_norm": 2.61726450920105, + "learning_rate": 1.9715702250482253e-05, + "loss": 1.0793, + "step": 1656 + }, + { + "epoch": 0.1038936610445796, + "grad_norm": 2.6959290504455566, + "learning_rate": 1.971522126350289e-05, + "loss": 1.1478, + "step": 1657 + }, + { + "epoch": 0.10395636090036993, + "grad_norm": 2.8554303646087646, + "learning_rate": 1.9714739875867378e-05, + "loss": 1.4537, + "step": 1658 + }, + { + "epoch": 0.10401906075616026, + "grad_norm": 2.4715776443481445, + "learning_rate": 1.9714258087595574e-05, + "loss": 1.2712, + "step": 1659 + }, + { + "epoch": 0.1040817606119506, + "grad_norm": 2.4936089515686035, + "learning_rate": 1.9713775898707337e-05, + "loss": 1.2211, + "step": 1660 + }, + { + "epoch": 0.10414446046774092, + "grad_norm": 2.7850353717803955, + "learning_rate": 1.9713293309222558e-05, + "loss": 1.1818, + "step": 1661 + }, + { + "epoch": 0.10420716032353125, + "grad_norm": 2.7316999435424805, + "learning_rate": 1.971281031916114e-05, + "loss": 1.2768, + "step": 1662 + }, + { + "epoch": 0.10426986017932159, + "grad_norm": 2.964200258255005, + "learning_rate": 1.9712326928543e-05, + "loss": 1.1588, + "step": 1663 + }, + { + "epoch": 0.10433256003511192, + "grad_norm": 2.5815505981445312, + "learning_rate": 1.971184313738807e-05, + "loss": 1.3423, + "step": 1664 + }, + { + "epoch": 0.10439525989090225, + "grad_norm": 2.3366873264312744, + "learning_rate": 1.971135894571631e-05, + "loss": 1.2909, + "step": 1665 + }, + { + "epoch": 0.10445795974669259, + "grad_norm": 2.704801321029663, + "learning_rate": 1.9710874353547683e-05, + "loss": 1.3715, + "step": 1666 + }, + { + "epoch": 0.10452065960248291, + "grad_norm": 2.8708925247192383, + "learning_rate": 1.9710389360902172e-05, + "loss": 1.2319, + "step": 1667 + }, + { + "epoch": 0.10458335945827324, + "grad_norm": 2.7967939376831055, + "learning_rate": 1.9709903967799778e-05, + "loss": 1.0511, + "step": 1668 + }, + { + "epoch": 0.10464605931406358, + "grad_norm": 2.979365348815918, + "learning_rate": 
1.9709418174260523e-05, + "loss": 1.3309, + "step": 1669 + }, + { + "epoch": 0.10470875916985391, + "grad_norm": 2.767913818359375, + "learning_rate": 1.9708931980304435e-05, + "loss": 1.1927, + "step": 1670 + }, + { + "epoch": 0.10477145902564425, + "grad_norm": 2.629612922668457, + "learning_rate": 1.970844538595157e-05, + "loss": 1.0937, + "step": 1671 + }, + { + "epoch": 0.10483415888143457, + "grad_norm": 2.6951539516448975, + "learning_rate": 1.9707958391221994e-05, + "loss": 1.2281, + "step": 1672 + }, + { + "epoch": 0.1048968587372249, + "grad_norm": 2.6814723014831543, + "learning_rate": 1.9707470996135793e-05, + "loss": 1.1348, + "step": 1673 + }, + { + "epoch": 0.10495955859301524, + "grad_norm": 2.905695676803589, + "learning_rate": 1.9706983200713056e-05, + "loss": 1.2243, + "step": 1674 + }, + { + "epoch": 0.10502225844880557, + "grad_norm": 2.79339599609375, + "learning_rate": 1.9706495004973914e-05, + "loss": 1.1803, + "step": 1675 + }, + { + "epoch": 0.1050849583045959, + "grad_norm": 2.789321184158325, + "learning_rate": 1.9706006408938495e-05, + "loss": 1.2248, + "step": 1676 + }, + { + "epoch": 0.10514765816038624, + "grad_norm": 2.58943510055542, + "learning_rate": 1.9705517412626943e-05, + "loss": 1.3549, + "step": 1677 + }, + { + "epoch": 0.10521035801617656, + "grad_norm": 2.986433267593384, + "learning_rate": 1.9705028016059435e-05, + "loss": 1.2822, + "step": 1678 + }, + { + "epoch": 0.10527305787196689, + "grad_norm": 3.2546539306640625, + "learning_rate": 1.9704538219256143e-05, + "loss": 1.1921, + "step": 1679 + }, + { + "epoch": 0.10533575772775723, + "grad_norm": 2.9560794830322266, + "learning_rate": 1.9704048022237272e-05, + "loss": 1.1979, + "step": 1680 + }, + { + "epoch": 0.10539845758354756, + "grad_norm": 2.771454095840454, + "learning_rate": 1.9703557425023038e-05, + "loss": 1.392, + "step": 1681 + }, + { + "epoch": 0.10546115743933789, + "grad_norm": 2.436324119567871, + "learning_rate": 1.9703066427633672e-05, + "loss": 1.218, + "step": 1682 + }, + { + "epoch": 0.10552385729512823, + "grad_norm": 3.122758388519287, + "learning_rate": 1.970257503008942e-05, + "loss": 1.1335, + "step": 1683 + }, + { + "epoch": 0.10558655715091855, + "grad_norm": 2.765817165374756, + "learning_rate": 1.9702083232410553e-05, + "loss": 1.1678, + "step": 1684 + }, + { + "epoch": 0.10564925700670888, + "grad_norm": 2.5778284072875977, + "learning_rate": 1.9701591034617352e-05, + "loss": 1.3685, + "step": 1685 + }, + { + "epoch": 0.10571195686249922, + "grad_norm": 2.8820884227752686, + "learning_rate": 1.9701098436730108e-05, + "loss": 1.4609, + "step": 1686 + }, + { + "epoch": 0.10577465671828955, + "grad_norm": 2.385669231414795, + "learning_rate": 1.970060543876914e-05, + "loss": 1.2409, + "step": 1687 + }, + { + "epoch": 0.10583735657407987, + "grad_norm": 2.81585431098938, + "learning_rate": 1.9700112040754784e-05, + "loss": 1.1961, + "step": 1688 + }, + { + "epoch": 0.10590005642987022, + "grad_norm": 2.882650852203369, + "learning_rate": 1.9699618242707387e-05, + "loss": 1.0917, + "step": 1689 + }, + { + "epoch": 0.10596275628566054, + "grad_norm": 2.7467424869537354, + "learning_rate": 1.96991240446473e-05, + "loss": 1.32, + "step": 1690 + }, + { + "epoch": 0.10602545614145087, + "grad_norm": 2.9251673221588135, + "learning_rate": 1.9698629446594924e-05, + "loss": 1.2887, + "step": 1691 + }, + { + "epoch": 0.10608815599724121, + "grad_norm": 2.9416146278381348, + "learning_rate": 1.969813444857064e-05, + "loss": 1.2698, + "step": 1692 + }, + { + "epoch": 
0.10615085585303154, + "grad_norm": 2.976478338241577, + "learning_rate": 1.969763905059487e-05, + "loss": 1.0886, + "step": 1693 + }, + { + "epoch": 0.10621355570882186, + "grad_norm": 2.9695241451263428, + "learning_rate": 1.9697143252688044e-05, + "loss": 1.258, + "step": 1694 + }, + { + "epoch": 0.1062762555646122, + "grad_norm": 2.443983316421509, + "learning_rate": 1.9696647054870604e-05, + "loss": 1.3608, + "step": 1695 + }, + { + "epoch": 0.10633895542040253, + "grad_norm": 2.797459840774536, + "learning_rate": 1.9696150457163014e-05, + "loss": 1.3669, + "step": 1696 + }, + { + "epoch": 0.10640165527619286, + "grad_norm": 2.7299747467041016, + "learning_rate": 1.969565345958576e-05, + "loss": 1.2892, + "step": 1697 + }, + { + "epoch": 0.1064643551319832, + "grad_norm": 2.797177791595459, + "learning_rate": 1.969515606215933e-05, + "loss": 1.3779, + "step": 1698 + }, + { + "epoch": 0.10652705498777353, + "grad_norm": 2.969921350479126, + "learning_rate": 1.9694658264904243e-05, + "loss": 1.24, + "step": 1699 + }, + { + "epoch": 0.10658975484356387, + "grad_norm": 2.566589593887329, + "learning_rate": 1.9694160067841026e-05, + "loss": 1.2575, + "step": 1700 + }, + { + "epoch": 0.1066524546993542, + "grad_norm": 3.185269594192505, + "learning_rate": 1.9693661470990225e-05, + "loss": 1.2909, + "step": 1701 + }, + { + "epoch": 0.10671515455514452, + "grad_norm": 2.6711819171905518, + "learning_rate": 1.96931624743724e-05, + "loss": 1.2734, + "step": 1702 + }, + { + "epoch": 0.10677785441093486, + "grad_norm": 2.8629698753356934, + "learning_rate": 1.969266307800813e-05, + "loss": 1.2724, + "step": 1703 + }, + { + "epoch": 0.10684055426672519, + "grad_norm": 2.8807342052459717, + "learning_rate": 1.9692163281918016e-05, + "loss": 1.1181, + "step": 1704 + }, + { + "epoch": 0.10690325412251551, + "grad_norm": 2.5635645389556885, + "learning_rate": 1.969166308612266e-05, + "loss": 1.2334, + "step": 1705 + }, + { + "epoch": 0.10696595397830586, + "grad_norm": 2.9059884548187256, + "learning_rate": 1.9691162490642698e-05, + "loss": 1.1941, + "step": 1706 + }, + { + "epoch": 0.10702865383409618, + "grad_norm": 2.9526543617248535, + "learning_rate": 1.9690661495498774e-05, + "loss": 1.1781, + "step": 1707 + }, + { + "epoch": 0.10709135368988651, + "grad_norm": 2.826979398727417, + "learning_rate": 1.969016010071154e-05, + "loss": 1.3417, + "step": 1708 + }, + { + "epoch": 0.10715405354567685, + "grad_norm": 2.9987378120422363, + "learning_rate": 1.9689658306301687e-05, + "loss": 1.2421, + "step": 1709 + }, + { + "epoch": 0.10721675340146718, + "grad_norm": 2.51350998878479, + "learning_rate": 1.9689156112289898e-05, + "loss": 1.3911, + "step": 1710 + }, + { + "epoch": 0.1072794532572575, + "grad_norm": 2.6440463066101074, + "learning_rate": 1.9688653518696886e-05, + "loss": 1.3664, + "step": 1711 + }, + { + "epoch": 0.10734215311304784, + "grad_norm": 2.8359732627868652, + "learning_rate": 1.9688150525543387e-05, + "loss": 1.279, + "step": 1712 + }, + { + "epoch": 0.10740485296883817, + "grad_norm": 2.3533053398132324, + "learning_rate": 1.968764713285013e-05, + "loss": 1.2305, + "step": 1713 + }, + { + "epoch": 0.1074675528246285, + "grad_norm": 2.750746726989746, + "learning_rate": 1.9687143340637885e-05, + "loss": 1.1814, + "step": 1714 + }, + { + "epoch": 0.10753025268041884, + "grad_norm": 2.7605299949645996, + "learning_rate": 1.968663914892743e-05, + "loss": 1.1286, + "step": 1715 + }, + { + "epoch": 0.10759295253620917, + "grad_norm": 2.849524974822998, + "learning_rate": 
1.9686134557739548e-05, + "loss": 1.195, + "step": 1716 + }, + { + "epoch": 0.10765565239199949, + "grad_norm": 2.9283881187438965, + "learning_rate": 1.9685629567095056e-05, + "loss": 1.2437, + "step": 1717 + }, + { + "epoch": 0.10771835224778983, + "grad_norm": 2.8839948177337646, + "learning_rate": 1.9685124177014776e-05, + "loss": 1.2637, + "step": 1718 + }, + { + "epoch": 0.10778105210358016, + "grad_norm": 2.9644837379455566, + "learning_rate": 1.9684618387519552e-05, + "loss": 1.1888, + "step": 1719 + }, + { + "epoch": 0.10784375195937049, + "grad_norm": 2.527780771255493, + "learning_rate": 1.9684112198630246e-05, + "loss": 1.1652, + "step": 1720 + }, + { + "epoch": 0.10790645181516083, + "grad_norm": 3.044065475463867, + "learning_rate": 1.9683605610367723e-05, + "loss": 1.1993, + "step": 1721 + }, + { + "epoch": 0.10796915167095116, + "grad_norm": 2.9031217098236084, + "learning_rate": 1.968309862275289e-05, + "loss": 1.3111, + "step": 1722 + }, + { + "epoch": 0.10803185152674148, + "grad_norm": 2.759587049484253, + "learning_rate": 1.9682591235806643e-05, + "loss": 1.1535, + "step": 1723 + }, + { + "epoch": 0.10809455138253182, + "grad_norm": 2.8115179538726807, + "learning_rate": 1.9682083449549912e-05, + "loss": 1.2029, + "step": 1724 + }, + { + "epoch": 0.10815725123832215, + "grad_norm": 3.047943592071533, + "learning_rate": 1.9681575264003635e-05, + "loss": 1.2079, + "step": 1725 + }, + { + "epoch": 0.10821995109411249, + "grad_norm": 2.4974517822265625, + "learning_rate": 1.9681066679188772e-05, + "loss": 1.2131, + "step": 1726 + }, + { + "epoch": 0.10828265094990282, + "grad_norm": 2.7367663383483887, + "learning_rate": 1.9680557695126294e-05, + "loss": 1.1631, + "step": 1727 + }, + { + "epoch": 0.10834535080569314, + "grad_norm": 3.2132043838500977, + "learning_rate": 1.9680048311837198e-05, + "loss": 1.2944, + "step": 1728 + }, + { + "epoch": 0.10840805066148348, + "grad_norm": 2.975548505783081, + "learning_rate": 1.9679538529342487e-05, + "loss": 1.232, + "step": 1729 + }, + { + "epoch": 0.10847075051727381, + "grad_norm": 2.8218376636505127, + "learning_rate": 1.967902834766318e-05, + "loss": 1.1334, + "step": 1730 + }, + { + "epoch": 0.10853345037306414, + "grad_norm": 2.9942121505737305, + "learning_rate": 1.9678517766820324e-05, + "loss": 1.0102, + "step": 1731 + }, + { + "epoch": 0.10859615022885448, + "grad_norm": 2.7313923835754395, + "learning_rate": 1.9678006786834973e-05, + "loss": 1.2143, + "step": 1732 + }, + { + "epoch": 0.1086588500846448, + "grad_norm": 2.5642199516296387, + "learning_rate": 1.96774954077282e-05, + "loss": 1.2979, + "step": 1733 + }, + { + "epoch": 0.10872154994043513, + "grad_norm": 2.537075996398926, + "learning_rate": 1.9676983629521092e-05, + "loss": 1.2122, + "step": 1734 + }, + { + "epoch": 0.10878424979622547, + "grad_norm": 2.747938394546509, + "learning_rate": 1.9676471452234754e-05, + "loss": 1.2871, + "step": 1735 + }, + { + "epoch": 0.1088469496520158, + "grad_norm": 2.9008193016052246, + "learning_rate": 1.9675958875890314e-05, + "loss": 1.1711, + "step": 1736 + }, + { + "epoch": 0.10890964950780613, + "grad_norm": 2.5695269107818604, + "learning_rate": 1.967544590050891e-05, + "loss": 1.3546, + "step": 1737 + }, + { + "epoch": 0.10897234936359647, + "grad_norm": 2.9961202144622803, + "learning_rate": 1.967493252611169e-05, + "loss": 1.2296, + "step": 1738 + }, + { + "epoch": 0.1090350492193868, + "grad_norm": 2.49649977684021, + "learning_rate": 1.9674418752719835e-05, + "loss": 1.4048, + "step": 1739 + }, + { + "epoch": 
0.10909774907517712, + "grad_norm": 2.5856053829193115, + "learning_rate": 1.9673904580354523e-05, + "loss": 1.2397, + "step": 1740 + }, + { + "epoch": 0.10916044893096746, + "grad_norm": 2.6442830562591553, + "learning_rate": 1.9673390009036967e-05, + "loss": 1.2308, + "step": 1741 + }, + { + "epoch": 0.10922314878675779, + "grad_norm": 2.4895949363708496, + "learning_rate": 1.967287503878838e-05, + "loss": 1.3172, + "step": 1742 + }, + { + "epoch": 0.10928584864254812, + "grad_norm": 2.8579931259155273, + "learning_rate": 1.967235966963001e-05, + "loss": 1.2304, + "step": 1743 + }, + { + "epoch": 0.10934854849833846, + "grad_norm": 2.649953603744507, + "learning_rate": 1.9671843901583104e-05, + "loss": 1.3762, + "step": 1744 + }, + { + "epoch": 0.10941124835412878, + "grad_norm": 2.731529951095581, + "learning_rate": 1.9671327734668927e-05, + "loss": 1.2551, + "step": 1745 + }, + { + "epoch": 0.10947394820991911, + "grad_norm": 2.9530577659606934, + "learning_rate": 1.967081116890878e-05, + "loss": 1.0949, + "step": 1746 + }, + { + "epoch": 0.10953664806570945, + "grad_norm": 2.8025710582733154, + "learning_rate": 1.9670294204323955e-05, + "loss": 1.1351, + "step": 1747 + }, + { + "epoch": 0.10959934792149978, + "grad_norm": 2.6501224040985107, + "learning_rate": 1.9669776840935774e-05, + "loss": 1.2878, + "step": 1748 + }, + { + "epoch": 0.1096620477772901, + "grad_norm": 3.3007919788360596, + "learning_rate": 1.9669259078765573e-05, + "loss": 1.2642, + "step": 1749 + }, + { + "epoch": 0.10972474763308045, + "grad_norm": 3.1355156898498535, + "learning_rate": 1.9668740917834706e-05, + "loss": 1.3091, + "step": 1750 + }, + { + "epoch": 0.10978744748887077, + "grad_norm": 2.9811623096466064, + "learning_rate": 1.966822235816454e-05, + "loss": 1.1474, + "step": 1751 + }, + { + "epoch": 0.1098501473446611, + "grad_norm": 2.820402145385742, + "learning_rate": 1.9667703399776464e-05, + "loss": 1.2422, + "step": 1752 + }, + { + "epoch": 0.10991284720045144, + "grad_norm": 2.71530818939209, + "learning_rate": 1.9667184042691877e-05, + "loss": 1.2202, + "step": 1753 + }, + { + "epoch": 0.10997554705624177, + "grad_norm": 2.8746466636657715, + "learning_rate": 1.9666664286932198e-05, + "loss": 1.1436, + "step": 1754 + }, + { + "epoch": 0.11003824691203211, + "grad_norm": 2.663334846496582, + "learning_rate": 1.9666144132518862e-05, + "loss": 1.1061, + "step": 1755 + }, + { + "epoch": 0.11010094676782244, + "grad_norm": 2.8481552600860596, + "learning_rate": 1.9665623579473315e-05, + "loss": 1.1836, + "step": 1756 + }, + { + "epoch": 0.11016364662361276, + "grad_norm": 2.9686453342437744, + "learning_rate": 1.9665102627817037e-05, + "loss": 1.0341, + "step": 1757 + }, + { + "epoch": 0.1102263464794031, + "grad_norm": 2.524933338165283, + "learning_rate": 1.96645812775715e-05, + "loss": 1.1842, + "step": 1758 + }, + { + "epoch": 0.11028904633519343, + "grad_norm": 2.576896905899048, + "learning_rate": 1.966405952875821e-05, + "loss": 1.2739, + "step": 1759 + }, + { + "epoch": 0.11035174619098376, + "grad_norm": 2.7993216514587402, + "learning_rate": 1.9663537381398682e-05, + "loss": 1.1092, + "step": 1760 + }, + { + "epoch": 0.1104144460467741, + "grad_norm": 3.1049580574035645, + "learning_rate": 1.9663014835514454e-05, + "loss": 1.2641, + "step": 1761 + }, + { + "epoch": 0.11047714590256442, + "grad_norm": 3.1917619705200195, + "learning_rate": 1.966249189112707e-05, + "loss": 1.1728, + "step": 1762 + }, + { + "epoch": 0.11053984575835475, + "grad_norm": 2.7090866565704346, + "learning_rate": 
1.96619685482581e-05, + "loss": 1.2336, + "step": 1763 + }, + { + "epoch": 0.11060254561414509, + "grad_norm": 3.1297669410705566, + "learning_rate": 1.9661444806929124e-05, + "loss": 1.2811, + "step": 1764 + }, + { + "epoch": 0.11066524546993542, + "grad_norm": 2.8334314823150635, + "learning_rate": 1.9660920667161738e-05, + "loss": 1.1922, + "step": 1765 + }, + { + "epoch": 0.11072794532572575, + "grad_norm": 2.863182544708252, + "learning_rate": 1.9660396128977566e-05, + "loss": 1.2843, + "step": 1766 + }, + { + "epoch": 0.11079064518151609, + "grad_norm": 3.013759136199951, + "learning_rate": 1.9659871192398237e-05, + "loss": 1.2757, + "step": 1767 + }, + { + "epoch": 0.11085334503730641, + "grad_norm": 2.7977359294891357, + "learning_rate": 1.9659345857445394e-05, + "loss": 1.1336, + "step": 1768 + }, + { + "epoch": 0.11091604489309674, + "grad_norm": 3.0644099712371826, + "learning_rate": 1.965882012414071e-05, + "loss": 1.074, + "step": 1769 + }, + { + "epoch": 0.11097874474888708, + "grad_norm": 2.737210273742676, + "learning_rate": 1.965829399250586e-05, + "loss": 1.1738, + "step": 1770 + }, + { + "epoch": 0.11104144460467741, + "grad_norm": 2.5172605514526367, + "learning_rate": 1.9657767462562544e-05, + "loss": 1.4275, + "step": 1771 + }, + { + "epoch": 0.11110414446046774, + "grad_norm": 3.028228282928467, + "learning_rate": 1.9657240534332478e-05, + "loss": 1.2457, + "step": 1772 + }, + { + "epoch": 0.11116684431625808, + "grad_norm": 2.842363119125366, + "learning_rate": 1.965671320783739e-05, + "loss": 1.321, + "step": 1773 + }, + { + "epoch": 0.1112295441720484, + "grad_norm": 2.7538788318634033, + "learning_rate": 1.9656185483099027e-05, + "loss": 1.2383, + "step": 1774 + }, + { + "epoch": 0.11129224402783873, + "grad_norm": 2.937202215194702, + "learning_rate": 1.9655657360139152e-05, + "loss": 1.3088, + "step": 1775 + }, + { + "epoch": 0.11135494388362907, + "grad_norm": 2.6083788871765137, + "learning_rate": 1.965512883897955e-05, + "loss": 1.2762, + "step": 1776 + }, + { + "epoch": 0.1114176437394194, + "grad_norm": 2.776303768157959, + "learning_rate": 1.9654599919642008e-05, + "loss": 1.2701, + "step": 1777 + }, + { + "epoch": 0.11148034359520972, + "grad_norm": 2.8173046112060547, + "learning_rate": 1.9654070602148346e-05, + "loss": 1.1401, + "step": 1778 + }, + { + "epoch": 0.11154304345100007, + "grad_norm": 2.690161943435669, + "learning_rate": 1.9653540886520387e-05, + "loss": 1.1895, + "step": 1779 + }, + { + "epoch": 0.11160574330679039, + "grad_norm": 2.70603084564209, + "learning_rate": 1.9653010772779986e-05, + "loss": 1.1786, + "step": 1780 + }, + { + "epoch": 0.11166844316258073, + "grad_norm": 2.595276355743408, + "learning_rate": 1.9652480260948995e-05, + "loss": 1.2227, + "step": 1781 + }, + { + "epoch": 0.11173114301837106, + "grad_norm": 2.89900803565979, + "learning_rate": 1.96519493510493e-05, + "loss": 1.2092, + "step": 1782 + }, + { + "epoch": 0.11179384287416139, + "grad_norm": 2.993462085723877, + "learning_rate": 1.965141804310279e-05, + "loss": 1.2133, + "step": 1783 + }, + { + "epoch": 0.11185654272995173, + "grad_norm": 2.607696294784546, + "learning_rate": 1.9650886337131378e-05, + "loss": 1.2151, + "step": 1784 + }, + { + "epoch": 0.11191924258574205, + "grad_norm": 3.1491546630859375, + "learning_rate": 1.9650354233156994e-05, + "loss": 1.3438, + "step": 1785 + }, + { + "epoch": 0.11198194244153238, + "grad_norm": 2.742859125137329, + "learning_rate": 1.9649821731201578e-05, + "loss": 1.0821, + "step": 1786 + }, + { + "epoch": 
0.11204464229732272, + "grad_norm": 2.7792420387268066, + "learning_rate": 1.9649288831287095e-05, + "loss": 1.2527, + "step": 1787 + }, + { + "epoch": 0.11210734215311305, + "grad_norm": 2.560257911682129, + "learning_rate": 1.9648755533435517e-05, + "loss": 1.3153, + "step": 1788 + }, + { + "epoch": 0.11217004200890338, + "grad_norm": 3.0270395278930664, + "learning_rate": 1.964822183766884e-05, + "loss": 1.155, + "step": 1789 + }, + { + "epoch": 0.11223274186469372, + "grad_norm": 2.9100937843322754, + "learning_rate": 1.964768774400907e-05, + "loss": 1.3593, + "step": 1790 + }, + { + "epoch": 0.11229544172048404, + "grad_norm": 2.7946977615356445, + "learning_rate": 1.9647153252478243e-05, + "loss": 1.1896, + "step": 1791 + }, + { + "epoch": 0.11235814157627437, + "grad_norm": 2.8262407779693604, + "learning_rate": 1.9646618363098388e-05, + "loss": 1.1128, + "step": 1792 + }, + { + "epoch": 0.11242084143206471, + "grad_norm": 3.0834388732910156, + "learning_rate": 1.9646083075891575e-05, + "loss": 1.2419, + "step": 1793 + }, + { + "epoch": 0.11248354128785504, + "grad_norm": 3.1527440547943115, + "learning_rate": 1.964554739087987e-05, + "loss": 0.972, + "step": 1794 + }, + { + "epoch": 0.11254624114364536, + "grad_norm": 2.8445541858673096, + "learning_rate": 1.9645011308085374e-05, + "loss": 1.0381, + "step": 1795 + }, + { + "epoch": 0.1126089409994357, + "grad_norm": 3.300703525543213, + "learning_rate": 1.964447482753019e-05, + "loss": 1.3016, + "step": 1796 + }, + { + "epoch": 0.11267164085522603, + "grad_norm": 2.595257520675659, + "learning_rate": 1.9643937949236442e-05, + "loss": 1.2219, + "step": 1797 + }, + { + "epoch": 0.11273434071101636, + "grad_norm": 2.4267446994781494, + "learning_rate": 1.964340067322627e-05, + "loss": 1.2533, + "step": 1798 + }, + { + "epoch": 0.1127970405668067, + "grad_norm": 2.6247310638427734, + "learning_rate": 1.9642862999521836e-05, + "loss": 1.1875, + "step": 1799 + }, + { + "epoch": 0.11285974042259703, + "grad_norm": 2.9933948516845703, + "learning_rate": 1.964232492814531e-05, + "loss": 1.2329, + "step": 1800 + }, + { + "epoch": 0.11292244027838735, + "grad_norm": 2.88084077835083, + "learning_rate": 1.9641786459118882e-05, + "loss": 1.1593, + "step": 1801 + }, + { + "epoch": 0.1129851401341777, + "grad_norm": 2.630871534347534, + "learning_rate": 1.9641247592464757e-05, + "loss": 1.2906, + "step": 1802 + }, + { + "epoch": 0.11304783998996802, + "grad_norm": 2.855891466140747, + "learning_rate": 1.9640708328205164e-05, + "loss": 1.2687, + "step": 1803 + }, + { + "epoch": 0.11311053984575835, + "grad_norm": 2.8953709602355957, + "learning_rate": 1.964016866636234e-05, + "loss": 1.1547, + "step": 1804 + }, + { + "epoch": 0.11317323970154869, + "grad_norm": 2.8181917667388916, + "learning_rate": 1.9639628606958535e-05, + "loss": 1.1843, + "step": 1805 + }, + { + "epoch": 0.11323593955733902, + "grad_norm": 2.7089645862579346, + "learning_rate": 1.9639088150016027e-05, + "loss": 1.2556, + "step": 1806 + }, + { + "epoch": 0.11329863941312936, + "grad_norm": 2.701144218444824, + "learning_rate": 1.96385472955571e-05, + "loss": 1.1517, + "step": 1807 + }, + { + "epoch": 0.11336133926891968, + "grad_norm": 2.8642730712890625, + "learning_rate": 1.9638006043604067e-05, + "loss": 1.3128, + "step": 1808 + }, + { + "epoch": 0.11342403912471001, + "grad_norm": 3.2057785987854004, + "learning_rate": 1.963746439417924e-05, + "loss": 1.0491, + "step": 1809 + }, + { + "epoch": 0.11348673898050035, + "grad_norm": 2.905912399291992, + "learning_rate": 
1.963692234730496e-05, + "loss": 1.2183, + "step": 1810 + }, + { + "epoch": 0.11354943883629068, + "grad_norm": 2.945038318634033, + "learning_rate": 1.9636379903003587e-05, + "loss": 1.326, + "step": 1811 + }, + { + "epoch": 0.113612138692081, + "grad_norm": 3.062992572784424, + "learning_rate": 1.963583706129748e-05, + "loss": 1.0586, + "step": 1812 + }, + { + "epoch": 0.11367483854787135, + "grad_norm": 2.8716282844543457, + "learning_rate": 1.9635293822209033e-05, + "loss": 1.3158, + "step": 1813 + }, + { + "epoch": 0.11373753840366167, + "grad_norm": 2.8216946125030518, + "learning_rate": 1.963475018576065e-05, + "loss": 1.2876, + "step": 1814 + }, + { + "epoch": 0.113800238259452, + "grad_norm": 2.9461042881011963, + "learning_rate": 1.963420615197475e-05, + "loss": 1.0914, + "step": 1815 + }, + { + "epoch": 0.11386293811524234, + "grad_norm": 3.080146312713623, + "learning_rate": 1.963366172087376e-05, + "loss": 1.093, + "step": 1816 + }, + { + "epoch": 0.11392563797103267, + "grad_norm": 2.548718214035034, + "learning_rate": 1.9633116892480148e-05, + "loss": 1.2645, + "step": 1817 + }, + { + "epoch": 0.113988337826823, + "grad_norm": 2.9466707706451416, + "learning_rate": 1.963257166681637e-05, + "loss": 1.1223, + "step": 1818 + }, + { + "epoch": 0.11405103768261333, + "grad_norm": 2.885998249053955, + "learning_rate": 1.9632026043904918e-05, + "loss": 1.0654, + "step": 1819 + }, + { + "epoch": 0.11411373753840366, + "grad_norm": 2.71754789352417, + "learning_rate": 1.9631480023768295e-05, + "loss": 1.4301, + "step": 1820 + }, + { + "epoch": 0.11417643739419399, + "grad_norm": 2.8236472606658936, + "learning_rate": 1.963093360642901e-05, + "loss": 1.1222, + "step": 1821 + }, + { + "epoch": 0.11423913724998433, + "grad_norm": 2.8103203773498535, + "learning_rate": 1.96303867919096e-05, + "loss": 1.3148, + "step": 1822 + }, + { + "epoch": 0.11430183710577466, + "grad_norm": 2.7377126216888428, + "learning_rate": 1.9629839580232625e-05, + "loss": 1.068, + "step": 1823 + }, + { + "epoch": 0.11436453696156498, + "grad_norm": 2.6880311965942383, + "learning_rate": 1.962929197142064e-05, + "loss": 1.2088, + "step": 1824 + }, + { + "epoch": 0.11442723681735532, + "grad_norm": 3.1068804264068604, + "learning_rate": 1.9628743965496236e-05, + "loss": 1.2094, + "step": 1825 + }, + { + "epoch": 0.11448993667314565, + "grad_norm": 3.0336623191833496, + "learning_rate": 1.962819556248201e-05, + "loss": 1.4228, + "step": 1826 + }, + { + "epoch": 0.11455263652893598, + "grad_norm": 2.8038361072540283, + "learning_rate": 1.9627646762400577e-05, + "loss": 1.1765, + "step": 1827 + }, + { + "epoch": 0.11461533638472632, + "grad_norm": 2.9458024501800537, + "learning_rate": 1.9627097565274575e-05, + "loss": 1.2257, + "step": 1828 + }, + { + "epoch": 0.11467803624051665, + "grad_norm": 2.6903998851776123, + "learning_rate": 1.9626547971126646e-05, + "loss": 1.2832, + "step": 1829 + }, + { + "epoch": 0.11474073609630697, + "grad_norm": 2.6106224060058594, + "learning_rate": 1.9625997979979457e-05, + "loss": 1.128, + "step": 1830 + }, + { + "epoch": 0.11480343595209731, + "grad_norm": 2.7287886142730713, + "learning_rate": 1.9625447591855695e-05, + "loss": 1.1154, + "step": 1831 + }, + { + "epoch": 0.11486613580788764, + "grad_norm": 2.8638365268707275, + "learning_rate": 1.962489680677805e-05, + "loss": 1.3163, + "step": 1832 + }, + { + "epoch": 0.11492883566367797, + "grad_norm": 2.978721857070923, + "learning_rate": 1.9624345624769243e-05, + "loss": 1.0674, + "step": 1833 + }, + { + "epoch": 
0.11499153551946831, + "grad_norm": 2.6054162979125977, + "learning_rate": 1.9623794045852e-05, + "loss": 1.341, + "step": 1834 + }, + { + "epoch": 0.11505423537525863, + "grad_norm": 3.3473832607269287, + "learning_rate": 1.9623242070049073e-05, + "loss": 1.1615, + "step": 1835 + }, + { + "epoch": 0.11511693523104898, + "grad_norm": 2.8531386852264404, + "learning_rate": 1.9622689697383223e-05, + "loss": 1.1615, + "step": 1836 + }, + { + "epoch": 0.1151796350868393, + "grad_norm": 3.0970406532287598, + "learning_rate": 1.9622136927877226e-05, + "loss": 1.2673, + "step": 1837 + }, + { + "epoch": 0.11524233494262963, + "grad_norm": 2.448233127593994, + "learning_rate": 1.9621583761553884e-05, + "loss": 1.0882, + "step": 1838 + }, + { + "epoch": 0.11530503479841997, + "grad_norm": 2.2535786628723145, + "learning_rate": 1.9621030198436007e-05, + "loss": 1.3534, + "step": 1839 + }, + { + "epoch": 0.1153677346542103, + "grad_norm": 2.890587329864502, + "learning_rate": 1.9620476238546424e-05, + "loss": 1.0772, + "step": 1840 + }, + { + "epoch": 0.11543043451000062, + "grad_norm": 2.8953959941864014, + "learning_rate": 1.9619921881907983e-05, + "loss": 1.192, + "step": 1841 + }, + { + "epoch": 0.11549313436579096, + "grad_norm": 2.8701202869415283, + "learning_rate": 1.9619367128543542e-05, + "loss": 1.0293, + "step": 1842 + }, + { + "epoch": 0.11555583422158129, + "grad_norm": 2.877986431121826, + "learning_rate": 1.961881197847598e-05, + "loss": 1.2771, + "step": 1843 + }, + { + "epoch": 0.11561853407737162, + "grad_norm": 2.8246572017669678, + "learning_rate": 1.961825643172819e-05, + "loss": 1.3052, + "step": 1844 + }, + { + "epoch": 0.11568123393316196, + "grad_norm": 2.9720866680145264, + "learning_rate": 1.961770048832309e-05, + "loss": 1.2466, + "step": 1845 + }, + { + "epoch": 0.11574393378895229, + "grad_norm": 2.9466335773468018, + "learning_rate": 1.96171441482836e-05, + "loss": 1.1557, + "step": 1846 + }, + { + "epoch": 0.11580663364474261, + "grad_norm": 2.677537679672241, + "learning_rate": 1.9616587411632665e-05, + "loss": 1.1442, + "step": 1847 + }, + { + "epoch": 0.11586933350053295, + "grad_norm": 2.980424165725708, + "learning_rate": 1.9616030278393243e-05, + "loss": 1.0646, + "step": 1848 + }, + { + "epoch": 0.11593203335632328, + "grad_norm": 2.8661181926727295, + "learning_rate": 1.9615472748588314e-05, + "loss": 1.1887, + "step": 1849 + }, + { + "epoch": 0.11599473321211361, + "grad_norm": 2.766979455947876, + "learning_rate": 1.9614914822240868e-05, + "loss": 1.1725, + "step": 1850 + }, + { + "epoch": 0.11605743306790395, + "grad_norm": 2.9835777282714844, + "learning_rate": 1.9614356499373918e-05, + "loss": 1.0496, + "step": 1851 + }, + { + "epoch": 0.11612013292369427, + "grad_norm": 3.200077533721924, + "learning_rate": 1.9613797780010483e-05, + "loss": 1.1819, + "step": 1852 + }, + { + "epoch": 0.1161828327794846, + "grad_norm": 2.771488904953003, + "learning_rate": 1.961323866417361e-05, + "loss": 1.1624, + "step": 1853 + }, + { + "epoch": 0.11624553263527494, + "grad_norm": 2.921727418899536, + "learning_rate": 1.9612679151886352e-05, + "loss": 1.41, + "step": 1854 + }, + { + "epoch": 0.11630823249106527, + "grad_norm": 2.7722055912017822, + "learning_rate": 1.9612119243171785e-05, + "loss": 1.298, + "step": 1855 + }, + { + "epoch": 0.1163709323468556, + "grad_norm": 3.0676472187042236, + "learning_rate": 1.9611558938053003e-05, + "loss": 1.2881, + "step": 1856 + }, + { + "epoch": 0.11643363220264594, + "grad_norm": 2.6575658321380615, + "learning_rate": 
1.961099823655311e-05, + "loss": 1.3034, + "step": 1857 + }, + { + "epoch": 0.11649633205843626, + "grad_norm": 2.95607328414917, + "learning_rate": 1.9610437138695234e-05, + "loss": 1.1461, + "step": 1858 + }, + { + "epoch": 0.11655903191422659, + "grad_norm": 2.7217936515808105, + "learning_rate": 1.9609875644502506e-05, + "loss": 1.3863, + "step": 1859 + }, + { + "epoch": 0.11662173177001693, + "grad_norm": 3.277743101119995, + "learning_rate": 1.960931375399809e-05, + "loss": 1.0801, + "step": 1860 + }, + { + "epoch": 0.11668443162580726, + "grad_norm": 3.033698081970215, + "learning_rate": 1.9608751467205153e-05, + "loss": 1.042, + "step": 1861 + }, + { + "epoch": 0.1167471314815976, + "grad_norm": 2.9927337169647217, + "learning_rate": 1.9608188784146884e-05, + "loss": 1.2221, + "step": 1862 + }, + { + "epoch": 0.11680983133738793, + "grad_norm": 3.000994920730591, + "learning_rate": 1.9607625704846494e-05, + "loss": 1.226, + "step": 1863 + }, + { + "epoch": 0.11687253119317825, + "grad_norm": 2.9503750801086426, + "learning_rate": 1.9607062229327198e-05, + "loss": 1.2026, + "step": 1864 + }, + { + "epoch": 0.1169352310489686, + "grad_norm": 2.8484723567962646, + "learning_rate": 1.9606498357612236e-05, + "loss": 1.2212, + "step": 1865 + }, + { + "epoch": 0.11699793090475892, + "grad_norm": 3.088703155517578, + "learning_rate": 1.960593408972486e-05, + "loss": 1.2292, + "step": 1866 + }, + { + "epoch": 0.11706063076054925, + "grad_norm": 2.599360466003418, + "learning_rate": 1.9605369425688346e-05, + "loss": 1.21, + "step": 1867 + }, + { + "epoch": 0.11712333061633959, + "grad_norm": 2.8245930671691895, + "learning_rate": 1.960480436552598e-05, + "loss": 1.1166, + "step": 1868 + }, + { + "epoch": 0.11718603047212992, + "grad_norm": 2.7975778579711914, + "learning_rate": 1.9604238909261058e-05, + "loss": 1.3901, + "step": 1869 + }, + { + "epoch": 0.11724873032792024, + "grad_norm": 2.8647677898406982, + "learning_rate": 1.9603673056916907e-05, + "loss": 1.2903, + "step": 1870 + }, + { + "epoch": 0.11731143018371058, + "grad_norm": 2.9560980796813965, + "learning_rate": 1.9603106808516857e-05, + "loss": 1.4469, + "step": 1871 + }, + { + "epoch": 0.11737413003950091, + "grad_norm": 2.566612482070923, + "learning_rate": 1.960254016408426e-05, + "loss": 1.2921, + "step": 1872 + }, + { + "epoch": 0.11743682989529124, + "grad_norm": 2.664555311203003, + "learning_rate": 1.9601973123642493e-05, + "loss": 1.3455, + "step": 1873 + }, + { + "epoch": 0.11749952975108158, + "grad_norm": 2.6611409187316895, + "learning_rate": 1.9601405687214936e-05, + "loss": 1.3791, + "step": 1874 + }, + { + "epoch": 0.1175622296068719, + "grad_norm": 2.902946710586548, + "learning_rate": 1.9600837854824985e-05, + "loss": 1.2045, + "step": 1875 + }, + { + "epoch": 0.11762492946266223, + "grad_norm": 2.865163564682007, + "learning_rate": 1.9600269626496062e-05, + "loss": 1.0983, + "step": 1876 + }, + { + "epoch": 0.11768762931845257, + "grad_norm": 2.5881099700927734, + "learning_rate": 1.95997010022516e-05, + "loss": 1.1853, + "step": 1877 + }, + { + "epoch": 0.1177503291742429, + "grad_norm": 2.9311110973358154, + "learning_rate": 1.9599131982115053e-05, + "loss": 1.1762, + "step": 1878 + }, + { + "epoch": 0.11781302903003323, + "grad_norm": 2.438061237335205, + "learning_rate": 1.959856256610988e-05, + "loss": 1.292, + "step": 1879 + }, + { + "epoch": 0.11787572888582357, + "grad_norm": 2.948760509490967, + "learning_rate": 1.9597992754259572e-05, + "loss": 1.1169, + "step": 1880 + }, + { + "epoch": 
0.1179384287416139, + "grad_norm": 2.877999782562256, + "learning_rate": 1.959742254658762e-05, + "loss": 1.1054, + "step": 1881 + }, + { + "epoch": 0.11800112859740422, + "grad_norm": 2.681436061859131, + "learning_rate": 1.9596851943117544e-05, + "loss": 1.2317, + "step": 1882 + }, + { + "epoch": 0.11806382845319456, + "grad_norm": 2.721975088119507, + "learning_rate": 1.9596280943872875e-05, + "loss": 1.3626, + "step": 1883 + }, + { + "epoch": 0.11812652830898489, + "grad_norm": 3.072305917739868, + "learning_rate": 1.959570954887716e-05, + "loss": 1.1207, + "step": 1884 + }, + { + "epoch": 0.11818922816477521, + "grad_norm": 2.979001522064209, + "learning_rate": 1.9595137758153962e-05, + "loss": 1.21, + "step": 1885 + }, + { + "epoch": 0.11825192802056556, + "grad_norm": 2.9020981788635254, + "learning_rate": 1.9594565571726868e-05, + "loss": 1.2107, + "step": 1886 + }, + { + "epoch": 0.11831462787635588, + "grad_norm": 2.9129836559295654, + "learning_rate": 1.9593992989619472e-05, + "loss": 1.2327, + "step": 1887 + }, + { + "epoch": 0.11837732773214621, + "grad_norm": 2.9874014854431152, + "learning_rate": 1.9593420011855384e-05, + "loss": 1.2168, + "step": 1888 + }, + { + "epoch": 0.11844002758793655, + "grad_norm": 3.170973300933838, + "learning_rate": 1.9592846638458235e-05, + "loss": 1.3294, + "step": 1889 + }, + { + "epoch": 0.11850272744372688, + "grad_norm": 2.5441126823425293, + "learning_rate": 1.9592272869451672e-05, + "loss": 1.2209, + "step": 1890 + }, + { + "epoch": 0.11856542729951722, + "grad_norm": 3.1823387145996094, + "learning_rate": 1.9591698704859357e-05, + "loss": 1.2245, + "step": 1891 + }, + { + "epoch": 0.11862812715530754, + "grad_norm": 2.8368260860443115, + "learning_rate": 1.9591124144704966e-05, + "loss": 1.4228, + "step": 1892 + }, + { + "epoch": 0.11869082701109787, + "grad_norm": 3.1087722778320312, + "learning_rate": 1.95905491890122e-05, + "loss": 1.1316, + "step": 1893 + }, + { + "epoch": 0.11875352686688821, + "grad_norm": 3.0932998657226562, + "learning_rate": 1.9589973837804764e-05, + "loss": 0.9309, + "step": 1894 + }, + { + "epoch": 0.11881622672267854, + "grad_norm": 2.4731204509735107, + "learning_rate": 1.958939809110639e-05, + "loss": 1.2755, + "step": 1895 + }, + { + "epoch": 0.11887892657846887, + "grad_norm": 2.6586813926696777, + "learning_rate": 1.958882194894082e-05, + "loss": 1.3389, + "step": 1896 + }, + { + "epoch": 0.1189416264342592, + "grad_norm": 2.8624653816223145, + "learning_rate": 1.9588245411331813e-05, + "loss": 1.3019, + "step": 1897 + }, + { + "epoch": 0.11900432629004953, + "grad_norm": 2.8362314701080322, + "learning_rate": 1.958766847830315e-05, + "loss": 1.156, + "step": 1898 + }, + { + "epoch": 0.11906702614583986, + "grad_norm": 2.651019811630249, + "learning_rate": 1.9587091149878617e-05, + "loss": 1.3187, + "step": 1899 + }, + { + "epoch": 0.1191297260016302, + "grad_norm": 2.8918776512145996, + "learning_rate": 1.9586513426082026e-05, + "loss": 1.0542, + "step": 1900 + }, + { + "epoch": 0.11919242585742053, + "grad_norm": 2.695478916168213, + "learning_rate": 1.9585935306937202e-05, + "loss": 1.1532, + "step": 1901 + }, + { + "epoch": 0.11925512571321085, + "grad_norm": 2.736109733581543, + "learning_rate": 1.958535679246799e-05, + "loss": 1.1124, + "step": 1902 + }, + { + "epoch": 0.1193178255690012, + "grad_norm": 2.850099563598633, + "learning_rate": 1.9584777882698245e-05, + "loss": 1.2152, + "step": 1903 + }, + { + "epoch": 0.11938052542479152, + "grad_norm": 2.687453269958496, + "learning_rate": 
1.958419857765184e-05, + "loss": 1.1859, + "step": 1904 + }, + { + "epoch": 0.11944322528058185, + "grad_norm": 2.41853666305542, + "learning_rate": 1.9583618877352667e-05, + "loss": 1.3105, + "step": 1905 + }, + { + "epoch": 0.11950592513637219, + "grad_norm": 2.5947563648223877, + "learning_rate": 1.9583038781824634e-05, + "loss": 1.2401, + "step": 1906 + }, + { + "epoch": 0.11956862499216252, + "grad_norm": 2.688786745071411, + "learning_rate": 1.9582458291091664e-05, + "loss": 1.1235, + "step": 1907 + }, + { + "epoch": 0.11963132484795284, + "grad_norm": 2.8655102252960205, + "learning_rate": 1.9581877405177695e-05, + "loss": 1.1997, + "step": 1908 + }, + { + "epoch": 0.11969402470374318, + "grad_norm": 3.223100185394287, + "learning_rate": 1.9581296124106682e-05, + "loss": 1.1423, + "step": 1909 + }, + { + "epoch": 0.11975672455953351, + "grad_norm": 2.729257345199585, + "learning_rate": 1.9580714447902598e-05, + "loss": 1.3159, + "step": 1910 + }, + { + "epoch": 0.11981942441532384, + "grad_norm": 3.0597128868103027, + "learning_rate": 1.9580132376589435e-05, + "loss": 1.2688, + "step": 1911 + }, + { + "epoch": 0.11988212427111418, + "grad_norm": 2.849519968032837, + "learning_rate": 1.9579549910191192e-05, + "loss": 1.2319, + "step": 1912 + }, + { + "epoch": 0.1199448241269045, + "grad_norm": 3.044323682785034, + "learning_rate": 1.9578967048731892e-05, + "loss": 1.3046, + "step": 1913 + }, + { + "epoch": 0.12000752398269483, + "grad_norm": 2.9342598915100098, + "learning_rate": 1.9578383792235573e-05, + "loss": 1.0694, + "step": 1914 + }, + { + "epoch": 0.12007022383848517, + "grad_norm": 2.802002429962158, + "learning_rate": 1.9577800140726292e-05, + "loss": 1.1151, + "step": 1915 + }, + { + "epoch": 0.1201329236942755, + "grad_norm": 2.757371425628662, + "learning_rate": 1.957721609422811e-05, + "loss": 1.2651, + "step": 1916 + }, + { + "epoch": 0.12019562355006584, + "grad_norm": 2.8203163146972656, + "learning_rate": 1.957663165276512e-05, + "loss": 1.3516, + "step": 1917 + }, + { + "epoch": 0.12025832340585617, + "grad_norm": 2.88055419921875, + "learning_rate": 1.957604681636142e-05, + "loss": 1.3237, + "step": 1918 + }, + { + "epoch": 0.1203210232616465, + "grad_norm": 3.002094268798828, + "learning_rate": 1.9575461585041133e-05, + "loss": 1.1445, + "step": 1919 + }, + { + "epoch": 0.12038372311743684, + "grad_norm": 2.523172378540039, + "learning_rate": 1.957487595882839e-05, + "loss": 1.1245, + "step": 1920 + }, + { + "epoch": 0.12044642297322716, + "grad_norm": 2.7179172039031982, + "learning_rate": 1.9574289937747347e-05, + "loss": 1.1881, + "step": 1921 + }, + { + "epoch": 0.12050912282901749, + "grad_norm": 2.583378791809082, + "learning_rate": 1.957370352182217e-05, + "loss": 1.1387, + "step": 1922 + }, + { + "epoch": 0.12057182268480783, + "grad_norm": 2.990036725997925, + "learning_rate": 1.9573116711077042e-05, + "loss": 1.2366, + "step": 1923 + }, + { + "epoch": 0.12063452254059816, + "grad_norm": 3.102914571762085, + "learning_rate": 1.957252950553616e-05, + "loss": 1.4492, + "step": 1924 + }, + { + "epoch": 0.12069722239638848, + "grad_norm": 2.62213134765625, + "learning_rate": 1.9571941905223745e-05, + "loss": 1.2348, + "step": 1925 + }, + { + "epoch": 0.12075992225217883, + "grad_norm": 2.908182144165039, + "learning_rate": 1.9571353910164026e-05, + "loss": 1.2024, + "step": 1926 + }, + { + "epoch": 0.12082262210796915, + "grad_norm": 2.5207066535949707, + "learning_rate": 1.9570765520381254e-05, + "loss": 1.3113, + "step": 1927 + }, + { + "epoch": 
0.12088532196375948, + "grad_norm": 3.0817785263061523, + "learning_rate": 1.9570176735899692e-05, + "loss": 1.3793, + "step": 1928 + }, + { + "epoch": 0.12094802181954982, + "grad_norm": 2.911094903945923, + "learning_rate": 1.9569587556743627e-05, + "loss": 1.2374, + "step": 1929 + }, + { + "epoch": 0.12101072167534015, + "grad_norm": 2.715534210205078, + "learning_rate": 1.956899798293735e-05, + "loss": 1.1397, + "step": 1930 + }, + { + "epoch": 0.12107342153113047, + "grad_norm": 2.8093721866607666, + "learning_rate": 1.956840801450518e-05, + "loss": 1.2215, + "step": 1931 + }, + { + "epoch": 0.12113612138692081, + "grad_norm": 2.7095751762390137, + "learning_rate": 1.956781765147145e-05, + "loss": 1.2285, + "step": 1932 + }, + { + "epoch": 0.12119882124271114, + "grad_norm": 2.5343363285064697, + "learning_rate": 1.95672268938605e-05, + "loss": 1.2744, + "step": 1933 + }, + { + "epoch": 0.12126152109850147, + "grad_norm": 2.6815333366394043, + "learning_rate": 1.956663574169669e-05, + "loss": 1.2971, + "step": 1934 + }, + { + "epoch": 0.12132422095429181, + "grad_norm": 2.492579221725464, + "learning_rate": 1.956604419500441e-05, + "loss": 1.2042, + "step": 1935 + }, + { + "epoch": 0.12138692081008214, + "grad_norm": 2.8651444911956787, + "learning_rate": 1.9565452253808045e-05, + "loss": 1.1627, + "step": 1936 + }, + { + "epoch": 0.12144962066587246, + "grad_norm": 2.7900421619415283, + "learning_rate": 1.9564859918132016e-05, + "loss": 1.1151, + "step": 1937 + }, + { + "epoch": 0.1215123205216628, + "grad_norm": 2.699347496032715, + "learning_rate": 1.9564267188000743e-05, + "loss": 1.2813, + "step": 1938 + }, + { + "epoch": 0.12157502037745313, + "grad_norm": 2.789376974105835, + "learning_rate": 1.9563674063438674e-05, + "loss": 1.2599, + "step": 1939 + }, + { + "epoch": 0.12163772023324346, + "grad_norm": 2.7325172424316406, + "learning_rate": 1.956308054447027e-05, + "loss": 1.1801, + "step": 1940 + }, + { + "epoch": 0.1217004200890338, + "grad_norm": 2.8559131622314453, + "learning_rate": 1.9562486631120007e-05, + "loss": 1.2571, + "step": 1941 + }, + { + "epoch": 0.12176311994482412, + "grad_norm": 2.7679576873779297, + "learning_rate": 1.9561892323412376e-05, + "loss": 1.1357, + "step": 1942 + }, + { + "epoch": 0.12182581980061447, + "grad_norm": 2.8109638690948486, + "learning_rate": 1.9561297621371888e-05, + "loss": 1.0612, + "step": 1943 + }, + { + "epoch": 0.12188851965640479, + "grad_norm": 3.0087080001831055, + "learning_rate": 1.9560702525023072e-05, + "loss": 1.2184, + "step": 1944 + }, + { + "epoch": 0.12195121951219512, + "grad_norm": 2.546967029571533, + "learning_rate": 1.9560107034390464e-05, + "loss": 1.3006, + "step": 1945 + }, + { + "epoch": 0.12201391936798546, + "grad_norm": 2.777679681777954, + "learning_rate": 1.9559511149498623e-05, + "loss": 1.2018, + "step": 1946 + }, + { + "epoch": 0.12207661922377579, + "grad_norm": 3.0646724700927734, + "learning_rate": 1.955891487037213e-05, + "loss": 1.0589, + "step": 1947 + }, + { + "epoch": 0.12213931907956611, + "grad_norm": 2.676586866378784, + "learning_rate": 1.9558318197035562e-05, + "loss": 1.1563, + "step": 1948 + }, + { + "epoch": 0.12220201893535645, + "grad_norm": 2.7670047283172607, + "learning_rate": 1.9557721129513538e-05, + "loss": 1.4033, + "step": 1949 + }, + { + "epoch": 0.12226471879114678, + "grad_norm": 2.65002179145813, + "learning_rate": 1.955712366783068e-05, + "loss": 1.2783, + "step": 1950 + }, + { + "epoch": 0.12232741864693711, + "grad_norm": 2.8438098430633545, + "learning_rate": 
1.955652581201162e-05, + "loss": 1.2244, + "step": 1951 + }, + { + "epoch": 0.12239011850272745, + "grad_norm": 2.9365720748901367, + "learning_rate": 1.9555927562081022e-05, + "loss": 1.2702, + "step": 1952 + }, + { + "epoch": 0.12245281835851778, + "grad_norm": 2.5259180068969727, + "learning_rate": 1.9555328918063554e-05, + "loss": 1.2965, + "step": 1953 + }, + { + "epoch": 0.1225155182143081, + "grad_norm": 2.9242196083068848, + "learning_rate": 1.95547298799839e-05, + "loss": 1.2543, + "step": 1954 + }, + { + "epoch": 0.12257821807009844, + "grad_norm": 2.8663458824157715, + "learning_rate": 1.9554130447866772e-05, + "loss": 1.2346, + "step": 1955 + }, + { + "epoch": 0.12264091792588877, + "grad_norm": 2.8225901126861572, + "learning_rate": 1.9553530621736885e-05, + "loss": 1.1873, + "step": 1956 + }, + { + "epoch": 0.1227036177816791, + "grad_norm": 2.500669002532959, + "learning_rate": 1.955293040161898e-05, + "loss": 1.2609, + "step": 1957 + }, + { + "epoch": 0.12276631763746944, + "grad_norm": 2.6375699043273926, + "learning_rate": 1.9552329787537805e-05, + "loss": 1.1475, + "step": 1958 + }, + { + "epoch": 0.12282901749325977, + "grad_norm": 2.86952805519104, + "learning_rate": 1.9551728779518138e-05, + "loss": 1.2673, + "step": 1959 + }, + { + "epoch": 0.12289171734905009, + "grad_norm": 2.625929355621338, + "learning_rate": 1.9551127377584754e-05, + "loss": 1.3472, + "step": 1960 + }, + { + "epoch": 0.12295441720484043, + "grad_norm": 2.6103365421295166, + "learning_rate": 1.955052558176246e-05, + "loss": 1.0692, + "step": 1961 + }, + { + "epoch": 0.12301711706063076, + "grad_norm": 2.6037609577178955, + "learning_rate": 1.9549923392076078e-05, + "loss": 1.2196, + "step": 1962 + }, + { + "epoch": 0.12307981691642109, + "grad_norm": 3.078298568725586, + "learning_rate": 1.9549320808550435e-05, + "loss": 1.1236, + "step": 1963 + }, + { + "epoch": 0.12314251677221143, + "grad_norm": 3.0820200443267822, + "learning_rate": 1.9548717831210383e-05, + "loss": 1.2153, + "step": 1964 + }, + { + "epoch": 0.12320521662800175, + "grad_norm": 2.541048288345337, + "learning_rate": 1.9548114460080794e-05, + "loss": 1.1083, + "step": 1965 + }, + { + "epoch": 0.12326791648379208, + "grad_norm": 3.01511812210083, + "learning_rate": 1.954751069518655e-05, + "loss": 1.0982, + "step": 1966 + }, + { + "epoch": 0.12333061633958242, + "grad_norm": 2.567573070526123, + "learning_rate": 1.9546906536552543e-05, + "loss": 1.0759, + "step": 1967 + }, + { + "epoch": 0.12339331619537275, + "grad_norm": 3.0732200145721436, + "learning_rate": 1.9546301984203694e-05, + "loss": 1.3882, + "step": 1968 + }, + { + "epoch": 0.12345601605116308, + "grad_norm": 2.70853853225708, + "learning_rate": 1.9545697038164936e-05, + "loss": 1.3047, + "step": 1969 + }, + { + "epoch": 0.12351871590695342, + "grad_norm": 2.9015004634857178, + "learning_rate": 1.9545091698461216e-05, + "loss": 1.0726, + "step": 1970 + }, + { + "epoch": 0.12358141576274374, + "grad_norm": 2.898709535598755, + "learning_rate": 1.9544485965117497e-05, + "loss": 1.2054, + "step": 1971 + }, + { + "epoch": 0.12364411561853408, + "grad_norm": 2.5194640159606934, + "learning_rate": 1.9543879838158756e-05, + "loss": 1.3372, + "step": 1972 + }, + { + "epoch": 0.12370681547432441, + "grad_norm": 3.197075366973877, + "learning_rate": 1.954327331761e-05, + "loss": 1.1829, + "step": 1973 + }, + { + "epoch": 0.12376951533011474, + "grad_norm": 2.8803110122680664, + "learning_rate": 1.9542666403496232e-05, + "loss": 1.2661, + "step": 1974 + }, + { + "epoch": 
0.12383221518590508, + "grad_norm": 3.3029932975769043, + "learning_rate": 1.9542059095842484e-05, + "loss": 1.2468, + "step": 1975 + }, + { + "epoch": 0.1238949150416954, + "grad_norm": 2.4332962036132812, + "learning_rate": 1.9541451394673804e-05, + "loss": 1.4014, + "step": 1976 + }, + { + "epoch": 0.12395761489748573, + "grad_norm": 2.5563206672668457, + "learning_rate": 1.9540843300015253e-05, + "loss": 1.1902, + "step": 1977 + }, + { + "epoch": 0.12402031475327607, + "grad_norm": 2.7579216957092285, + "learning_rate": 1.9540234811891908e-05, + "loss": 1.1914, + "step": 1978 + }, + { + "epoch": 0.1240830146090664, + "grad_norm": 2.7405147552490234, + "learning_rate": 1.953962593032886e-05, + "loss": 1.2134, + "step": 1979 + }, + { + "epoch": 0.12414571446485673, + "grad_norm": 2.755309581756592, + "learning_rate": 1.9539016655351222e-05, + "loss": 1.3172, + "step": 1980 + }, + { + "epoch": 0.12420841432064707, + "grad_norm": 2.679776906967163, + "learning_rate": 1.953840698698412e-05, + "loss": 0.9412, + "step": 1981 + }, + { + "epoch": 0.1242711141764374, + "grad_norm": 2.675100803375244, + "learning_rate": 1.95377969252527e-05, + "loss": 1.1195, + "step": 1982 + }, + { + "epoch": 0.12433381403222772, + "grad_norm": 2.885694742202759, + "learning_rate": 1.9537186470182116e-05, + "loss": 1.3129, + "step": 1983 + }, + { + "epoch": 0.12439651388801806, + "grad_norm": 2.7980494499206543, + "learning_rate": 1.9536575621797546e-05, + "loss": 1.2755, + "step": 1984 + }, + { + "epoch": 0.12445921374380839, + "grad_norm": 3.098116159439087, + "learning_rate": 1.953596438012418e-05, + "loss": 1.1029, + "step": 1985 + }, + { + "epoch": 0.12452191359959872, + "grad_norm": 3.022094488143921, + "learning_rate": 1.9535352745187232e-05, + "loss": 1.2889, + "step": 1986 + }, + { + "epoch": 0.12458461345538906, + "grad_norm": 2.876941680908203, + "learning_rate": 1.9534740717011917e-05, + "loss": 1.2205, + "step": 1987 + }, + { + "epoch": 0.12464731331117938, + "grad_norm": 2.8968091011047363, + "learning_rate": 1.9534128295623477e-05, + "loss": 1.2257, + "step": 1988 + }, + { + "epoch": 0.12471001316696971, + "grad_norm": 2.846562623977661, + "learning_rate": 1.9533515481047168e-05, + "loss": 1.1909, + "step": 1989 + }, + { + "epoch": 0.12477271302276005, + "grad_norm": 2.9026026725769043, + "learning_rate": 1.9532902273308268e-05, + "loss": 1.0865, + "step": 1990 + }, + { + "epoch": 0.12483541287855038, + "grad_norm": 2.8249175548553467, + "learning_rate": 1.953228867243206e-05, + "loss": 1.2626, + "step": 1991 + }, + { + "epoch": 0.1248981127343407, + "grad_norm": 2.804593324661255, + "learning_rate": 1.9531674678443853e-05, + "loss": 1.2867, + "step": 1992 + }, + { + "epoch": 0.12496081259013105, + "grad_norm": 2.842573881149292, + "learning_rate": 1.9531060291368967e-05, + "loss": 1.3071, + "step": 1993 + }, + { + "epoch": 0.1250235124459214, + "grad_norm": 3.181889772415161, + "learning_rate": 1.9530445511232734e-05, + "loss": 1.0215, + "step": 1994 + }, + { + "epoch": 0.1250862123017117, + "grad_norm": 2.6703624725341797, + "learning_rate": 1.9529830338060515e-05, + "loss": 1.3365, + "step": 1995 + }, + { + "epoch": 0.12514891215750204, + "grad_norm": 2.8902692794799805, + "learning_rate": 1.9529214771877673e-05, + "loss": 1.1509, + "step": 1996 + }, + { + "epoch": 0.12521161201329237, + "grad_norm": 2.609008550643921, + "learning_rate": 1.95285988127096e-05, + "loss": 1.388, + "step": 1997 + }, + { + "epoch": 0.1252743118690827, + "grad_norm": 2.506986141204834, + "learning_rate": 
1.9527982460581698e-05, + "loss": 1.2201, + "step": 1998 + }, + { + "epoch": 0.12533701172487302, + "grad_norm": 3.346221923828125, + "learning_rate": 1.9527365715519383e-05, + "loss": 1.1621, + "step": 1999 + }, + { + "epoch": 0.12539971158066338, + "grad_norm": 2.5804362297058105, + "learning_rate": 1.9526748577548085e-05, + "loss": 1.3394, + "step": 2000 + }, + { + "epoch": 0.12539971158066338, + "eval_loss": 1.2292829751968384, + "eval_runtime": 144.078, + "eval_samples_per_second": 4.373, + "eval_steps_per_second": 1.097, + "step": 2000 + }, + { + "epoch": 0.1254624114364537, + "grad_norm": 2.779859781265259, + "learning_rate": 1.9526131046693265e-05, + "loss": 1.2347, + "step": 2001 + }, + { + "epoch": 0.12552511129224403, + "grad_norm": 2.8311519622802734, + "learning_rate": 1.9525513122980382e-05, + "loss": 1.2696, + "step": 2002 + }, + { + "epoch": 0.12558781114803436, + "grad_norm": 3.1521952152252197, + "learning_rate": 1.9524894806434923e-05, + "loss": 1.0388, + "step": 2003 + }, + { + "epoch": 0.12565051100382468, + "grad_norm": 2.9441206455230713, + "learning_rate": 1.9524276097082383e-05, + "loss": 1.224, + "step": 2004 + }, + { + "epoch": 0.125713210859615, + "grad_norm": 2.9331865310668945, + "learning_rate": 1.9523656994948285e-05, + "loss": 1.0843, + "step": 2005 + }, + { + "epoch": 0.12577591071540536, + "grad_norm": 3.024271011352539, + "learning_rate": 1.9523037500058156e-05, + "loss": 1.3623, + "step": 2006 + }, + { + "epoch": 0.1258386105711957, + "grad_norm": 2.7537004947662354, + "learning_rate": 1.9522417612437545e-05, + "loss": 1.1924, + "step": 2007 + }, + { + "epoch": 0.12590131042698602, + "grad_norm": 2.95827054977417, + "learning_rate": 1.9521797332112012e-05, + "loss": 1.2181, + "step": 2008 + }, + { + "epoch": 0.12596401028277635, + "grad_norm": 2.783604860305786, + "learning_rate": 1.952117665910714e-05, + "loss": 1.2417, + "step": 2009 + }, + { + "epoch": 0.12602671013856667, + "grad_norm": 2.796417713165283, + "learning_rate": 1.9520555593448533e-05, + "loss": 1.1875, + "step": 2010 + }, + { + "epoch": 0.12608940999435703, + "grad_norm": 2.906092405319214, + "learning_rate": 1.9519934135161795e-05, + "loss": 1.1378, + "step": 2011 + }, + { + "epoch": 0.12615210985014735, + "grad_norm": 2.947211742401123, + "learning_rate": 1.951931228427256e-05, + "loss": 1.1618, + "step": 2012 + }, + { + "epoch": 0.12621480970593768, + "grad_norm": 2.789165496826172, + "learning_rate": 1.9518690040806465e-05, + "loss": 1.2191, + "step": 2013 + }, + { + "epoch": 0.126277509561728, + "grad_norm": 2.7962167263031006, + "learning_rate": 1.9518067404789183e-05, + "loss": 0.9876, + "step": 2014 + }, + { + "epoch": 0.12634020941751833, + "grad_norm": 2.525739908218384, + "learning_rate": 1.951744437624638e-05, + "loss": 1.1886, + "step": 2015 + }, + { + "epoch": 0.12640290927330866, + "grad_norm": 2.5859107971191406, + "learning_rate": 1.9516820955203758e-05, + "loss": 1.2655, + "step": 2016 + }, + { + "epoch": 0.12646560912909902, + "grad_norm": 2.999483108520508, + "learning_rate": 1.9516197141687026e-05, + "loss": 1.3658, + "step": 2017 + }, + { + "epoch": 0.12652830898488934, + "grad_norm": 2.980433464050293, + "learning_rate": 1.9515572935721905e-05, + "loss": 1.2227, + "step": 2018 + }, + { + "epoch": 0.12659100884067967, + "grad_norm": 2.6860363483428955, + "learning_rate": 1.9514948337334144e-05, + "loss": 1.2806, + "step": 2019 + }, + { + "epoch": 0.12665370869647, + "grad_norm": 2.9458694458007812, + "learning_rate": 1.9514323346549497e-05, + "loss": 1.2595, + 
"step": 2020 + }, + { + "epoch": 0.12671640855226032, + "grad_norm": 2.747515916824341, + "learning_rate": 1.9513697963393737e-05, + "loss": 1.2964, + "step": 2021 + }, + { + "epoch": 0.12677910840805065, + "grad_norm": 2.882044792175293, + "learning_rate": 1.9513072187892658e-05, + "loss": 1.4377, + "step": 2022 + }, + { + "epoch": 0.126841808263841, + "grad_norm": 2.7342288494110107, + "learning_rate": 1.9512446020072067e-05, + "loss": 1.1983, + "step": 2023 + }, + { + "epoch": 0.12690450811963133, + "grad_norm": 2.8870911598205566, + "learning_rate": 1.951181945995779e-05, + "loss": 1.0261, + "step": 2024 + }, + { + "epoch": 0.12696720797542166, + "grad_norm": 2.8844075202941895, + "learning_rate": 1.9511192507575663e-05, + "loss": 1.1271, + "step": 2025 + }, + { + "epoch": 0.12702990783121199, + "grad_norm": 2.8682236671447754, + "learning_rate": 1.9510565162951538e-05, + "loss": 1.24, + "step": 2026 + }, + { + "epoch": 0.1270926076870023, + "grad_norm": 2.653393030166626, + "learning_rate": 1.950993742611129e-05, + "loss": 1.0793, + "step": 2027 + }, + { + "epoch": 0.12715530754279264, + "grad_norm": 3.059619665145874, + "learning_rate": 1.950930929708081e-05, + "loss": 1.3065, + "step": 2028 + }, + { + "epoch": 0.127218007398583, + "grad_norm": 2.4019899368286133, + "learning_rate": 1.9508680775886e-05, + "loss": 1.2189, + "step": 2029 + }, + { + "epoch": 0.12728070725437332, + "grad_norm": 2.647219181060791, + "learning_rate": 1.9508051862552776e-05, + "loss": 1.3108, + "step": 2030 + }, + { + "epoch": 0.12734340711016365, + "grad_norm": 2.6879994869232178, + "learning_rate": 1.9507422557107077e-05, + "loss": 1.2497, + "step": 2031 + }, + { + "epoch": 0.12740610696595397, + "grad_norm": 2.999723434448242, + "learning_rate": 1.9506792859574865e-05, + "loss": 1.1803, + "step": 2032 + }, + { + "epoch": 0.1274688068217443, + "grad_norm": 3.3038434982299805, + "learning_rate": 1.950616276998209e-05, + "loss": 1.2215, + "step": 2033 + }, + { + "epoch": 0.12753150667753463, + "grad_norm": 2.925147294998169, + "learning_rate": 1.9505532288354754e-05, + "loss": 1.1209, + "step": 2034 + }, + { + "epoch": 0.12759420653332498, + "grad_norm": 2.603207588195801, + "learning_rate": 1.950490141471885e-05, + "loss": 1.3078, + "step": 2035 + }, + { + "epoch": 0.1276569063891153, + "grad_norm": 3.1529226303100586, + "learning_rate": 1.9504270149100394e-05, + "loss": 1.2363, + "step": 2036 + }, + { + "epoch": 0.12771960624490564, + "grad_norm": 2.7292556762695312, + "learning_rate": 1.9503638491525425e-05, + "loss": 1.2388, + "step": 2037 + }, + { + "epoch": 0.12778230610069596, + "grad_norm": 2.514141321182251, + "learning_rate": 1.950300644201999e-05, + "loss": 1.3835, + "step": 2038 + }, + { + "epoch": 0.1278450059564863, + "grad_norm": 2.407252311706543, + "learning_rate": 1.9502374000610152e-05, + "loss": 1.2177, + "step": 2039 + }, + { + "epoch": 0.12790770581227665, + "grad_norm": 2.9400484561920166, + "learning_rate": 1.9501741167321996e-05, + "loss": 1.0173, + "step": 2040 + }, + { + "epoch": 0.12797040566806697, + "grad_norm": 2.879840612411499, + "learning_rate": 1.950110794218162e-05, + "loss": 1.1393, + "step": 2041 + }, + { + "epoch": 0.1280331055238573, + "grad_norm": 2.558241844177246, + "learning_rate": 1.9500474325215135e-05, + "loss": 1.3743, + "step": 2042 + }, + { + "epoch": 0.12809580537964763, + "grad_norm": 2.785304069519043, + "learning_rate": 1.9499840316448675e-05, + "loss": 1.2877, + "step": 2043 + }, + { + "epoch": 0.12815850523543795, + "grad_norm": 2.736713409423828, + 
"learning_rate": 1.949920591590839e-05, + "loss": 1.3733, + "step": 2044 + }, + { + "epoch": 0.12822120509122828, + "grad_norm": 2.866708993911743, + "learning_rate": 1.949857112362043e-05, + "loss": 1.3216, + "step": 2045 + }, + { + "epoch": 0.12828390494701863, + "grad_norm": 2.645808696746826, + "learning_rate": 1.949793593961099e-05, + "loss": 1.3671, + "step": 2046 + }, + { + "epoch": 0.12834660480280896, + "grad_norm": 2.6595163345336914, + "learning_rate": 1.9497300363906253e-05, + "loss": 1.4238, + "step": 2047 + }, + { + "epoch": 0.1284093046585993, + "grad_norm": 3.003244400024414, + "learning_rate": 1.9496664396532434e-05, + "loss": 1.1231, + "step": 2048 + }, + { + "epoch": 0.12847200451438961, + "grad_norm": 2.700322389602661, + "learning_rate": 1.949602803751576e-05, + "loss": 1.2538, + "step": 2049 + }, + { + "epoch": 0.12853470437017994, + "grad_norm": 3.1542038917541504, + "learning_rate": 1.949539128688248e-05, + "loss": 1.224, + "step": 2050 + }, + { + "epoch": 0.12859740422597027, + "grad_norm": 3.093585252761841, + "learning_rate": 1.9494754144658844e-05, + "loss": 1.2073, + "step": 2051 + }, + { + "epoch": 0.12866010408176062, + "grad_norm": 2.9223272800445557, + "learning_rate": 1.9494116610871133e-05, + "loss": 1.2586, + "step": 2052 + }, + { + "epoch": 0.12872280393755095, + "grad_norm": 2.9463303089141846, + "learning_rate": 1.949347868554564e-05, + "loss": 1.2851, + "step": 2053 + }, + { + "epoch": 0.12878550379334128, + "grad_norm": 2.7526934146881104, + "learning_rate": 1.9492840368708668e-05, + "loss": 1.3611, + "step": 2054 + }, + { + "epoch": 0.1288482036491316, + "grad_norm": 2.5276827812194824, + "learning_rate": 1.949220166038655e-05, + "loss": 1.2576, + "step": 2055 + }, + { + "epoch": 0.12891090350492193, + "grad_norm": 2.695162534713745, + "learning_rate": 1.9491562560605616e-05, + "loss": 1.2985, + "step": 2056 + }, + { + "epoch": 0.12897360336071226, + "grad_norm": 2.723562479019165, + "learning_rate": 1.949092306939223e-05, + "loss": 1.1337, + "step": 2057 + }, + { + "epoch": 0.1290363032165026, + "grad_norm": 2.610109329223633, + "learning_rate": 1.949028318677276e-05, + "loss": 1.1676, + "step": 2058 + }, + { + "epoch": 0.12909900307229294, + "grad_norm": 2.9299378395080566, + "learning_rate": 1.94896429127736e-05, + "loss": 1.2018, + "step": 2059 + }, + { + "epoch": 0.12916170292808327, + "grad_norm": 2.675280809402466, + "learning_rate": 1.948900224742115e-05, + "loss": 1.1467, + "step": 2060 + }, + { + "epoch": 0.1292244027838736, + "grad_norm": 2.885284185409546, + "learning_rate": 1.9488361190741836e-05, + "loss": 1.073, + "step": 2061 + }, + { + "epoch": 0.12928710263966392, + "grad_norm": 3.0149993896484375, + "learning_rate": 1.948771974276209e-05, + "loss": 1.1586, + "step": 2062 + }, + { + "epoch": 0.12934980249545425, + "grad_norm": 3.0277249813079834, + "learning_rate": 1.9487077903508363e-05, + "loss": 1.2539, + "step": 2063 + }, + { + "epoch": 0.1294125023512446, + "grad_norm": 3.1046857833862305, + "learning_rate": 1.9486435673007134e-05, + "loss": 1.2366, + "step": 2064 + }, + { + "epoch": 0.12947520220703493, + "grad_norm": 2.9485228061676025, + "learning_rate": 1.9485793051284884e-05, + "loss": 1.3555, + "step": 2065 + }, + { + "epoch": 0.12953790206282526, + "grad_norm": 2.8814001083374023, + "learning_rate": 1.9485150038368113e-05, + "loss": 1.0367, + "step": 2066 + }, + { + "epoch": 0.12960060191861558, + "grad_norm": 2.8801381587982178, + "learning_rate": 1.948450663428334e-05, + "loss": 1.2092, + "step": 2067 + }, + { + 
"epoch": 0.1296633017744059, + "grad_norm": 3.026062488555908, + "learning_rate": 1.94838628390571e-05, + "loss": 1.1591, + "step": 2068 + }, + { + "epoch": 0.12972600163019626, + "grad_norm": 3.017383575439453, + "learning_rate": 1.948321865271594e-05, + "loss": 1.238, + "step": 2069 + }, + { + "epoch": 0.1297887014859866, + "grad_norm": 2.9971799850463867, + "learning_rate": 1.9482574075286427e-05, + "loss": 1.252, + "step": 2070 + }, + { + "epoch": 0.12985140134177692, + "grad_norm": 2.650566339492798, + "learning_rate": 1.948192910679515e-05, + "loss": 1.2432, + "step": 2071 + }, + { + "epoch": 0.12991410119756724, + "grad_norm": 2.895205497741699, + "learning_rate": 1.94812837472687e-05, + "loss": 1.2154, + "step": 2072 + }, + { + "epoch": 0.12997680105335757, + "grad_norm": 2.764014482498169, + "learning_rate": 1.9480637996733697e-05, + "loss": 1.1869, + "step": 2073 + }, + { + "epoch": 0.1300395009091479, + "grad_norm": 2.834005117416382, + "learning_rate": 1.9479991855216768e-05, + "loss": 1.2469, + "step": 2074 + }, + { + "epoch": 0.13010220076493825, + "grad_norm": 2.834685802459717, + "learning_rate": 1.947934532274456e-05, + "loss": 1.1673, + "step": 2075 + }, + { + "epoch": 0.13016490062072858, + "grad_norm": 3.1184494495391846, + "learning_rate": 1.9478698399343737e-05, + "loss": 1.2296, + "step": 2076 + }, + { + "epoch": 0.1302276004765189, + "grad_norm": 2.635169506072998, + "learning_rate": 1.9478051085040978e-05, + "loss": 1.1538, + "step": 2077 + }, + { + "epoch": 0.13029030033230923, + "grad_norm": 2.634432315826416, + "learning_rate": 1.9477403379862976e-05, + "loss": 1.1917, + "step": 2078 + }, + { + "epoch": 0.13035300018809956, + "grad_norm": 2.8759958744049072, + "learning_rate": 1.9476755283836448e-05, + "loss": 1.2002, + "step": 2079 + }, + { + "epoch": 0.1304157000438899, + "grad_norm": 2.64841628074646, + "learning_rate": 1.9476106796988118e-05, + "loss": 1.2274, + "step": 2080 + }, + { + "epoch": 0.13047839989968024, + "grad_norm": 3.048020601272583, + "learning_rate": 1.947545791934473e-05, + "loss": 1.1451, + "step": 2081 + }, + { + "epoch": 0.13054109975547057, + "grad_norm": 2.9255142211914062, + "learning_rate": 1.947480865093304e-05, + "loss": 1.2692, + "step": 2082 + }, + { + "epoch": 0.1306037996112609, + "grad_norm": 2.9127769470214844, + "learning_rate": 1.9474158991779832e-05, + "loss": 1.3211, + "step": 2083 + }, + { + "epoch": 0.13066649946705122, + "grad_norm": 2.6169662475585938, + "learning_rate": 1.947350894191189e-05, + "loss": 1.0995, + "step": 2084 + }, + { + "epoch": 0.13072919932284155, + "grad_norm": 3.102783203125, + "learning_rate": 1.947285850135603e-05, + "loss": 1.2092, + "step": 2085 + }, + { + "epoch": 0.13079189917863188, + "grad_norm": 3.0882809162139893, + "learning_rate": 1.9472207670139066e-05, + "loss": 1.4549, + "step": 2086 + }, + { + "epoch": 0.13085459903442223, + "grad_norm": 2.970754861831665, + "learning_rate": 1.947155644828785e-05, + "loss": 1.3317, + "step": 2087 + }, + { + "epoch": 0.13091729889021256, + "grad_norm": 2.6496317386627197, + "learning_rate": 1.947090483582923e-05, + "loss": 1.2523, + "step": 2088 + }, + { + "epoch": 0.13097999874600288, + "grad_norm": 2.6180942058563232, + "learning_rate": 1.947025283279008e-05, + "loss": 1.4125, + "step": 2089 + }, + { + "epoch": 0.1310426986017932, + "grad_norm": 2.7230947017669678, + "learning_rate": 1.9469600439197287e-05, + "loss": 1.2359, + "step": 2090 + }, + { + "epoch": 0.13110539845758354, + "grad_norm": 2.4813649654388428, + "learning_rate": 
1.9468947655077763e-05, + "loss": 1.1155, + "step": 2091 + }, + { + "epoch": 0.13116809831337387, + "grad_norm": 2.828503370285034, + "learning_rate": 1.9468294480458424e-05, + "loss": 1.3262, + "step": 2092 + }, + { + "epoch": 0.13123079816916422, + "grad_norm": 2.988966226577759, + "learning_rate": 1.9467640915366205e-05, + "loss": 1.0949, + "step": 2093 + }, + { + "epoch": 0.13129349802495455, + "grad_norm": 2.9583256244659424, + "learning_rate": 1.9466986959828063e-05, + "loss": 1.1935, + "step": 2094 + }, + { + "epoch": 0.13135619788074487, + "grad_norm": 2.8567144870758057, + "learning_rate": 1.946633261387096e-05, + "loss": 1.3327, + "step": 2095 + }, + { + "epoch": 0.1314188977365352, + "grad_norm": 2.549621105194092, + "learning_rate": 1.9465677877521894e-05, + "loss": 1.093, + "step": 2096 + }, + { + "epoch": 0.13148159759232553, + "grad_norm": 2.754221200942993, + "learning_rate": 1.9465022750807857e-05, + "loss": 1.1413, + "step": 2097 + }, + { + "epoch": 0.13154429744811588, + "grad_norm": 3.096482515335083, + "learning_rate": 1.9464367233755865e-05, + "loss": 1.275, + "step": 2098 + }, + { + "epoch": 0.1316069973039062, + "grad_norm": 3.2859530448913574, + "learning_rate": 1.9463711326392957e-05, + "loss": 0.9697, + "step": 2099 + }, + { + "epoch": 0.13166969715969654, + "grad_norm": 2.704796075820923, + "learning_rate": 1.946305502874618e-05, + "loss": 1.2296, + "step": 2100 + }, + { + "epoch": 0.13173239701548686, + "grad_norm": 3.114853620529175, + "learning_rate": 1.9462398340842602e-05, + "loss": 1.2669, + "step": 2101 + }, + { + "epoch": 0.1317950968712772, + "grad_norm": 3.0210275650024414, + "learning_rate": 1.9461741262709304e-05, + "loss": 1.2584, + "step": 2102 + }, + { + "epoch": 0.13185779672706752, + "grad_norm": 2.9362547397613525, + "learning_rate": 1.946108379437338e-05, + "loss": 1.0496, + "step": 2103 + }, + { + "epoch": 0.13192049658285787, + "grad_norm": 2.6601057052612305, + "learning_rate": 1.946042593586195e-05, + "loss": 1.1993, + "step": 2104 + }, + { + "epoch": 0.1319831964386482, + "grad_norm": 2.8911654949188232, + "learning_rate": 1.945976768720214e-05, + "loss": 1.0615, + "step": 2105 + }, + { + "epoch": 0.13204589629443853, + "grad_norm": 2.5307517051696777, + "learning_rate": 1.9459109048421095e-05, + "loss": 1.1204, + "step": 2106 + }, + { + "epoch": 0.13210859615022885, + "grad_norm": 3.251556396484375, + "learning_rate": 1.9458450019545983e-05, + "loss": 1.1992, + "step": 2107 + }, + { + "epoch": 0.13217129600601918, + "grad_norm": 2.756946325302124, + "learning_rate": 1.945779060060398e-05, + "loss": 1.0321, + "step": 2108 + }, + { + "epoch": 0.1322339958618095, + "grad_norm": 3.0346524715423584, + "learning_rate": 1.9457130791622277e-05, + "loss": 1.1748, + "step": 2109 + }, + { + "epoch": 0.13229669571759986, + "grad_norm": 2.7594244480133057, + "learning_rate": 1.9456470592628086e-05, + "loss": 1.1955, + "step": 2110 + }, + { + "epoch": 0.1323593955733902, + "grad_norm": 3.16619873046875, + "learning_rate": 1.945581000364864e-05, + "loss": 1.2605, + "step": 2111 + }, + { + "epoch": 0.13242209542918051, + "grad_norm": 2.7134392261505127, + "learning_rate": 1.9455149024711173e-05, + "loss": 1.3461, + "step": 2112 + }, + { + "epoch": 0.13248479528497084, + "grad_norm": 3.057732343673706, + "learning_rate": 1.9454487655842946e-05, + "loss": 1.1864, + "step": 2113 + }, + { + "epoch": 0.13254749514076117, + "grad_norm": 2.8485145568847656, + "learning_rate": 1.9453825897071237e-05, + "loss": 1.2733, + "step": 2114 + }, + { + "epoch": 
0.1326101949965515, + "grad_norm": 2.898811101913452, + "learning_rate": 1.9453163748423334e-05, + "loss": 1.267, + "step": 2115 + }, + { + "epoch": 0.13267289485234185, + "grad_norm": 2.770739793777466, + "learning_rate": 1.945250120992655e-05, + "loss": 1.284, + "step": 2116 + }, + { + "epoch": 0.13273559470813218, + "grad_norm": 2.7505974769592285, + "learning_rate": 1.94518382816082e-05, + "loss": 1.192, + "step": 2117 + }, + { + "epoch": 0.1327982945639225, + "grad_norm": 2.7891600131988525, + "learning_rate": 1.9451174963495624e-05, + "loss": 1.2182, + "step": 2118 + }, + { + "epoch": 0.13286099441971283, + "grad_norm": 2.9877917766571045, + "learning_rate": 1.945051125561618e-05, + "loss": 1.2499, + "step": 2119 + }, + { + "epoch": 0.13292369427550316, + "grad_norm": 3.094097852706909, + "learning_rate": 1.9449847157997242e-05, + "loss": 1.1368, + "step": 2120 + }, + { + "epoch": 0.1329863941312935, + "grad_norm": 2.8001763820648193, + "learning_rate": 1.9449182670666194e-05, + "loss": 1.0842, + "step": 2121 + }, + { + "epoch": 0.13304909398708384, + "grad_norm": 2.929612874984741, + "learning_rate": 1.944851779365044e-05, + "loss": 1.2055, + "step": 2122 + }, + { + "epoch": 0.13311179384287417, + "grad_norm": 2.80228853225708, + "learning_rate": 1.9447852526977397e-05, + "loss": 1.1669, + "step": 2123 + }, + { + "epoch": 0.1331744936986645, + "grad_norm": 3.0903007984161377, + "learning_rate": 1.9447186870674505e-05, + "loss": 1.3092, + "step": 2124 + }, + { + "epoch": 0.13323719355445482, + "grad_norm": 3.05387282371521, + "learning_rate": 1.9446520824769214e-05, + "loss": 1.0865, + "step": 2125 + }, + { + "epoch": 0.13329989341024515, + "grad_norm": 2.7662761211395264, + "learning_rate": 1.944585438928899e-05, + "loss": 1.1442, + "step": 2126 + }, + { + "epoch": 0.1333625932660355, + "grad_norm": 3.0297434329986572, + "learning_rate": 1.9445187564261324e-05, + "loss": 1.1747, + "step": 2127 + }, + { + "epoch": 0.13342529312182583, + "grad_norm": 2.935739755630493, + "learning_rate": 1.9444520349713705e-05, + "loss": 1.128, + "step": 2128 + }, + { + "epoch": 0.13348799297761615, + "grad_norm": 2.861539840698242, + "learning_rate": 1.944385274567366e-05, + "loss": 1.0455, + "step": 2129 + }, + { + "epoch": 0.13355069283340648, + "grad_norm": 2.9167520999908447, + "learning_rate": 1.944318475216871e-05, + "loss": 1.1927, + "step": 2130 + }, + { + "epoch": 0.1336133926891968, + "grad_norm": 2.8169467449188232, + "learning_rate": 1.9442516369226408e-05, + "loss": 1.2451, + "step": 2131 + }, + { + "epoch": 0.13367609254498714, + "grad_norm": 3.0627169609069824, + "learning_rate": 1.9441847596874322e-05, + "loss": 1.2328, + "step": 2132 + }, + { + "epoch": 0.1337387924007775, + "grad_norm": 2.619436025619507, + "learning_rate": 1.9441178435140028e-05, + "loss": 1.1724, + "step": 2133 + }, + { + "epoch": 0.13380149225656782, + "grad_norm": 2.9385766983032227, + "learning_rate": 1.944050888405112e-05, + "loss": 1.3465, + "step": 2134 + }, + { + "epoch": 0.13386419211235814, + "grad_norm": 2.7910940647125244, + "learning_rate": 1.9439838943635216e-05, + "loss": 1.2126, + "step": 2135 + }, + { + "epoch": 0.13392689196814847, + "grad_norm": 3.233905792236328, + "learning_rate": 1.943916861391994e-05, + "loss": 1.0868, + "step": 2136 + }, + { + "epoch": 0.1339895918239388, + "grad_norm": 2.7250380516052246, + "learning_rate": 1.943849789493294e-05, + "loss": 1.3409, + "step": 2137 + }, + { + "epoch": 0.13405229167972912, + "grad_norm": 2.9179561138153076, + "learning_rate": 
1.9437826786701874e-05, + "loss": 1.2937, + "step": 2138 + }, + { + "epoch": 0.13411499153551948, + "grad_norm": 2.763029098510742, + "learning_rate": 1.943715528925442e-05, + "loss": 1.309, + "step": 2139 + }, + { + "epoch": 0.1341776913913098, + "grad_norm": 2.7288331985473633, + "learning_rate": 1.943648340261827e-05, + "loss": 1.2795, + "step": 2140 + }, + { + "epoch": 0.13424039124710013, + "grad_norm": 3.078824520111084, + "learning_rate": 1.943581112682113e-05, + "loss": 1.2494, + "step": 2141 + }, + { + "epoch": 0.13430309110289046, + "grad_norm": 2.883852958679199, + "learning_rate": 1.9435138461890726e-05, + "loss": 1.1288, + "step": 2142 + }, + { + "epoch": 0.1343657909586808, + "grad_norm": 2.8067853450775146, + "learning_rate": 1.9434465407854803e-05, + "loss": 1.2056, + "step": 2143 + }, + { + "epoch": 0.1344284908144711, + "grad_norm": 2.7792279720306396, + "learning_rate": 1.9433791964741117e-05, + "loss": 1.2102, + "step": 2144 + }, + { + "epoch": 0.13449119067026147, + "grad_norm": 3.211933135986328, + "learning_rate": 1.9433118132577432e-05, + "loss": 1.2076, + "step": 2145 + }, + { + "epoch": 0.1345538905260518, + "grad_norm": 2.647555112838745, + "learning_rate": 1.943244391139155e-05, + "loss": 1.1874, + "step": 2146 + }, + { + "epoch": 0.13461659038184212, + "grad_norm": 2.7566885948181152, + "learning_rate": 1.9431769301211263e-05, + "loss": 1.2926, + "step": 2147 + }, + { + "epoch": 0.13467929023763245, + "grad_norm": 2.629041910171509, + "learning_rate": 1.94310943020644e-05, + "loss": 1.2574, + "step": 2148 + }, + { + "epoch": 0.13474199009342278, + "grad_norm": 2.753368854522705, + "learning_rate": 1.9430418913978798e-05, + "loss": 1.1894, + "step": 2149 + }, + { + "epoch": 0.13480468994921313, + "grad_norm": 2.9341485500335693, + "learning_rate": 1.9429743136982306e-05, + "loss": 1.1189, + "step": 2150 + }, + { + "epoch": 0.13486738980500346, + "grad_norm": 2.6535794734954834, + "learning_rate": 1.9429066971102797e-05, + "loss": 1.3071, + "step": 2151 + }, + { + "epoch": 0.13493008966079378, + "grad_norm": 2.6376967430114746, + "learning_rate": 1.9428390416368154e-05, + "loss": 1.3381, + "step": 2152 + }, + { + "epoch": 0.1349927895165841, + "grad_norm": 3.0976648330688477, + "learning_rate": 1.942771347280628e-05, + "loss": 1.1477, + "step": 2153 + }, + { + "epoch": 0.13505548937237444, + "grad_norm": 2.568844795227051, + "learning_rate": 1.9427036140445087e-05, + "loss": 1.2343, + "step": 2154 + }, + { + "epoch": 0.13511818922816476, + "grad_norm": 2.9265072345733643, + "learning_rate": 1.9426358419312513e-05, + "loss": 1.2368, + "step": 2155 + }, + { + "epoch": 0.13518088908395512, + "grad_norm": 2.8721535205841064, + "learning_rate": 1.9425680309436506e-05, + "loss": 1.03, + "step": 2156 + }, + { + "epoch": 0.13524358893974545, + "grad_norm": 3.2347335815429688, + "learning_rate": 1.9425001810845034e-05, + "loss": 1.1013, + "step": 2157 + }, + { + "epoch": 0.13530628879553577, + "grad_norm": 2.934326410293579, + "learning_rate": 1.9424322923566074e-05, + "loss": 1.1354, + "step": 2158 + }, + { + "epoch": 0.1353689886513261, + "grad_norm": 2.7744181156158447, + "learning_rate": 1.9423643647627625e-05, + "loss": 1.2936, + "step": 2159 + }, + { + "epoch": 0.13543168850711643, + "grad_norm": 2.5225441455841064, + "learning_rate": 1.94229639830577e-05, + "loss": 1.1022, + "step": 2160 + }, + { + "epoch": 0.13549438836290675, + "grad_norm": 2.996849298477173, + "learning_rate": 1.9422283929884325e-05, + "loss": 1.099, + "step": 2161 + }, + { + "epoch": 
0.1355570882186971, + "grad_norm": 2.8626272678375244, + "learning_rate": 1.942160348813556e-05, + "loss": 1.1051, + "step": 2162 + }, + { + "epoch": 0.13561978807448744, + "grad_norm": 2.9912631511688232, + "learning_rate": 1.9420922657839444e-05, + "loss": 1.476, + "step": 2163 + }, + { + "epoch": 0.13568248793027776, + "grad_norm": 2.9535796642303467, + "learning_rate": 1.9420241439024074e-05, + "loss": 1.2593, + "step": 2164 + }, + { + "epoch": 0.1357451877860681, + "grad_norm": 2.524048328399658, + "learning_rate": 1.9419559831717532e-05, + "loss": 1.052, + "step": 2165 + }, + { + "epoch": 0.13580788764185842, + "grad_norm": 3.0672953128814697, + "learning_rate": 1.9418877835947934e-05, + "loss": 1.1288, + "step": 2166 + }, + { + "epoch": 0.13587058749764874, + "grad_norm": 2.8188085556030273, + "learning_rate": 1.94181954517434e-05, + "loss": 1.1273, + "step": 2167 + }, + { + "epoch": 0.1359332873534391, + "grad_norm": 3.0758259296417236, + "learning_rate": 1.9417512679132075e-05, + "loss": 1.1947, + "step": 2168 + }, + { + "epoch": 0.13599598720922942, + "grad_norm": 3.0260584354400635, + "learning_rate": 1.941682951814212e-05, + "loss": 1.3436, + "step": 2169 + }, + { + "epoch": 0.13605868706501975, + "grad_norm": 2.6834325790405273, + "learning_rate": 1.9416145968801697e-05, + "loss": 1.2472, + "step": 2170 + }, + { + "epoch": 0.13612138692081008, + "grad_norm": 2.9128310680389404, + "learning_rate": 1.9415462031139008e-05, + "loss": 1.0458, + "step": 2171 + }, + { + "epoch": 0.1361840867766004, + "grad_norm": 3.0424656867980957, + "learning_rate": 1.9414777705182253e-05, + "loss": 1.1282, + "step": 2172 + }, + { + "epoch": 0.13624678663239073, + "grad_norm": 2.7686972618103027, + "learning_rate": 1.9414092990959653e-05, + "loss": 1.2479, + "step": 2173 + }, + { + "epoch": 0.1363094864881811, + "grad_norm": 3.153430461883545, + "learning_rate": 1.9413407888499444e-05, + "loss": 1.1406, + "step": 2174 + }, + { + "epoch": 0.1363721863439714, + "grad_norm": 2.7363193035125732, + "learning_rate": 1.941272239782989e-05, + "loss": 1.1284, + "step": 2175 + }, + { + "epoch": 0.13643488619976174, + "grad_norm": 3.0165014266967773, + "learning_rate": 1.941203651897925e-05, + "loss": 1.0804, + "step": 2176 + }, + { + "epoch": 0.13649758605555207, + "grad_norm": 3.1309614181518555, + "learning_rate": 1.941135025197581e-05, + "loss": 1.0962, + "step": 2177 + }, + { + "epoch": 0.1365602859113424, + "grad_norm": 2.900949478149414, + "learning_rate": 1.9410663596847877e-05, + "loss": 1.1712, + "step": 2178 + }, + { + "epoch": 0.13662298576713275, + "grad_norm": 2.754634141921997, + "learning_rate": 1.9409976553623767e-05, + "loss": 1.3008, + "step": 2179 + }, + { + "epoch": 0.13668568562292308, + "grad_norm": 3.039827346801758, + "learning_rate": 1.9409289122331812e-05, + "loss": 1.0372, + "step": 2180 + }, + { + "epoch": 0.1367483854787134, + "grad_norm": 2.9904165267944336, + "learning_rate": 1.940860130300036e-05, + "loss": 1.223, + "step": 2181 + }, + { + "epoch": 0.13681108533450373, + "grad_norm": 2.714442253112793, + "learning_rate": 1.9407913095657785e-05, + "loss": 1.2063, + "step": 2182 + }, + { + "epoch": 0.13687378519029406, + "grad_norm": 2.9032199382781982, + "learning_rate": 1.940722450033246e-05, + "loss": 1.3223, + "step": 2183 + }, + { + "epoch": 0.13693648504608438, + "grad_norm": 2.843318462371826, + "learning_rate": 1.9406535517052788e-05, + "loss": 1.1078, + "step": 2184 + }, + { + "epoch": 0.13699918490187474, + "grad_norm": 3.1952626705169678, + "learning_rate": 
1.9405846145847177e-05, + "loss": 1.2889, + "step": 2185 + }, + { + "epoch": 0.13706188475766506, + "grad_norm": 3.143062114715576, + "learning_rate": 1.940515638674406e-05, + "loss": 1.2788, + "step": 2186 + }, + { + "epoch": 0.1371245846134554, + "grad_norm": 3.014993190765381, + "learning_rate": 1.9404466239771887e-05, + "loss": 1.2307, + "step": 2187 + }, + { + "epoch": 0.13718728446924572, + "grad_norm": 3.017692804336548, + "learning_rate": 1.9403775704959112e-05, + "loss": 1.1667, + "step": 2188 + }, + { + "epoch": 0.13724998432503605, + "grad_norm": 2.9570882320404053, + "learning_rate": 1.940308478233422e-05, + "loss": 1.2369, + "step": 2189 + }, + { + "epoch": 0.13731268418082637, + "grad_norm": 2.7455835342407227, + "learning_rate": 1.9402393471925694e-05, + "loss": 1.0907, + "step": 2190 + }, + { + "epoch": 0.13737538403661673, + "grad_norm": 2.7107105255126953, + "learning_rate": 1.940170177376206e-05, + "loss": 1.3899, + "step": 2191 + }, + { + "epoch": 0.13743808389240705, + "grad_norm": 2.5368540287017822, + "learning_rate": 1.940100968787183e-05, + "loss": 0.9874, + "step": 2192 + }, + { + "epoch": 0.13750078374819738, + "grad_norm": 2.778528928756714, + "learning_rate": 1.9400317214283548e-05, + "loss": 1.255, + "step": 2193 + }, + { + "epoch": 0.1375634836039877, + "grad_norm": 2.715057134628296, + "learning_rate": 1.9399624353025774e-05, + "loss": 1.2707, + "step": 2194 + }, + { + "epoch": 0.13762618345977803, + "grad_norm": 2.7080633640289307, + "learning_rate": 1.939893110412708e-05, + "loss": 1.2254, + "step": 2195 + }, + { + "epoch": 0.13768888331556836, + "grad_norm": 2.797727346420288, + "learning_rate": 1.9398237467616063e-05, + "loss": 1.182, + "step": 2196 + }, + { + "epoch": 0.13775158317135872, + "grad_norm": 2.692811965942383, + "learning_rate": 1.9397543443521318e-05, + "loss": 1.0823, + "step": 2197 + }, + { + "epoch": 0.13781428302714904, + "grad_norm": 2.8538947105407715, + "learning_rate": 1.939684903187147e-05, + "loss": 1.2664, + "step": 2198 + }, + { + "epoch": 0.13787698288293937, + "grad_norm": 2.667208433151245, + "learning_rate": 1.939615423269516e-05, + "loss": 1.1637, + "step": 2199 + }, + { + "epoch": 0.1379396827387297, + "grad_norm": 2.930510997772217, + "learning_rate": 1.9395459046021033e-05, + "loss": 1.3212, + "step": 2200 + }, + { + "epoch": 0.13800238259452002, + "grad_norm": 2.6799213886260986, + "learning_rate": 1.9394763471877774e-05, + "loss": 1.1754, + "step": 2201 + }, + { + "epoch": 0.13806508245031038, + "grad_norm": 2.6558868885040283, + "learning_rate": 1.9394067510294055e-05, + "loss": 1.2495, + "step": 2202 + }, + { + "epoch": 0.1381277823061007, + "grad_norm": 2.822828769683838, + "learning_rate": 1.939337116129858e-05, + "loss": 1.2286, + "step": 2203 + }, + { + "epoch": 0.13819048216189103, + "grad_norm": 2.9352574348449707, + "learning_rate": 1.939267442492007e-05, + "loss": 1.3071, + "step": 2204 + }, + { + "epoch": 0.13825318201768136, + "grad_norm": 2.698648691177368, + "learning_rate": 1.9391977301187258e-05, + "loss": 1.2619, + "step": 2205 + }, + { + "epoch": 0.13831588187347169, + "grad_norm": 2.8129830360412598, + "learning_rate": 1.939127979012889e-05, + "loss": 1.1798, + "step": 2206 + }, + { + "epoch": 0.138378581729262, + "grad_norm": 2.9511632919311523, + "learning_rate": 1.939058189177373e-05, + "loss": 1.2012, + "step": 2207 + }, + { + "epoch": 0.13844128158505237, + "grad_norm": 2.6771514415740967, + "learning_rate": 1.938988360615057e-05, + "loss": 1.1502, + "step": 2208 + }, + { + "epoch": 
0.1385039814408427, + "grad_norm": 2.9324734210968018, + "learning_rate": 1.9389184933288193e-05, + "loss": 1.1198, + "step": 2209 + }, + { + "epoch": 0.13856668129663302, + "grad_norm": 2.7615933418273926, + "learning_rate": 1.9388485873215425e-05, + "loss": 1.0778, + "step": 2210 + }, + { + "epoch": 0.13862938115242335, + "grad_norm": 3.1005332469940186, + "learning_rate": 1.9387786425961085e-05, + "loss": 1.1868, + "step": 2211 + }, + { + "epoch": 0.13869208100821367, + "grad_norm": 2.9041247367858887, + "learning_rate": 1.9387086591554025e-05, + "loss": 1.1452, + "step": 2212 + }, + { + "epoch": 0.138754780864004, + "grad_norm": 2.6678643226623535, + "learning_rate": 1.9386386370023104e-05, + "loss": 1.1649, + "step": 2213 + }, + { + "epoch": 0.13881748071979436, + "grad_norm": 2.4944801330566406, + "learning_rate": 1.9385685761397197e-05, + "loss": 1.1373, + "step": 2214 + }, + { + "epoch": 0.13888018057558468, + "grad_norm": 2.8386740684509277, + "learning_rate": 1.9384984765705202e-05, + "loss": 1.3476, + "step": 2215 + }, + { + "epoch": 0.138942880431375, + "grad_norm": 3.020277261734009, + "learning_rate": 1.9384283382976027e-05, + "loss": 1.2611, + "step": 2216 + }, + { + "epoch": 0.13900558028716534, + "grad_norm": 2.953195810317993, + "learning_rate": 1.9383581613238593e-05, + "loss": 1.1526, + "step": 2217 + }, + { + "epoch": 0.13906828014295566, + "grad_norm": 2.651338815689087, + "learning_rate": 1.938287945652184e-05, + "loss": 1.1402, + "step": 2218 + }, + { + "epoch": 0.139130979998746, + "grad_norm": 2.9626333713531494, + "learning_rate": 1.9382176912854732e-05, + "loss": 1.1688, + "step": 2219 + }, + { + "epoch": 0.13919367985453635, + "grad_norm": 2.835289716720581, + "learning_rate": 1.938147398226624e-05, + "loss": 1.4638, + "step": 2220 + }, + { + "epoch": 0.13925637971032667, + "grad_norm": 2.728511333465576, + "learning_rate": 1.9380770664785352e-05, + "loss": 1.1904, + "step": 2221 + }, + { + "epoch": 0.139319079566117, + "grad_norm": 2.8506147861480713, + "learning_rate": 1.9380066960441072e-05, + "loss": 1.1132, + "step": 2222 + }, + { + "epoch": 0.13938177942190733, + "grad_norm": 2.9593088626861572, + "learning_rate": 1.9379362869262418e-05, + "loss": 1.2249, + "step": 2223 + }, + { + "epoch": 0.13944447927769765, + "grad_norm": 2.722045421600342, + "learning_rate": 1.937865839127843e-05, + "loss": 1.1364, + "step": 2224 + }, + { + "epoch": 0.13950717913348798, + "grad_norm": 2.638596773147583, + "learning_rate": 1.9377953526518158e-05, + "loss": 1.3367, + "step": 2225 + }, + { + "epoch": 0.13956987898927833, + "grad_norm": 2.979722261428833, + "learning_rate": 1.937724827501068e-05, + "loss": 1.286, + "step": 2226 + }, + { + "epoch": 0.13963257884506866, + "grad_norm": 2.8448076248168945, + "learning_rate": 1.9376542636785067e-05, + "loss": 1.1571, + "step": 2227 + }, + { + "epoch": 0.139695278700859, + "grad_norm": 2.9422285556793213, + "learning_rate": 1.9375836611870428e-05, + "loss": 1.1176, + "step": 2228 + }, + { + "epoch": 0.13975797855664931, + "grad_norm": 2.847398281097412, + "learning_rate": 1.937513020029588e-05, + "loss": 1.2761, + "step": 2229 + }, + { + "epoch": 0.13982067841243964, + "grad_norm": 2.810490131378174, + "learning_rate": 1.9374423402090553e-05, + "loss": 1.1371, + "step": 2230 + }, + { + "epoch": 0.13988337826823, + "grad_norm": 3.1877546310424805, + "learning_rate": 1.9373716217283593e-05, + "loss": 1.1224, + "step": 2231 + }, + { + "epoch": 0.13994607812402032, + "grad_norm": 2.959711790084839, + "learning_rate": 
1.937300864590417e-05, + "loss": 1.132, + "step": 2232 + }, + { + "epoch": 0.14000877797981065, + "grad_norm": 2.4733433723449707, + "learning_rate": 1.9372300687981455e-05, + "loss": 1.2061, + "step": 2233 + }, + { + "epoch": 0.14007147783560098, + "grad_norm": 2.892197608947754, + "learning_rate": 1.9371592343544655e-05, + "loss": 1.1763, + "step": 2234 + }, + { + "epoch": 0.1401341776913913, + "grad_norm": 2.820028066635132, + "learning_rate": 1.9370883612622977e-05, + "loss": 1.336, + "step": 2235 + }, + { + "epoch": 0.14019687754718163, + "grad_norm": 3.3080203533172607, + "learning_rate": 1.937017449524565e-05, + "loss": 1.3476, + "step": 2236 + }, + { + "epoch": 0.14025957740297199, + "grad_norm": 2.9834136962890625, + "learning_rate": 1.9369464991441917e-05, + "loss": 1.0999, + "step": 2237 + }, + { + "epoch": 0.1403222772587623, + "grad_norm": 3.019282102584839, + "learning_rate": 1.936875510124104e-05, + "loss": 1.2458, + "step": 2238 + }, + { + "epoch": 0.14038497711455264, + "grad_norm": 2.9033396244049072, + "learning_rate": 1.9368044824672292e-05, + "loss": 1.4083, + "step": 2239 + }, + { + "epoch": 0.14044767697034297, + "grad_norm": 2.762652635574341, + "learning_rate": 1.936733416176497e-05, + "loss": 1.1147, + "step": 2240 + }, + { + "epoch": 0.1405103768261333, + "grad_norm": 2.9846439361572266, + "learning_rate": 1.9366623112548373e-05, + "loss": 1.1761, + "step": 2241 + }, + { + "epoch": 0.14057307668192362, + "grad_norm": 3.0114872455596924, + "learning_rate": 1.936591167705183e-05, + "loss": 1.161, + "step": 2242 + }, + { + "epoch": 0.14063577653771397, + "grad_norm": 3.101078510284424, + "learning_rate": 1.936519985530468e-05, + "loss": 1.0954, + "step": 2243 + }, + { + "epoch": 0.1406984763935043, + "grad_norm": 2.7635903358459473, + "learning_rate": 1.9364487647336282e-05, + "loss": 1.1545, + "step": 2244 + }, + { + "epoch": 0.14076117624929463, + "grad_norm": 2.819661855697632, + "learning_rate": 1.9363775053176004e-05, + "loss": 0.9404, + "step": 2245 + }, + { + "epoch": 0.14082387610508496, + "grad_norm": 2.7317636013031006, + "learning_rate": 1.9363062072853233e-05, + "loss": 1.2587, + "step": 2246 + }, + { + "epoch": 0.14088657596087528, + "grad_norm": 2.7554001808166504, + "learning_rate": 1.9362348706397374e-05, + "loss": 1.2063, + "step": 2247 + }, + { + "epoch": 0.1409492758166656, + "grad_norm": 2.805783271789551, + "learning_rate": 1.9361634953837843e-05, + "loss": 1.2928, + "step": 2248 + }, + { + "epoch": 0.14101197567245596, + "grad_norm": 3.203364133834839, + "learning_rate": 1.9360920815204083e-05, + "loss": 1.2256, + "step": 2249 + }, + { + "epoch": 0.1410746755282463, + "grad_norm": 3.071117877960205, + "learning_rate": 1.9360206290525533e-05, + "loss": 1.255, + "step": 2250 + }, + { + "epoch": 0.14113737538403662, + "grad_norm": 2.9136087894439697, + "learning_rate": 1.9359491379831673e-05, + "loss": 1.2128, + "step": 2251 + }, + { + "epoch": 0.14120007523982694, + "grad_norm": 3.396852493286133, + "learning_rate": 1.9358776083151977e-05, + "loss": 1.2883, + "step": 2252 + }, + { + "epoch": 0.14126277509561727, + "grad_norm": 3.022444486618042, + "learning_rate": 1.9358060400515945e-05, + "loss": 1.0965, + "step": 2253 + }, + { + "epoch": 0.1413254749514076, + "grad_norm": 3.0246386528015137, + "learning_rate": 1.9357344331953095e-05, + "loss": 1.2031, + "step": 2254 + }, + { + "epoch": 0.14138817480719795, + "grad_norm": 2.6842026710510254, + "learning_rate": 1.9356627877492958e-05, + "loss": 1.2725, + "step": 2255 + }, + { + "epoch": 
0.14145087466298828, + "grad_norm": 2.7124578952789307, + "learning_rate": 1.9355911037165077e-05, + "loss": 1.1628, + "step": 2256 + }, + { + "epoch": 0.1415135745187786, + "grad_norm": 2.7266058921813965, + "learning_rate": 1.9355193810999015e-05, + "loss": 1.1843, + "step": 2257 + }, + { + "epoch": 0.14157627437456893, + "grad_norm": 2.7984158992767334, + "learning_rate": 1.9354476199024354e-05, + "loss": 1.2838, + "step": 2258 + }, + { + "epoch": 0.14163897423035926, + "grad_norm": 3.144507646560669, + "learning_rate": 1.9353758201270684e-05, + "loss": 1.3985, + "step": 2259 + }, + { + "epoch": 0.14170167408614961, + "grad_norm": 3.082547187805176, + "learning_rate": 1.935303981776762e-05, + "loss": 1.3029, + "step": 2260 + }, + { + "epoch": 0.14176437394193994, + "grad_norm": 3.046160936355591, + "learning_rate": 1.935232104854478e-05, + "loss": 1.1761, + "step": 2261 + }, + { + "epoch": 0.14182707379773027, + "grad_norm": 2.594625234603882, + "learning_rate": 1.9351601893631817e-05, + "loss": 1.2805, + "step": 2262 + }, + { + "epoch": 0.1418897736535206, + "grad_norm": 2.629582166671753, + "learning_rate": 1.935088235305838e-05, + "loss": 1.1601, + "step": 2263 + }, + { + "epoch": 0.14195247350931092, + "grad_norm": 2.7314188480377197, + "learning_rate": 1.9350162426854152e-05, + "loss": 1.1538, + "step": 2264 + }, + { + "epoch": 0.14201517336510125, + "grad_norm": 2.5842137336730957, + "learning_rate": 1.9349442115048812e-05, + "loss": 1.2141, + "step": 2265 + }, + { + "epoch": 0.1420778732208916, + "grad_norm": 2.78265643119812, + "learning_rate": 1.9348721417672072e-05, + "loss": 1.1622, + "step": 2266 + }, + { + "epoch": 0.14214057307668193, + "grad_norm": 3.1374778747558594, + "learning_rate": 1.9348000334753653e-05, + "loss": 1.2428, + "step": 2267 + }, + { + "epoch": 0.14220327293247226, + "grad_norm": 2.9030089378356934, + "learning_rate": 1.9347278866323293e-05, + "loss": 1.1442, + "step": 2268 + }, + { + "epoch": 0.14226597278826258, + "grad_norm": 2.6467368602752686, + "learning_rate": 1.934655701241074e-05, + "loss": 1.1843, + "step": 2269 + }, + { + "epoch": 0.1423286726440529, + "grad_norm": 2.941525936126709, + "learning_rate": 1.934583477304577e-05, + "loss": 1.2995, + "step": 2270 + }, + { + "epoch": 0.14239137249984324, + "grad_norm": 3.0008556842803955, + "learning_rate": 1.934511214825817e-05, + "loss": 1.4262, + "step": 2271 + }, + { + "epoch": 0.1424540723556336, + "grad_norm": 2.911752700805664, + "learning_rate": 1.934438913807773e-05, + "loss": 1.2061, + "step": 2272 + }, + { + "epoch": 0.14251677221142392, + "grad_norm": 2.8358538150787354, + "learning_rate": 1.934366574253428e-05, + "loss": 1.2076, + "step": 2273 + }, + { + "epoch": 0.14257947206721425, + "grad_norm": 3.1121416091918945, + "learning_rate": 1.9342941961657643e-05, + "loss": 1.1298, + "step": 2274 + }, + { + "epoch": 0.14264217192300457, + "grad_norm": 2.6645448207855225, + "learning_rate": 1.9342217795477673e-05, + "loss": 1.1336, + "step": 2275 + }, + { + "epoch": 0.1427048717787949, + "grad_norm": 2.808627128601074, + "learning_rate": 1.934149324402423e-05, + "loss": 1.2914, + "step": 2276 + }, + { + "epoch": 0.14276757163458523, + "grad_norm": 2.3364086151123047, + "learning_rate": 1.93407683073272e-05, + "loss": 1.2546, + "step": 2277 + }, + { + "epoch": 0.14283027149037558, + "grad_norm": 2.787281036376953, + "learning_rate": 1.934004298541648e-05, + "loss": 1.1885, + "step": 2278 + }, + { + "epoch": 0.1428929713461659, + "grad_norm": 3.248601198196411, + "learning_rate": 
1.9339317278321975e-05, + "loss": 1.1812, + "step": 2279 + }, + { + "epoch": 0.14295567120195624, + "grad_norm": 3.169196844100952, + "learning_rate": 1.933859118607362e-05, + "loss": 1.159, + "step": 2280 + }, + { + "epoch": 0.14301837105774656, + "grad_norm": 2.864567279815674, + "learning_rate": 1.933786470870136e-05, + "loss": 1.0676, + "step": 2281 + }, + { + "epoch": 0.1430810709135369, + "grad_norm": 2.8819735050201416, + "learning_rate": 1.9337137846235143e-05, + "loss": 1.3543, + "step": 2282 + }, + { + "epoch": 0.14314377076932724, + "grad_norm": 2.9026193618774414, + "learning_rate": 1.933641059870496e-05, + "loss": 1.2826, + "step": 2283 + }, + { + "epoch": 0.14320647062511757, + "grad_norm": 2.786284923553467, + "learning_rate": 1.9335682966140795e-05, + "loss": 1.0798, + "step": 2284 + }, + { + "epoch": 0.1432691704809079, + "grad_norm": 2.789133310317993, + "learning_rate": 1.9334954948572656e-05, + "loss": 1.1638, + "step": 2285 + }, + { + "epoch": 0.14333187033669822, + "grad_norm": 2.928499937057495, + "learning_rate": 1.9334226546030567e-05, + "loss": 1.2485, + "step": 2286 + }, + { + "epoch": 0.14339457019248855, + "grad_norm": 2.765075206756592, + "learning_rate": 1.9333497758544566e-05, + "loss": 1.2031, + "step": 2287 + }, + { + "epoch": 0.14345727004827888, + "grad_norm": 3.0717570781707764, + "learning_rate": 1.9332768586144714e-05, + "loss": 1.1464, + "step": 2288 + }, + { + "epoch": 0.14351996990406923, + "grad_norm": 2.790004014968872, + "learning_rate": 1.9332039028861076e-05, + "loss": 1.2973, + "step": 2289 + }, + { + "epoch": 0.14358266975985956, + "grad_norm": 2.919487714767456, + "learning_rate": 1.933130908672374e-05, + "loss": 1.1492, + "step": 2290 + }, + { + "epoch": 0.1436453696156499, + "grad_norm": 2.929522752761841, + "learning_rate": 1.933057875976281e-05, + "loss": 1.1157, + "step": 2291 + }, + { + "epoch": 0.14370806947144021, + "grad_norm": 2.661743402481079, + "learning_rate": 1.9329848048008405e-05, + "loss": 1.2963, + "step": 2292 + }, + { + "epoch": 0.14377076932723054, + "grad_norm": 3.1880950927734375, + "learning_rate": 1.9329116951490658e-05, + "loss": 1.1179, + "step": 2293 + }, + { + "epoch": 0.14383346918302087, + "grad_norm": 2.7985007762908936, + "learning_rate": 1.9328385470239724e-05, + "loss": 1.1856, + "step": 2294 + }, + { + "epoch": 0.14389616903881122, + "grad_norm": 2.752988815307617, + "learning_rate": 1.9327653604285762e-05, + "loss": 1.1662, + "step": 2295 + }, + { + "epoch": 0.14395886889460155, + "grad_norm": 2.795670509338379, + "learning_rate": 1.932692135365896e-05, + "loss": 1.2335, + "step": 2296 + }, + { + "epoch": 0.14402156875039188, + "grad_norm": 2.6857967376708984, + "learning_rate": 1.9326188718389512e-05, + "loss": 1.0398, + "step": 2297 + }, + { + "epoch": 0.1440842686061822, + "grad_norm": 2.9096760749816895, + "learning_rate": 1.9325455698507638e-05, + "loss": 1.2448, + "step": 2298 + }, + { + "epoch": 0.14414696846197253, + "grad_norm": 2.765874147415161, + "learning_rate": 1.932472229404356e-05, + "loss": 1.2497, + "step": 2299 + }, + { + "epoch": 0.14420966831776286, + "grad_norm": 2.880338191986084, + "learning_rate": 1.932398850502753e-05, + "loss": 1.2065, + "step": 2300 + }, + { + "epoch": 0.1442723681735532, + "grad_norm": 3.0336203575134277, + "learning_rate": 1.9323254331489806e-05, + "loss": 1.108, + "step": 2301 + }, + { + "epoch": 0.14433506802934354, + "grad_norm": 3.027841806411743, + "learning_rate": 1.9322519773460664e-05, + "loss": 1.1846, + "step": 2302 + }, + { + "epoch": 
0.14439776788513387, + "grad_norm": 2.7663986682891846, + "learning_rate": 1.93217848309704e-05, + "loss": 1.1893, + "step": 2303 + }, + { + "epoch": 0.1444604677409242, + "grad_norm": 2.959444761276245, + "learning_rate": 1.9321049504049325e-05, + "loss": 1.1161, + "step": 2304 + }, + { + "epoch": 0.14452316759671452, + "grad_norm": 2.850374221801758, + "learning_rate": 1.932031379272776e-05, + "loss": 1.2332, + "step": 2305 + }, + { + "epoch": 0.14458586745250485, + "grad_norm": 2.656127452850342, + "learning_rate": 1.9319577697036048e-05, + "loss": 1.2503, + "step": 2306 + }, + { + "epoch": 0.1446485673082952, + "grad_norm": 3.0356955528259277, + "learning_rate": 1.9318841217004543e-05, + "loss": 1.0937, + "step": 2307 + }, + { + "epoch": 0.14471126716408553, + "grad_norm": 2.9511828422546387, + "learning_rate": 1.931810435266362e-05, + "loss": 1.1801, + "step": 2308 + }, + { + "epoch": 0.14477396701987585, + "grad_norm": 3.068718910217285, + "learning_rate": 1.9317367104043665e-05, + "loss": 1.2195, + "step": 2309 + }, + { + "epoch": 0.14483666687566618, + "grad_norm": 3.1955814361572266, + "learning_rate": 1.9316629471175087e-05, + "loss": 1.0427, + "step": 2310 + }, + { + "epoch": 0.1448993667314565, + "grad_norm": 2.9997446537017822, + "learning_rate": 1.9315891454088303e-05, + "loss": 1.0823, + "step": 2311 + }, + { + "epoch": 0.14496206658724686, + "grad_norm": 3.142122507095337, + "learning_rate": 1.9315153052813743e-05, + "loss": 1.0994, + "step": 2312 + }, + { + "epoch": 0.1450247664430372, + "grad_norm": 2.6743931770324707, + "learning_rate": 1.931441426738187e-05, + "loss": 1.2461, + "step": 2313 + }, + { + "epoch": 0.14508746629882752, + "grad_norm": 2.7200076580047607, + "learning_rate": 1.9313675097823143e-05, + "loss": 1.1887, + "step": 2314 + }, + { + "epoch": 0.14515016615461784, + "grad_norm": 2.888153553009033, + "learning_rate": 1.931293554416805e-05, + "loss": 1.0272, + "step": 2315 + }, + { + "epoch": 0.14521286601040817, + "grad_norm": 3.1278879642486572, + "learning_rate": 1.9312195606447087e-05, + "loss": 1.3086, + "step": 2316 + }, + { + "epoch": 0.1452755658661985, + "grad_norm": 2.6888372898101807, + "learning_rate": 1.9311455284690772e-05, + "loss": 1.382, + "step": 2317 + }, + { + "epoch": 0.14533826572198885, + "grad_norm": 2.886679172515869, + "learning_rate": 1.9310714578929635e-05, + "loss": 1.2955, + "step": 2318 + }, + { + "epoch": 0.14540096557777918, + "grad_norm": 2.8525421619415283, + "learning_rate": 1.9309973489194222e-05, + "loss": 1.2351, + "step": 2319 + }, + { + "epoch": 0.1454636654335695, + "grad_norm": 2.969233751296997, + "learning_rate": 1.9309232015515095e-05, + "loss": 1.106, + "step": 2320 + }, + { + "epoch": 0.14552636528935983, + "grad_norm": 3.057473659515381, + "learning_rate": 1.930849015792283e-05, + "loss": 1.2074, + "step": 2321 + }, + { + "epoch": 0.14558906514515016, + "grad_norm": 2.7486722469329834, + "learning_rate": 1.930774791644803e-05, + "loss": 1.3212, + "step": 2322 + }, + { + "epoch": 0.1456517650009405, + "grad_norm": 2.8831958770751953, + "learning_rate": 1.9307005291121294e-05, + "loss": 1.1199, + "step": 2323 + }, + { + "epoch": 0.14571446485673084, + "grad_norm": 3.094770908355713, + "learning_rate": 1.9306262281973254e-05, + "loss": 1.1952, + "step": 2324 + }, + { + "epoch": 0.14577716471252117, + "grad_norm": 2.8285276889801025, + "learning_rate": 1.9305518889034552e-05, + "loss": 1.0708, + "step": 2325 + }, + { + "epoch": 0.1458398645683115, + "grad_norm": 3.0043087005615234, + "learning_rate": 
1.9304775112335847e-05, + "loss": 1.1012, + "step": 2326 + }, + { + "epoch": 0.14590256442410182, + "grad_norm": 2.9240362644195557, + "learning_rate": 1.930403095190781e-05, + "loss": 1.1434, + "step": 2327 + }, + { + "epoch": 0.14596526427989215, + "grad_norm": 2.9622817039489746, + "learning_rate": 1.930328640778113e-05, + "loss": 1.0828, + "step": 2328 + }, + { + "epoch": 0.14602796413568248, + "grad_norm": 3.1308937072753906, + "learning_rate": 1.930254147998651e-05, + "loss": 1.3004, + "step": 2329 + }, + { + "epoch": 0.14609066399147283, + "grad_norm": 2.733914375305176, + "learning_rate": 1.9301796168554675e-05, + "loss": 1.2082, + "step": 2330 + }, + { + "epoch": 0.14615336384726316, + "grad_norm": 2.971262216567993, + "learning_rate": 1.9301050473516358e-05, + "loss": 1.28, + "step": 2331 + }, + { + "epoch": 0.14621606370305348, + "grad_norm": 3.0361392498016357, + "learning_rate": 1.9300304394902315e-05, + "loss": 1.343, + "step": 2332 + }, + { + "epoch": 0.1462787635588438, + "grad_norm": 2.575258731842041, + "learning_rate": 1.9299557932743313e-05, + "loss": 1.208, + "step": 2333 + }, + { + "epoch": 0.14634146341463414, + "grad_norm": 3.0563771724700928, + "learning_rate": 1.9298811087070134e-05, + "loss": 1.0945, + "step": 2334 + }, + { + "epoch": 0.14640416327042446, + "grad_norm": 3.0727508068084717, + "learning_rate": 1.929806385791358e-05, + "loss": 1.1796, + "step": 2335 + }, + { + "epoch": 0.14646686312621482, + "grad_norm": 2.8527209758758545, + "learning_rate": 1.9297316245304468e-05, + "loss": 1.2383, + "step": 2336 + }, + { + "epoch": 0.14652956298200515, + "grad_norm": 3.2361831665039062, + "learning_rate": 1.9296568249273628e-05, + "loss": 1.0656, + "step": 2337 + }, + { + "epoch": 0.14659226283779547, + "grad_norm": 3.038855791091919, + "learning_rate": 1.9295819869851904e-05, + "loss": 1.2128, + "step": 2338 + }, + { + "epoch": 0.1466549626935858, + "grad_norm": 3.8822662830352783, + "learning_rate": 1.9295071107070164e-05, + "loss": 1.3194, + "step": 2339 + }, + { + "epoch": 0.14671766254937613, + "grad_norm": 3.0067055225372314, + "learning_rate": 1.929432196095929e-05, + "loss": 1.3355, + "step": 2340 + }, + { + "epoch": 0.14678036240516648, + "grad_norm": 2.9706122875213623, + "learning_rate": 1.9293572431550166e-05, + "loss": 1.0523, + "step": 2341 + }, + { + "epoch": 0.1468430622609568, + "grad_norm": 2.583209991455078, + "learning_rate": 1.9292822518873714e-05, + "loss": 1.2365, + "step": 2342 + }, + { + "epoch": 0.14690576211674714, + "grad_norm": 3.008408308029175, + "learning_rate": 1.9292072222960852e-05, + "loss": 1.2029, + "step": 2343 + }, + { + "epoch": 0.14696846197253746, + "grad_norm": 2.6335642337799072, + "learning_rate": 1.929132154384253e-05, + "loss": 1.1971, + "step": 2344 + }, + { + "epoch": 0.1470311618283278, + "grad_norm": 2.602595567703247, + "learning_rate": 1.92905704815497e-05, + "loss": 1.2164, + "step": 2345 + }, + { + "epoch": 0.14709386168411812, + "grad_norm": 3.063446044921875, + "learning_rate": 1.9289819036113333e-05, + "loss": 1.1389, + "step": 2346 + }, + { + "epoch": 0.14715656153990847, + "grad_norm": 3.088221311569214, + "learning_rate": 1.9289067207564425e-05, + "loss": 1.0509, + "step": 2347 + }, + { + "epoch": 0.1472192613956988, + "grad_norm": 2.7518482208251953, + "learning_rate": 1.9288314995933985e-05, + "loss": 1.1463, + "step": 2348 + }, + { + "epoch": 0.14728196125148912, + "grad_norm": 2.862886428833008, + "learning_rate": 1.9287562401253023e-05, + "loss": 1.3559, + "step": 2349 + }, + { + "epoch": 
0.14734466110727945, + "grad_norm": 2.7971503734588623, + "learning_rate": 1.928680942355259e-05, + "loss": 1.1787, + "step": 2350 + }, + { + "epoch": 0.14740736096306978, + "grad_norm": 2.687337875366211, + "learning_rate": 1.928605606286372e-05, + "loss": 1.1977, + "step": 2351 + }, + { + "epoch": 0.1474700608188601, + "grad_norm": 2.82012677192688, + "learning_rate": 1.92853023192175e-05, + "loss": 1.2403, + "step": 2352 + }, + { + "epoch": 0.14753276067465046, + "grad_norm": 2.96662974357605, + "learning_rate": 1.9284548192645006e-05, + "loss": 1.2433, + "step": 2353 + }, + { + "epoch": 0.1475954605304408, + "grad_norm": 2.6610591411590576, + "learning_rate": 1.9283793683177335e-05, + "loss": 1.2368, + "step": 2354 + }, + { + "epoch": 0.1476581603862311, + "grad_norm": 2.542185068130493, + "learning_rate": 1.9283038790845612e-05, + "loss": 1.1844, + "step": 2355 + }, + { + "epoch": 0.14772086024202144, + "grad_norm": 2.9807698726654053, + "learning_rate": 1.928228351568096e-05, + "loss": 1.0086, + "step": 2356 + }, + { + "epoch": 0.14778356009781177, + "grad_norm": 2.658735752105713, + "learning_rate": 1.9281527857714533e-05, + "loss": 1.2037, + "step": 2357 + }, + { + "epoch": 0.1478462599536021, + "grad_norm": 3.073124408721924, + "learning_rate": 1.928077181697749e-05, + "loss": 1.2917, + "step": 2358 + }, + { + "epoch": 0.14790895980939245, + "grad_norm": 2.9323620796203613, + "learning_rate": 1.9280015393501014e-05, + "loss": 1.1529, + "step": 2359 + }, + { + "epoch": 0.14797165966518278, + "grad_norm": 3.2224810123443604, + "learning_rate": 1.9279258587316297e-05, + "loss": 1.0682, + "step": 2360 + }, + { + "epoch": 0.1480343595209731, + "grad_norm": 2.899503469467163, + "learning_rate": 1.9278501398454552e-05, + "loss": 1.1557, + "step": 2361 + }, + { + "epoch": 0.14809705937676343, + "grad_norm": 3.315298557281494, + "learning_rate": 1.9277743826947002e-05, + "loss": 1.2523, + "step": 2362 + }, + { + "epoch": 0.14815975923255376, + "grad_norm": 3.0521459579467773, + "learning_rate": 1.927698587282489e-05, + "loss": 1.1507, + "step": 2363 + }, + { + "epoch": 0.1482224590883441, + "grad_norm": 3.5609889030456543, + "learning_rate": 1.927622753611948e-05, + "loss": 1.2908, + "step": 2364 + }, + { + "epoch": 0.14828515894413444, + "grad_norm": 2.7996838092803955, + "learning_rate": 1.9275468816862038e-05, + "loss": 1.2116, + "step": 2365 + }, + { + "epoch": 0.14834785879992476, + "grad_norm": 2.838366746902466, + "learning_rate": 1.927470971508386e-05, + "loss": 1.005, + "step": 2366 + }, + { + "epoch": 0.1484105586557151, + "grad_norm": 2.6365466117858887, + "learning_rate": 1.9273950230816246e-05, + "loss": 1.4043, + "step": 2367 + }, + { + "epoch": 0.14847325851150542, + "grad_norm": 2.9667863845825195, + "learning_rate": 1.927319036409052e-05, + "loss": 1.175, + "step": 2368 + }, + { + "epoch": 0.14853595836729575, + "grad_norm": 2.8865323066711426, + "learning_rate": 1.9272430114938018e-05, + "loss": 1.1062, + "step": 2369 + }, + { + "epoch": 0.1485986582230861, + "grad_norm": 2.831176996231079, + "learning_rate": 1.927166948339009e-05, + "loss": 1.2073, + "step": 2370 + }, + { + "epoch": 0.14866135807887643, + "grad_norm": 2.704817295074463, + "learning_rate": 1.9270908469478115e-05, + "loss": 1.1753, + "step": 2371 + }, + { + "epoch": 0.14872405793466675, + "grad_norm": 2.8885464668273926, + "learning_rate": 1.927014707323346e-05, + "loss": 1.3353, + "step": 2372 + }, + { + "epoch": 0.14878675779045708, + "grad_norm": 2.7233963012695312, + "learning_rate": 
1.9269385294687544e-05, + "loss": 1.2008, + "step": 2373 + }, + { + "epoch": 0.1488494576462474, + "grad_norm": 3.2420971393585205, + "learning_rate": 1.9268623133871772e-05, + "loss": 1.0836, + "step": 2374 + }, + { + "epoch": 0.14891215750203773, + "grad_norm": 2.776151180267334, + "learning_rate": 1.9267860590817572e-05, + "loss": 1.1916, + "step": 2375 + }, + { + "epoch": 0.1489748573578281, + "grad_norm": 2.5789637565612793, + "learning_rate": 1.92670976655564e-05, + "loss": 1.1769, + "step": 2376 + }, + { + "epoch": 0.14903755721361842, + "grad_norm": 2.722646474838257, + "learning_rate": 1.9266334358119718e-05, + "loss": 1.1739, + "step": 2377 + }, + { + "epoch": 0.14910025706940874, + "grad_norm": 2.900888681411743, + "learning_rate": 1.9265570668538997e-05, + "loss": 1.2891, + "step": 2378 + }, + { + "epoch": 0.14916295692519907, + "grad_norm": 2.6166012287139893, + "learning_rate": 1.9264806596845742e-05, + "loss": 1.302, + "step": 2379 + }, + { + "epoch": 0.1492256567809894, + "grad_norm": 2.8788695335388184, + "learning_rate": 1.9264042143071456e-05, + "loss": 1.2737, + "step": 2380 + }, + { + "epoch": 0.14928835663677972, + "grad_norm": 3.004357099533081, + "learning_rate": 1.926327730724767e-05, + "loss": 1.1237, + "step": 2381 + }, + { + "epoch": 0.14935105649257008, + "grad_norm": 2.823988676071167, + "learning_rate": 1.9262512089405917e-05, + "loss": 1.3206, + "step": 2382 + }, + { + "epoch": 0.1494137563483604, + "grad_norm": 2.950429677963257, + "learning_rate": 1.9261746489577767e-05, + "loss": 1.3161, + "step": 2383 + }, + { + "epoch": 0.14947645620415073, + "grad_norm": 2.9542412757873535, + "learning_rate": 1.9260980507794788e-05, + "loss": 1.0751, + "step": 2384 + }, + { + "epoch": 0.14953915605994106, + "grad_norm": 3.335322618484497, + "learning_rate": 1.9260214144088562e-05, + "loss": 1.142, + "step": 2385 + }, + { + "epoch": 0.14960185591573139, + "grad_norm": 2.8433685302734375, + "learning_rate": 1.9259447398490705e-05, + "loss": 1.3198, + "step": 2386 + }, + { + "epoch": 0.1496645557715217, + "grad_norm": 2.9299511909484863, + "learning_rate": 1.925868027103283e-05, + "loss": 1.2921, + "step": 2387 + }, + { + "epoch": 0.14972725562731207, + "grad_norm": 2.9076480865478516, + "learning_rate": 1.925791276174658e-05, + "loss": 1.2004, + "step": 2388 + }, + { + "epoch": 0.1497899554831024, + "grad_norm": 2.906336545944214, + "learning_rate": 1.92571448706636e-05, + "loss": 1.1513, + "step": 2389 + }, + { + "epoch": 0.14985265533889272, + "grad_norm": 2.8393497467041016, + "learning_rate": 1.9256376597815565e-05, + "loss": 1.2516, + "step": 2390 + }, + { + "epoch": 0.14991535519468305, + "grad_norm": 3.1787047386169434, + "learning_rate": 1.925560794323415e-05, + "loss": 1.085, + "step": 2391 + }, + { + "epoch": 0.14997805505047337, + "grad_norm": 2.770097017288208, + "learning_rate": 1.9254838906951067e-05, + "loss": 1.2091, + "step": 2392 + }, + { + "epoch": 0.15004075490626373, + "grad_norm": 2.8954975605010986, + "learning_rate": 1.9254069488998013e-05, + "loss": 1.2401, + "step": 2393 + }, + { + "epoch": 0.15010345476205406, + "grad_norm": 3.0445396900177, + "learning_rate": 1.9253299689406738e-05, + "loss": 1.2617, + "step": 2394 + }, + { + "epoch": 0.15016615461784438, + "grad_norm": 3.296513080596924, + "learning_rate": 1.9252529508208976e-05, + "loss": 1.0804, + "step": 2395 + }, + { + "epoch": 0.1502288544736347, + "grad_norm": 2.8445470333099365, + "learning_rate": 1.9251758945436494e-05, + "loss": 1.2457, + "step": 2396 + }, + { + "epoch": 
0.15029155432942504, + "grad_norm": 2.687607526779175, + "learning_rate": 1.9250988001121068e-05, + "loss": 1.3357, + "step": 2397 + }, + { + "epoch": 0.15035425418521536, + "grad_norm": 3.1604886054992676, + "learning_rate": 1.9250216675294494e-05, + "loss": 1.2231, + "step": 2398 + }, + { + "epoch": 0.15041695404100572, + "grad_norm": 2.6118836402893066, + "learning_rate": 1.9249444967988577e-05, + "loss": 1.218, + "step": 2399 + }, + { + "epoch": 0.15047965389679605, + "grad_norm": 2.952923536300659, + "learning_rate": 1.924867287923515e-05, + "loss": 1.2892, + "step": 2400 + }, + { + "epoch": 0.15054235375258637, + "grad_norm": 2.5081796646118164, + "learning_rate": 1.9247900409066047e-05, + "loss": 1.1642, + "step": 2401 + }, + { + "epoch": 0.1506050536083767, + "grad_norm": 3.10925030708313, + "learning_rate": 1.9247127557513126e-05, + "loss": 1.2888, + "step": 2402 + }, + { + "epoch": 0.15066775346416703, + "grad_norm": 2.807929277420044, + "learning_rate": 1.9246354324608264e-05, + "loss": 1.1596, + "step": 2403 + }, + { + "epoch": 0.15073045331995735, + "grad_norm": 2.8940062522888184, + "learning_rate": 1.9245580710383344e-05, + "loss": 1.2643, + "step": 2404 + }, + { + "epoch": 0.1507931531757477, + "grad_norm": 2.797881841659546, + "learning_rate": 1.9244806714870273e-05, + "loss": 1.384, + "step": 2405 + }, + { + "epoch": 0.15085585303153803, + "grad_norm": 2.7540318965911865, + "learning_rate": 1.9244032338100968e-05, + "loss": 1.2758, + "step": 2406 + }, + { + "epoch": 0.15091855288732836, + "grad_norm": 3.347339153289795, + "learning_rate": 1.9243257580107365e-05, + "loss": 1.0985, + "step": 2407 + }, + { + "epoch": 0.1509812527431187, + "grad_norm": 2.8310749530792236, + "learning_rate": 1.9242482440921417e-05, + "loss": 1.196, + "step": 2408 + }, + { + "epoch": 0.15104395259890901, + "grad_norm": 2.7666971683502197, + "learning_rate": 1.924170692057509e-05, + "loss": 1.1926, + "step": 2409 + }, + { + "epoch": 0.15110665245469934, + "grad_norm": 2.7232189178466797, + "learning_rate": 1.9240931019100365e-05, + "loss": 1.2661, + "step": 2410 + }, + { + "epoch": 0.1511693523104897, + "grad_norm": 2.9780733585357666, + "learning_rate": 1.9240154736529242e-05, + "loss": 1.2424, + "step": 2411 + }, + { + "epoch": 0.15123205216628002, + "grad_norm": 3.044715642929077, + "learning_rate": 1.9239378072893735e-05, + "loss": 1.0077, + "step": 2412 + }, + { + "epoch": 0.15129475202207035, + "grad_norm": 2.788191556930542, + "learning_rate": 1.923860102822587e-05, + "loss": 1.3007, + "step": 2413 + }, + { + "epoch": 0.15135745187786068, + "grad_norm": 2.6820833683013916, + "learning_rate": 1.92378236025577e-05, + "loss": 1.2005, + "step": 2414 + }, + { + "epoch": 0.151420151733651, + "grad_norm": 2.763310670852661, + "learning_rate": 1.9237045795921277e-05, + "loss": 1.3478, + "step": 2415 + }, + { + "epoch": 0.15148285158944133, + "grad_norm": 2.7536139488220215, + "learning_rate": 1.9236267608348682e-05, + "loss": 1.0996, + "step": 2416 + }, + { + "epoch": 0.15154555144523169, + "grad_norm": 2.8829870223999023, + "learning_rate": 1.923548903987201e-05, + "loss": 1.1784, + "step": 2417 + }, + { + "epoch": 0.151608251301022, + "grad_norm": 2.7234764099121094, + "learning_rate": 1.9234710090523365e-05, + "loss": 1.1364, + "step": 2418 + }, + { + "epoch": 0.15167095115681234, + "grad_norm": 2.746642589569092, + "learning_rate": 1.923393076033487e-05, + "loss": 1.0242, + "step": 2419 + }, + { + "epoch": 0.15173365101260267, + "grad_norm": 2.888486623764038, + "learning_rate": 
1.9233151049338673e-05, + "loss": 1.2322, + "step": 2420 + }, + { + "epoch": 0.151796350868393, + "grad_norm": 2.870211362838745, + "learning_rate": 1.923237095756692e-05, + "loss": 1.3332, + "step": 2421 + }, + { + "epoch": 0.15185905072418335, + "grad_norm": 2.985626697540283, + "learning_rate": 1.9231590485051788e-05, + "loss": 1.1875, + "step": 2422 + }, + { + "epoch": 0.15192175057997367, + "grad_norm": 3.263702630996704, + "learning_rate": 1.9230809631825457e-05, + "loss": 1.2321, + "step": 2423 + }, + { + "epoch": 0.151984450435764, + "grad_norm": 3.141171455383301, + "learning_rate": 1.9230028397920138e-05, + "loss": 1.2004, + "step": 2424 + }, + { + "epoch": 0.15204715029155433, + "grad_norm": 2.7623178958892822, + "learning_rate": 1.922924678336804e-05, + "loss": 1.1634, + "step": 2425 + }, + { + "epoch": 0.15210985014734466, + "grad_norm": 3.3462882041931152, + "learning_rate": 1.9228464788201405e-05, + "loss": 1.2562, + "step": 2426 + }, + { + "epoch": 0.15217255000313498, + "grad_norm": 2.8475470542907715, + "learning_rate": 1.9227682412452478e-05, + "loss": 1.155, + "step": 2427 + }, + { + "epoch": 0.15223524985892534, + "grad_norm": 3.0941176414489746, + "learning_rate": 1.9226899656153528e-05, + "loss": 1.0816, + "step": 2428 + }, + { + "epoch": 0.15229794971471566, + "grad_norm": 2.9883370399475098, + "learning_rate": 1.922611651933683e-05, + "loss": 1.1322, + "step": 2429 + }, + { + "epoch": 0.152360649570506, + "grad_norm": 2.7294607162475586, + "learning_rate": 1.9225333002034686e-05, + "loss": 1.295, + "step": 2430 + }, + { + "epoch": 0.15242334942629632, + "grad_norm": 2.7831242084503174, + "learning_rate": 1.9224549104279405e-05, + "loss": 1.1991, + "step": 2431 + }, + { + "epoch": 0.15248604928208664, + "grad_norm": 2.802464485168457, + "learning_rate": 1.9223764826103317e-05, + "loss": 1.1307, + "step": 2432 + }, + { + "epoch": 0.15254874913787697, + "grad_norm": 3.074810743331909, + "learning_rate": 1.9222980167538764e-05, + "loss": 1.0296, + "step": 2433 + }, + { + "epoch": 0.15261144899366733, + "grad_norm": 3.23614501953125, + "learning_rate": 1.9222195128618108e-05, + "loss": 0.9946, + "step": 2434 + }, + { + "epoch": 0.15267414884945765, + "grad_norm": 3.4702117443084717, + "learning_rate": 1.922140970937372e-05, + "loss": 1.0941, + "step": 2435 + }, + { + "epoch": 0.15273684870524798, + "grad_norm": 2.8639402389526367, + "learning_rate": 1.922062390983799e-05, + "loss": 1.127, + "step": 2436 + }, + { + "epoch": 0.1527995485610383, + "grad_norm": 3.153754234313965, + "learning_rate": 1.921983773004333e-05, + "loss": 1.1146, + "step": 2437 + }, + { + "epoch": 0.15286224841682863, + "grad_norm": 3.0183358192443848, + "learning_rate": 1.9219051170022162e-05, + "loss": 1.2501, + "step": 2438 + }, + { + "epoch": 0.15292494827261896, + "grad_norm": 2.9327054023742676, + "learning_rate": 1.9218264229806917e-05, + "loss": 1.0461, + "step": 2439 + }, + { + "epoch": 0.15298764812840931, + "grad_norm": 2.6985039710998535, + "learning_rate": 1.9217476909430054e-05, + "loss": 1.3347, + "step": 2440 + }, + { + "epoch": 0.15305034798419964, + "grad_norm": 3.024998664855957, + "learning_rate": 1.9216689208924043e-05, + "loss": 1.1521, + "step": 2441 + }, + { + "epoch": 0.15311304783998997, + "grad_norm": 2.8399014472961426, + "learning_rate": 1.9215901128321364e-05, + "loss": 1.1734, + "step": 2442 + }, + { + "epoch": 0.1531757476957803, + "grad_norm": 3.007298231124878, + "learning_rate": 1.921511266765452e-05, + "loss": 1.2422, + "step": 2443 + }, + { + "epoch": 
0.15323844755157062, + "grad_norm": 3.048468828201294, + "learning_rate": 1.9214323826956026e-05, + "loss": 1.2914, + "step": 2444 + }, + { + "epoch": 0.15330114740736095, + "grad_norm": 2.817063808441162, + "learning_rate": 1.921353460625842e-05, + "loss": 1.2421, + "step": 2445 + }, + { + "epoch": 0.1533638472631513, + "grad_norm": 2.8208730220794678, + "learning_rate": 1.9212745005594242e-05, + "loss": 1.1838, + "step": 2446 + }, + { + "epoch": 0.15342654711894163, + "grad_norm": 2.936661958694458, + "learning_rate": 1.9211955024996056e-05, + "loss": 1.2243, + "step": 2447 + }, + { + "epoch": 0.15348924697473196, + "grad_norm": 2.777331590652466, + "learning_rate": 1.9211164664496444e-05, + "loss": 1.2612, + "step": 2448 + }, + { + "epoch": 0.15355194683052228, + "grad_norm": 2.644570827484131, + "learning_rate": 1.9210373924128e-05, + "loss": 1.3419, + "step": 2449 + }, + { + "epoch": 0.1536146466863126, + "grad_norm": 2.7693896293640137, + "learning_rate": 1.920958280392333e-05, + "loss": 1.1789, + "step": 2450 + }, + { + "epoch": 0.15367734654210297, + "grad_norm": 2.60111927986145, + "learning_rate": 1.9208791303915063e-05, + "loss": 1.3232, + "step": 2451 + }, + { + "epoch": 0.1537400463978933, + "grad_norm": 2.4274322986602783, + "learning_rate": 1.9207999424135845e-05, + "loss": 1.2768, + "step": 2452 + }, + { + "epoch": 0.15380274625368362, + "grad_norm": 2.7315926551818848, + "learning_rate": 1.9207207164618323e-05, + "loss": 1.2702, + "step": 2453 + }, + { + "epoch": 0.15386544610947395, + "grad_norm": 2.6485791206359863, + "learning_rate": 1.920641452539518e-05, + "loss": 1.3728, + "step": 2454 + }, + { + "epoch": 0.15392814596526427, + "grad_norm": 2.746910810470581, + "learning_rate": 1.9205621506499097e-05, + "loss": 1.1765, + "step": 2455 + }, + { + "epoch": 0.1539908458210546, + "grad_norm": 2.463325262069702, + "learning_rate": 1.920482810796278e-05, + "loss": 1.119, + "step": 2456 + }, + { + "epoch": 0.15405354567684496, + "grad_norm": 2.8843774795532227, + "learning_rate": 1.9204034329818954e-05, + "loss": 1.1958, + "step": 2457 + }, + { + "epoch": 0.15411624553263528, + "grad_norm": 3.1730377674102783, + "learning_rate": 1.920324017210035e-05, + "loss": 1.1783, + "step": 2458 + }, + { + "epoch": 0.1541789453884256, + "grad_norm": 2.8808774948120117, + "learning_rate": 1.9202445634839716e-05, + "loss": 1.0884, + "step": 2459 + }, + { + "epoch": 0.15424164524421594, + "grad_norm": 2.905910015106201, + "learning_rate": 1.9201650718069826e-05, + "loss": 1.2857, + "step": 2460 + }, + { + "epoch": 0.15430434510000626, + "grad_norm": 2.8113865852355957, + "learning_rate": 1.9200855421823456e-05, + "loss": 1.2783, + "step": 2461 + }, + { + "epoch": 0.1543670449557966, + "grad_norm": 2.945671796798706, + "learning_rate": 1.9200059746133407e-05, + "loss": 1.3347, + "step": 2462 + }, + { + "epoch": 0.15442974481158694, + "grad_norm": 2.8884873390197754, + "learning_rate": 1.9199263691032494e-05, + "loss": 1.2117, + "step": 2463 + }, + { + "epoch": 0.15449244466737727, + "grad_norm": 3.0177736282348633, + "learning_rate": 1.9198467256553543e-05, + "loss": 0.9674, + "step": 2464 + }, + { + "epoch": 0.1545551445231676, + "grad_norm": 2.951321601867676, + "learning_rate": 1.91976704427294e-05, + "loss": 1.0835, + "step": 2465 + }, + { + "epoch": 0.15461784437895792, + "grad_norm": 2.954315662384033, + "learning_rate": 1.9196873249592927e-05, + "loss": 0.9641, + "step": 2466 + }, + { + "epoch": 0.15468054423474825, + "grad_norm": 2.9745123386383057, + "learning_rate": 
1.9196075677177e-05, + "loss": 1.2894, + "step": 2467 + }, + { + "epoch": 0.15474324409053858, + "grad_norm": 2.7328686714172363, + "learning_rate": 1.919527772551451e-05, + "loss": 1.2588, + "step": 2468 + }, + { + "epoch": 0.15480594394632893, + "grad_norm": 2.7804081439971924, + "learning_rate": 1.9194479394638362e-05, + "loss": 1.1209, + "step": 2469 + }, + { + "epoch": 0.15486864380211926, + "grad_norm": 2.7189693450927734, + "learning_rate": 1.9193680684581485e-05, + "loss": 1.2093, + "step": 2470 + }, + { + "epoch": 0.1549313436579096, + "grad_norm": 2.848973512649536, + "learning_rate": 1.9192881595376818e-05, + "loss": 1.3179, + "step": 2471 + }, + { + "epoch": 0.15499404351369991, + "grad_norm": 2.923966646194458, + "learning_rate": 1.9192082127057307e-05, + "loss": 1.2159, + "step": 2472 + }, + { + "epoch": 0.15505674336949024, + "grad_norm": 3.288614273071289, + "learning_rate": 1.9191282279655935e-05, + "loss": 1.1191, + "step": 2473 + }, + { + "epoch": 0.1551194432252806, + "grad_norm": 3.1646788120269775, + "learning_rate": 1.9190482053205673e-05, + "loss": 1.1163, + "step": 2474 + }, + { + "epoch": 0.15518214308107092, + "grad_norm": 2.9343626499176025, + "learning_rate": 1.918968144773953e-05, + "loss": 1.0494, + "step": 2475 + }, + { + "epoch": 0.15524484293686125, + "grad_norm": 3.142833948135376, + "learning_rate": 1.9188880463290526e-05, + "loss": 1.2542, + "step": 2476 + }, + { + "epoch": 0.15530754279265158, + "grad_norm": 3.041891098022461, + "learning_rate": 1.9188079099891693e-05, + "loss": 1.3642, + "step": 2477 + }, + { + "epoch": 0.1553702426484419, + "grad_norm": 2.6680469512939453, + "learning_rate": 1.9187277357576072e-05, + "loss": 1.1815, + "step": 2478 + }, + { + "epoch": 0.15543294250423223, + "grad_norm": 2.8225786685943604, + "learning_rate": 1.9186475236376733e-05, + "loss": 1.2529, + "step": 2479 + }, + { + "epoch": 0.15549564236002258, + "grad_norm": 2.5814998149871826, + "learning_rate": 1.9185672736326756e-05, + "loss": 1.17, + "step": 2480 + }, + { + "epoch": 0.1555583422158129, + "grad_norm": 2.951326370239258, + "learning_rate": 1.9184869857459233e-05, + "loss": 0.9906, + "step": 2481 + }, + { + "epoch": 0.15562104207160324, + "grad_norm": 2.516123056411743, + "learning_rate": 1.9184066599807274e-05, + "loss": 1.2225, + "step": 2482 + }, + { + "epoch": 0.15568374192739357, + "grad_norm": 2.943465232849121, + "learning_rate": 1.9183262963404008e-05, + "loss": 1.0842, + "step": 2483 + }, + { + "epoch": 0.1557464417831839, + "grad_norm": 2.8368794918060303, + "learning_rate": 1.9182458948282576e-05, + "loss": 1.2641, + "step": 2484 + }, + { + "epoch": 0.15580914163897422, + "grad_norm": 2.600468635559082, + "learning_rate": 1.918165455447614e-05, + "loss": 1.2755, + "step": 2485 + }, + { + "epoch": 0.15587184149476457, + "grad_norm": 2.9064669609069824, + "learning_rate": 1.9180849782017865e-05, + "loss": 1.2554, + "step": 2486 + }, + { + "epoch": 0.1559345413505549, + "grad_norm": 2.8801844120025635, + "learning_rate": 1.9180044630940947e-05, + "loss": 1.177, + "step": 2487 + }, + { + "epoch": 0.15599724120634523, + "grad_norm": 2.970581531524658, + "learning_rate": 1.9179239101278585e-05, + "loss": 1.1543, + "step": 2488 + }, + { + "epoch": 0.15605994106213555, + "grad_norm": 2.854703426361084, + "learning_rate": 1.9178433193064002e-05, + "loss": 1.2561, + "step": 2489 + }, + { + "epoch": 0.15612264091792588, + "grad_norm": 2.8486204147338867, + "learning_rate": 1.9177626906330436e-05, + "loss": 1.2238, + "step": 2490 + }, + { + "epoch": 
0.1561853407737162, + "grad_norm": 3.0483670234680176, + "learning_rate": 1.917682024111113e-05, + "loss": 1.243, + "step": 2491 + }, + { + "epoch": 0.15624804062950656, + "grad_norm": 2.904285430908203, + "learning_rate": 1.917601319743936e-05, + "loss": 1.2178, + "step": 2492 + }, + { + "epoch": 0.1563107404852969, + "grad_norm": 2.841224193572998, + "learning_rate": 1.9175205775348406e-05, + "loss": 1.21, + "step": 2493 + }, + { + "epoch": 0.15637344034108722, + "grad_norm": 3.0699880123138428, + "learning_rate": 1.9174397974871563e-05, + "loss": 1.1863, + "step": 2494 + }, + { + "epoch": 0.15643614019687754, + "grad_norm": 3.145326614379883, + "learning_rate": 1.917358979604215e-05, + "loss": 1.187, + "step": 2495 + }, + { + "epoch": 0.15649884005266787, + "grad_norm": 2.6771674156188965, + "learning_rate": 1.917278123889349e-05, + "loss": 1.0989, + "step": 2496 + }, + { + "epoch": 0.1565615399084582, + "grad_norm": 2.846752882003784, + "learning_rate": 1.9171972303458934e-05, + "loss": 1.1976, + "step": 2497 + }, + { + "epoch": 0.15662423976424855, + "grad_norm": 2.7809500694274902, + "learning_rate": 1.9171162989771836e-05, + "loss": 1.379, + "step": 2498 + }, + { + "epoch": 0.15668693962003888, + "grad_norm": 2.6564180850982666, + "learning_rate": 1.9170353297865577e-05, + "loss": 1.2554, + "step": 2499 + }, + { + "epoch": 0.1567496394758292, + "grad_norm": 2.7142200469970703, + "learning_rate": 1.9169543227773547e-05, + "loss": 1.0521, + "step": 2500 + }, + { + "epoch": 0.15681233933161953, + "grad_norm": 2.9882099628448486, + "learning_rate": 1.9168732779529157e-05, + "loss": 1.323, + "step": 2501 + }, + { + "epoch": 0.15687503918740986, + "grad_norm": 2.958988666534424, + "learning_rate": 1.9167921953165827e-05, + "loss": 1.1114, + "step": 2502 + }, + { + "epoch": 0.15693773904320021, + "grad_norm": 2.928309917449951, + "learning_rate": 1.916711074871699e-05, + "loss": 1.1421, + "step": 2503 + }, + { + "epoch": 0.15700043889899054, + "grad_norm": 2.6479740142822266, + "learning_rate": 1.916629916621611e-05, + "loss": 1.1794, + "step": 2504 + }, + { + "epoch": 0.15706313875478087, + "grad_norm": 3.0905814170837402, + "learning_rate": 1.916548720569665e-05, + "loss": 1.2731, + "step": 2505 + }, + { + "epoch": 0.1571258386105712, + "grad_norm": 3.267104387283325, + "learning_rate": 1.91646748671921e-05, + "loss": 1.1181, + "step": 2506 + }, + { + "epoch": 0.15718853846636152, + "grad_norm": 2.709012985229492, + "learning_rate": 1.9163862150735958e-05, + "loss": 1.115, + "step": 2507 + }, + { + "epoch": 0.15725123832215185, + "grad_norm": 2.7813303470611572, + "learning_rate": 1.9163049056361735e-05, + "loss": 1.139, + "step": 2508 + }, + { + "epoch": 0.1573139381779422, + "grad_norm": 2.822059392929077, + "learning_rate": 1.9162235584102973e-05, + "loss": 1.2448, + "step": 2509 + }, + { + "epoch": 0.15737663803373253, + "grad_norm": 2.7050797939300537, + "learning_rate": 1.9161421733993216e-05, + "loss": 1.2793, + "step": 2510 + }, + { + "epoch": 0.15743933788952286, + "grad_norm": 3.1722958087921143, + "learning_rate": 1.9160607506066028e-05, + "loss": 1.1748, + "step": 2511 + }, + { + "epoch": 0.15750203774531318, + "grad_norm": 2.683439254760742, + "learning_rate": 1.915979290035498e-05, + "loss": 1.2935, + "step": 2512 + }, + { + "epoch": 0.1575647376011035, + "grad_norm": 2.8791120052337646, + "learning_rate": 1.9158977916893678e-05, + "loss": 1.1149, + "step": 2513 + }, + { + "epoch": 0.15762743745689384, + "grad_norm": 2.7614927291870117, + "learning_rate": 
1.9158162555715726e-05, + "loss": 1.0299, + "step": 2514 + }, + { + "epoch": 0.1576901373126842, + "grad_norm": 2.88627290725708, + "learning_rate": 1.915734681685475e-05, + "loss": 1.2017, + "step": 2515 + }, + { + "epoch": 0.15775283716847452, + "grad_norm": 2.8285748958587646, + "learning_rate": 1.9156530700344392e-05, + "loss": 1.0358, + "step": 2516 + }, + { + "epoch": 0.15781553702426485, + "grad_norm": 2.982661008834839, + "learning_rate": 1.9155714206218308e-05, + "loss": 1.1579, + "step": 2517 + }, + { + "epoch": 0.15787823688005517, + "grad_norm": 3.2040815353393555, + "learning_rate": 1.915489733451017e-05, + "loss": 1.1125, + "step": 2518 + }, + { + "epoch": 0.1579409367358455, + "grad_norm": 2.680677890777588, + "learning_rate": 1.9154080085253665e-05, + "loss": 1.2044, + "step": 2519 + }, + { + "epoch": 0.15800363659163583, + "grad_norm": 2.8071558475494385, + "learning_rate": 1.9153262458482496e-05, + "loss": 1.1725, + "step": 2520 + }, + { + "epoch": 0.15806633644742618, + "grad_norm": 2.956368923187256, + "learning_rate": 1.9152444454230387e-05, + "loss": 1.0859, + "step": 2521 + }, + { + "epoch": 0.1581290363032165, + "grad_norm": 3.126951217651367, + "learning_rate": 1.9151626072531068e-05, + "loss": 1.1188, + "step": 2522 + }, + { + "epoch": 0.15819173615900683, + "grad_norm": 2.624000072479248, + "learning_rate": 1.9150807313418293e-05, + "loss": 1.1199, + "step": 2523 + }, + { + "epoch": 0.15825443601479716, + "grad_norm": 3.0121145248413086, + "learning_rate": 1.914998817692582e-05, + "loss": 1.3541, + "step": 2524 + }, + { + "epoch": 0.1583171358705875, + "grad_norm": 2.8445546627044678, + "learning_rate": 1.9149168663087436e-05, + "loss": 1.2849, + "step": 2525 + }, + { + "epoch": 0.15837983572637782, + "grad_norm": 3.0838685035705566, + "learning_rate": 1.9148348771936936e-05, + "loss": 1.0778, + "step": 2526 + }, + { + "epoch": 0.15844253558216817, + "grad_norm": 2.775376796722412, + "learning_rate": 1.9147528503508137e-05, + "loss": 1.276, + "step": 2527 + }, + { + "epoch": 0.1585052354379585, + "grad_norm": 2.847609758377075, + "learning_rate": 1.914670785783486e-05, + "loss": 1.2177, + "step": 2528 + }, + { + "epoch": 0.15856793529374882, + "grad_norm": 3.09761643409729, + "learning_rate": 1.914588683495095e-05, + "loss": 1.1843, + "step": 2529 + }, + { + "epoch": 0.15863063514953915, + "grad_norm": 3.2425878047943115, + "learning_rate": 1.914506543489027e-05, + "loss": 1.351, + "step": 2530 + }, + { + "epoch": 0.15869333500532948, + "grad_norm": 2.887422561645508, + "learning_rate": 1.9144243657686694e-05, + "loss": 1.2486, + "step": 2531 + }, + { + "epoch": 0.15875603486111983, + "grad_norm": 2.8209331035614014, + "learning_rate": 1.9143421503374107e-05, + "loss": 0.9562, + "step": 2532 + }, + { + "epoch": 0.15881873471691016, + "grad_norm": 2.8894243240356445, + "learning_rate": 1.914259897198642e-05, + "loss": 1.1563, + "step": 2533 + }, + { + "epoch": 0.1588814345727005, + "grad_norm": 2.670954942703247, + "learning_rate": 1.9141776063557552e-05, + "loss": 1.1923, + "step": 2534 + }, + { + "epoch": 0.1589441344284908, + "grad_norm": 3.0448718070983887, + "learning_rate": 1.9140952778121437e-05, + "loss": 1.198, + "step": 2535 + }, + { + "epoch": 0.15900683428428114, + "grad_norm": 2.8827264308929443, + "learning_rate": 1.9140129115712035e-05, + "loss": 1.2176, + "step": 2536 + }, + { + "epoch": 0.15906953414007147, + "grad_norm": 2.8670051097869873, + "learning_rate": 1.9139305076363305e-05, + "loss": 1.3353, + "step": 2537 + }, + { + "epoch": 
0.15913223399586182, + "grad_norm": 2.840832233428955, + "learning_rate": 1.9138480660109237e-05, + "loss": 1.117, + "step": 2538 + }, + { + "epoch": 0.15919493385165215, + "grad_norm": 2.763810634613037, + "learning_rate": 1.9137655866983827e-05, + "loss": 1.2387, + "step": 2539 + }, + { + "epoch": 0.15925763370744248, + "grad_norm": 2.9445817470550537, + "learning_rate": 1.913683069702109e-05, + "loss": 1.2971, + "step": 2540 + }, + { + "epoch": 0.1593203335632328, + "grad_norm": 3.1675634384155273, + "learning_rate": 1.9136005150255054e-05, + "loss": 1.021, + "step": 2541 + }, + { + "epoch": 0.15938303341902313, + "grad_norm": 3.1016902923583984, + "learning_rate": 1.9135179226719768e-05, + "loss": 1.2913, + "step": 2542 + }, + { + "epoch": 0.15944573327481346, + "grad_norm": 2.876302480697632, + "learning_rate": 1.9134352926449292e-05, + "loss": 1.0996, + "step": 2543 + }, + { + "epoch": 0.1595084331306038, + "grad_norm": 2.6187286376953125, + "learning_rate": 1.91335262494777e-05, + "loss": 1.1183, + "step": 2544 + }, + { + "epoch": 0.15957113298639414, + "grad_norm": 2.6435070037841797, + "learning_rate": 1.913269919583909e-05, + "loss": 1.2626, + "step": 2545 + }, + { + "epoch": 0.15963383284218446, + "grad_norm": 3.338867664337158, + "learning_rate": 1.9131871765567568e-05, + "loss": 1.1709, + "step": 2546 + }, + { + "epoch": 0.1596965326979748, + "grad_norm": 3.204352855682373, + "learning_rate": 1.913104395869725e-05, + "loss": 1.2116, + "step": 2547 + }, + { + "epoch": 0.15975923255376512, + "grad_norm": 3.196790933609009, + "learning_rate": 1.9130215775262283e-05, + "loss": 1.1915, + "step": 2548 + }, + { + "epoch": 0.15982193240955544, + "grad_norm": 2.8257291316986084, + "learning_rate": 1.912938721529682e-05, + "loss": 1.1875, + "step": 2549 + }, + { + "epoch": 0.1598846322653458, + "grad_norm": 2.879462718963623, + "learning_rate": 1.9128558278835027e-05, + "loss": 1.2776, + "step": 2550 + }, + { + "epoch": 0.15994733212113613, + "grad_norm": 2.5934836864471436, + "learning_rate": 1.9127728965911094e-05, + "loss": 1.0954, + "step": 2551 + }, + { + "epoch": 0.16001003197692645, + "grad_norm": 2.7322728633880615, + "learning_rate": 1.9126899276559217e-05, + "loss": 1.2839, + "step": 2552 + }, + { + "epoch": 0.16007273183271678, + "grad_norm": 2.7719509601593018, + "learning_rate": 1.912606921081362e-05, + "loss": 1.3047, + "step": 2553 + }, + { + "epoch": 0.1601354316885071, + "grad_norm": 3.100252628326416, + "learning_rate": 1.9125238768708527e-05, + "loss": 1.0229, + "step": 2554 + }, + { + "epoch": 0.16019813154429746, + "grad_norm": 3.0557971000671387, + "learning_rate": 1.912440795027819e-05, + "loss": 1.2396, + "step": 2555 + }, + { + "epoch": 0.1602608314000878, + "grad_norm": 3.167060613632202, + "learning_rate": 1.912357675555687e-05, + "loss": 1.22, + "step": 2556 + }, + { + "epoch": 0.16032353125587812, + "grad_norm": 2.669327735900879, + "learning_rate": 1.912274518457885e-05, + "loss": 1.0857, + "step": 2557 + }, + { + "epoch": 0.16038623111166844, + "grad_norm": 3.051267623901367, + "learning_rate": 1.9121913237378415e-05, + "loss": 0.9981, + "step": 2558 + }, + { + "epoch": 0.16044893096745877, + "grad_norm": 3.03044056892395, + "learning_rate": 1.912108091398988e-05, + "loss": 1.2239, + "step": 2559 + }, + { + "epoch": 0.1605116308232491, + "grad_norm": 2.7198233604431152, + "learning_rate": 1.912024821444757e-05, + "loss": 1.2458, + "step": 2560 + }, + { + "epoch": 0.16057433067903945, + "grad_norm": 2.6108827590942383, + "learning_rate": 
1.9119415138785826e-05, + "loss": 1.2371, + "step": 2561 + }, + { + "epoch": 0.16063703053482978, + "grad_norm": 2.808833360671997, + "learning_rate": 1.9118581687039003e-05, + "loss": 1.2441, + "step": 2562 + }, + { + "epoch": 0.1606997303906201, + "grad_norm": 2.8852412700653076, + "learning_rate": 1.9117747859241477e-05, + "loss": 1.1299, + "step": 2563 + }, + { + "epoch": 0.16076243024641043, + "grad_norm": 2.720945358276367, + "learning_rate": 1.9116913655427625e-05, + "loss": 1.2915, + "step": 2564 + }, + { + "epoch": 0.16082513010220076, + "grad_norm": 3.088677406311035, + "learning_rate": 1.911607907563186e-05, + "loss": 1.1535, + "step": 2565 + }, + { + "epoch": 0.16088782995799109, + "grad_norm": 2.8558242321014404, + "learning_rate": 1.9115244119888593e-05, + "loss": 1.2592, + "step": 2566 + }, + { + "epoch": 0.16095052981378144, + "grad_norm": 2.7740705013275146, + "learning_rate": 1.9114408788232262e-05, + "loss": 1.2259, + "step": 2567 + }, + { + "epoch": 0.16101322966957177, + "grad_norm": 3.0332870483398438, + "learning_rate": 1.9113573080697316e-05, + "loss": 1.1584, + "step": 2568 + }, + { + "epoch": 0.1610759295253621, + "grad_norm": 3.1428873538970947, + "learning_rate": 1.9112736997318214e-05, + "loss": 1.1771, + "step": 2569 + }, + { + "epoch": 0.16113862938115242, + "grad_norm": 2.912884473800659, + "learning_rate": 1.9111900538129443e-05, + "loss": 1.1733, + "step": 2570 + }, + { + "epoch": 0.16120132923694275, + "grad_norm": 2.7225730419158936, + "learning_rate": 1.91110637031655e-05, + "loss": 1.2707, + "step": 2571 + }, + { + "epoch": 0.16126402909273307, + "grad_norm": 3.147698402404785, + "learning_rate": 1.9110226492460886e-05, + "loss": 1.1819, + "step": 2572 + }, + { + "epoch": 0.16132672894852343, + "grad_norm": 3.2596442699432373, + "learning_rate": 1.9109388906050135e-05, + "loss": 1.1531, + "step": 2573 + }, + { + "epoch": 0.16138942880431376, + "grad_norm": 2.9387378692626953, + "learning_rate": 1.9108550943967786e-05, + "loss": 1.274, + "step": 2574 + }, + { + "epoch": 0.16145212866010408, + "grad_norm": 2.9528119564056396, + "learning_rate": 1.9107712606248402e-05, + "loss": 1.1013, + "step": 2575 + }, + { + "epoch": 0.1615148285158944, + "grad_norm": 2.5451831817626953, + "learning_rate": 1.910687389292655e-05, + "loss": 1.3344, + "step": 2576 + }, + { + "epoch": 0.16157752837168474, + "grad_norm": 3.1617836952209473, + "learning_rate": 1.9106034804036822e-05, + "loss": 1.0185, + "step": 2577 + }, + { + "epoch": 0.16164022822747506, + "grad_norm": 2.9634554386138916, + "learning_rate": 1.9105195339613823e-05, + "loss": 0.9931, + "step": 2578 + }, + { + "epoch": 0.16170292808326542, + "grad_norm": 2.5616235733032227, + "learning_rate": 1.9104355499692166e-05, + "loss": 1.2282, + "step": 2579 + }, + { + "epoch": 0.16176562793905574, + "grad_norm": 2.9264819622039795, + "learning_rate": 1.9103515284306492e-05, + "loss": 0.9687, + "step": 2580 + }, + { + "epoch": 0.16182832779484607, + "grad_norm": 3.0016028881073, + "learning_rate": 1.9102674693491452e-05, + "loss": 1.3125, + "step": 2581 + }, + { + "epoch": 0.1618910276506364, + "grad_norm": 2.943364381790161, + "learning_rate": 1.910183372728171e-05, + "loss": 1.2314, + "step": 2582 + }, + { + "epoch": 0.16195372750642673, + "grad_norm": 3.0822768211364746, + "learning_rate": 1.9100992385711947e-05, + "loss": 1.1734, + "step": 2583 + }, + { + "epoch": 0.16201642736221708, + "grad_norm": 3.0605945587158203, + "learning_rate": 1.910015066881686e-05, + "loss": 1.2604, + "step": 2584 + }, + { + "epoch": 
0.1620791272180074, + "grad_norm": 2.766188383102417, + "learning_rate": 1.909930857663116e-05, + "loss": 1.2027, + "step": 2585 + }, + { + "epoch": 0.16214182707379773, + "grad_norm": 2.6268324851989746, + "learning_rate": 1.909846610918958e-05, + "loss": 1.2245, + "step": 2586 + }, + { + "epoch": 0.16220452692958806, + "grad_norm": 3.070892572402954, + "learning_rate": 1.909762326652686e-05, + "loss": 1.0637, + "step": 2587 + }, + { + "epoch": 0.1622672267853784, + "grad_norm": 2.8303427696228027, + "learning_rate": 1.909678004867776e-05, + "loss": 1.3702, + "step": 2588 + }, + { + "epoch": 0.16232992664116871, + "grad_norm": 3.1123626232147217, + "learning_rate": 1.909593645567705e-05, + "loss": 1.1683, + "step": 2589 + }, + { + "epoch": 0.16239262649695907, + "grad_norm": 3.0748186111450195, + "learning_rate": 1.9095092487559528e-05, + "loss": 1.2229, + "step": 2590 + }, + { + "epoch": 0.1624553263527494, + "grad_norm": 2.812638282775879, + "learning_rate": 1.9094248144359993e-05, + "loss": 1.385, + "step": 2591 + }, + { + "epoch": 0.16251802620853972, + "grad_norm": 2.850374937057495, + "learning_rate": 1.9093403426113264e-05, + "loss": 1.2861, + "step": 2592 + }, + { + "epoch": 0.16258072606433005, + "grad_norm": 2.9039559364318848, + "learning_rate": 1.9092558332854186e-05, + "loss": 1.1556, + "step": 2593 + }, + { + "epoch": 0.16264342592012038, + "grad_norm": 3.060579299926758, + "learning_rate": 1.90917128646176e-05, + "loss": 0.9433, + "step": 2594 + }, + { + "epoch": 0.1627061257759107, + "grad_norm": 2.77121639251709, + "learning_rate": 1.909086702143838e-05, + "loss": 1.1917, + "step": 2595 + }, + { + "epoch": 0.16276882563170106, + "grad_norm": 2.8846073150634766, + "learning_rate": 1.909002080335141e-05, + "loss": 1.1814, + "step": 2596 + }, + { + "epoch": 0.16283152548749139, + "grad_norm": 2.711839437484741, + "learning_rate": 1.9089174210391582e-05, + "loss": 1.2533, + "step": 2597 + }, + { + "epoch": 0.1628942253432817, + "grad_norm": 3.370508909225464, + "learning_rate": 1.9088327242593813e-05, + "loss": 1.2217, + "step": 2598 + }, + { + "epoch": 0.16295692519907204, + "grad_norm": 2.9007697105407715, + "learning_rate": 1.9087479899993034e-05, + "loss": 1.1698, + "step": 2599 + }, + { + "epoch": 0.16301962505486237, + "grad_norm": 2.947283983230591, + "learning_rate": 1.9086632182624185e-05, + "loss": 1.2433, + "step": 2600 + }, + { + "epoch": 0.1630823249106527, + "grad_norm": 2.8637216091156006, + "learning_rate": 1.9085784090522227e-05, + "loss": 1.1082, + "step": 2601 + }, + { + "epoch": 0.16314502476644305, + "grad_norm": 2.880129337310791, + "learning_rate": 1.908493562372214e-05, + "loss": 1.3488, + "step": 2602 + }, + { + "epoch": 0.16320772462223337, + "grad_norm": 2.974203109741211, + "learning_rate": 1.908408678225891e-05, + "loss": 1.263, + "step": 2603 + }, + { + "epoch": 0.1632704244780237, + "grad_norm": 2.820371627807617, + "learning_rate": 1.908323756616754e-05, + "loss": 1.1632, + "step": 2604 + }, + { + "epoch": 0.16333312433381403, + "grad_norm": 2.897542953491211, + "learning_rate": 1.908238797548306e-05, + "loss": 1.2608, + "step": 2605 + }, + { + "epoch": 0.16339582418960436, + "grad_norm": 3.453684091567993, + "learning_rate": 1.9081538010240504e-05, + "loss": 1.1124, + "step": 2606 + }, + { + "epoch": 0.16345852404539468, + "grad_norm": 2.772716999053955, + "learning_rate": 1.9080687670474923e-05, + "loss": 1.1778, + "step": 2607 + }, + { + "epoch": 0.16352122390118504, + "grad_norm": 2.816518545150757, + "learning_rate": 
1.9079836956221384e-05, + "loss": 1.1972, + "step": 2608 + }, + { + "epoch": 0.16358392375697536, + "grad_norm": 2.76812744140625, + "learning_rate": 1.9078985867514973e-05, + "loss": 1.2692, + "step": 2609 + }, + { + "epoch": 0.1636466236127657, + "grad_norm": 2.9229414463043213, + "learning_rate": 1.907813440439079e-05, + "loss": 1.3404, + "step": 2610 + }, + { + "epoch": 0.16370932346855602, + "grad_norm": 3.4189512729644775, + "learning_rate": 1.9077282566883947e-05, + "loss": 1.0983, + "step": 2611 + }, + { + "epoch": 0.16377202332434634, + "grad_norm": 2.716747999191284, + "learning_rate": 1.9076430355029575e-05, + "loss": 1.3258, + "step": 2612 + }, + { + "epoch": 0.1638347231801367, + "grad_norm": 2.576225519180298, + "learning_rate": 1.907557776886282e-05, + "loss": 1.322, + "step": 2613 + }, + { + "epoch": 0.16389742303592703, + "grad_norm": 2.851222276687622, + "learning_rate": 1.9074724808418837e-05, + "loss": 1.3739, + "step": 2614 + }, + { + "epoch": 0.16396012289171735, + "grad_norm": 2.6536803245544434, + "learning_rate": 1.907387147373281e-05, + "loss": 1.2164, + "step": 2615 + }, + { + "epoch": 0.16402282274750768, + "grad_norm": 2.9195215702056885, + "learning_rate": 1.9073017764839928e-05, + "loss": 1.2322, + "step": 2616 + }, + { + "epoch": 0.164085522603298, + "grad_norm": 2.979871988296509, + "learning_rate": 1.9072163681775392e-05, + "loss": 1.3603, + "step": 2617 + }, + { + "epoch": 0.16414822245908833, + "grad_norm": 2.7202608585357666, + "learning_rate": 1.9071309224574434e-05, + "loss": 1.1407, + "step": 2618 + }, + { + "epoch": 0.1642109223148787, + "grad_norm": 2.676138162612915, + "learning_rate": 1.9070454393272285e-05, + "loss": 1.2548, + "step": 2619 + }, + { + "epoch": 0.16427362217066901, + "grad_norm": 2.5041849613189697, + "learning_rate": 1.9069599187904205e-05, + "loss": 1.3095, + "step": 2620 + }, + { + "epoch": 0.16433632202645934, + "grad_norm": 2.912447452545166, + "learning_rate": 1.9068743608505454e-05, + "loss": 1.3664, + "step": 2621 + }, + { + "epoch": 0.16439902188224967, + "grad_norm": 3.18320631980896, + "learning_rate": 1.9067887655111326e-05, + "loss": 1.1964, + "step": 2622 + }, + { + "epoch": 0.16446172173804, + "grad_norm": 2.73201847076416, + "learning_rate": 1.906703132775711e-05, + "loss": 1.13, + "step": 2623 + }, + { + "epoch": 0.16452442159383032, + "grad_norm": 3.287229537963867, + "learning_rate": 1.906617462647813e-05, + "loss": 1.0883, + "step": 2624 + }, + { + "epoch": 0.16458712144962068, + "grad_norm": 3.1093409061431885, + "learning_rate": 1.906531755130971e-05, + "loss": 1.1707, + "step": 2625 + }, + { + "epoch": 0.164649821305411, + "grad_norm": 3.3399956226348877, + "learning_rate": 1.9064460102287198e-05, + "loss": 1.2825, + "step": 2626 + }, + { + "epoch": 0.16471252116120133, + "grad_norm": 2.6236698627471924, + "learning_rate": 1.9063602279445955e-05, + "loss": 1.3253, + "step": 2627 + }, + { + "epoch": 0.16477522101699166, + "grad_norm": 3.5722315311431885, + "learning_rate": 1.9062744082821357e-05, + "loss": 1.1703, + "step": 2628 + }, + { + "epoch": 0.16483792087278198, + "grad_norm": 2.9268620014190674, + "learning_rate": 1.9061885512448797e-05, + "loss": 1.143, + "step": 2629 + }, + { + "epoch": 0.1649006207285723, + "grad_norm": 2.8623814582824707, + "learning_rate": 1.906102656836369e-05, + "loss": 1.0705, + "step": 2630 + }, + { + "epoch": 0.16496332058436267, + "grad_norm": 2.6747286319732666, + "learning_rate": 1.9060167250601444e-05, + "loss": 1.1132, + "step": 2631 + }, + { + "epoch": 
0.165026020440153, + "grad_norm": 3.141880512237549, + "learning_rate": 1.905930755919751e-05, + "loss": 1.1251, + "step": 2632 + }, + { + "epoch": 0.16508872029594332, + "grad_norm": 3.0824549198150635, + "learning_rate": 1.905844749418733e-05, + "loss": 1.2534, + "step": 2633 + }, + { + "epoch": 0.16515142015173365, + "grad_norm": 2.9186582565307617, + "learning_rate": 1.9057587055606385e-05, + "loss": 1.261, + "step": 2634 + }, + { + "epoch": 0.16521412000752397, + "grad_norm": 2.688230276107788, + "learning_rate": 1.9056726243490152e-05, + "loss": 1.1363, + "step": 2635 + }, + { + "epoch": 0.16527681986331433, + "grad_norm": 2.8095202445983887, + "learning_rate": 1.9055865057874135e-05, + "loss": 1.3462, + "step": 2636 + }, + { + "epoch": 0.16533951971910466, + "grad_norm": 3.167020559310913, + "learning_rate": 1.9055003498793848e-05, + "loss": 1.0683, + "step": 2637 + }, + { + "epoch": 0.16540221957489498, + "grad_norm": 2.925154685974121, + "learning_rate": 1.9054141566284822e-05, + "loss": 1.0087, + "step": 2638 + }, + { + "epoch": 0.1654649194306853, + "grad_norm": 3.0684657096862793, + "learning_rate": 1.90532792603826e-05, + "loss": 1.1306, + "step": 2639 + }, + { + "epoch": 0.16552761928647564, + "grad_norm": 2.9106688499450684, + "learning_rate": 1.9052416581122747e-05, + "loss": 1.1798, + "step": 2640 + }, + { + "epoch": 0.16559031914226596, + "grad_norm": 2.866365432739258, + "learning_rate": 1.9051553528540838e-05, + "loss": 1.2534, + "step": 2641 + }, + { + "epoch": 0.16565301899805632, + "grad_norm": 2.907555341720581, + "learning_rate": 1.9050690102672468e-05, + "loss": 1.2403, + "step": 2642 + }, + { + "epoch": 0.16571571885384664, + "grad_norm": 3.040022373199463, + "learning_rate": 1.904982630355324e-05, + "loss": 1.2334, + "step": 2643 + }, + { + "epoch": 0.16577841870963697, + "grad_norm": 2.9519848823547363, + "learning_rate": 1.9048962131218783e-05, + "loss": 1.0174, + "step": 2644 + }, + { + "epoch": 0.1658411185654273, + "grad_norm": 3.4639101028442383, + "learning_rate": 1.904809758570473e-05, + "loss": 1.1182, + "step": 2645 + }, + { + "epoch": 0.16590381842121762, + "grad_norm": 2.7853329181671143, + "learning_rate": 1.9047232667046737e-05, + "loss": 1.1526, + "step": 2646 + }, + { + "epoch": 0.16596651827700795, + "grad_norm": 3.049187421798706, + "learning_rate": 1.9046367375280476e-05, + "loss": 1.206, + "step": 2647 + }, + { + "epoch": 0.1660292181327983, + "grad_norm": 2.760565757751465, + "learning_rate": 1.904550171044163e-05, + "loss": 1.1455, + "step": 2648 + }, + { + "epoch": 0.16609191798858863, + "grad_norm": 3.009612560272217, + "learning_rate": 1.9044635672565898e-05, + "loss": 1.1079, + "step": 2649 + }, + { + "epoch": 0.16615461784437896, + "grad_norm": 3.0144894123077393, + "learning_rate": 1.9043769261688997e-05, + "loss": 1.2758, + "step": 2650 + }, + { + "epoch": 0.1662173177001693, + "grad_norm": 2.982835292816162, + "learning_rate": 1.9042902477846653e-05, + "loss": 1.1579, + "step": 2651 + }, + { + "epoch": 0.1662800175559596, + "grad_norm": 3.5067052841186523, + "learning_rate": 1.9042035321074616e-05, + "loss": 1.0532, + "step": 2652 + }, + { + "epoch": 0.16634271741174994, + "grad_norm": 3.1636452674865723, + "learning_rate": 1.904116779140865e-05, + "loss": 1.3229, + "step": 2653 + }, + { + "epoch": 0.1664054172675403, + "grad_norm": 3.1820104122161865, + "learning_rate": 1.904029988888453e-05, + "loss": 1.2412, + "step": 2654 + }, + { + "epoch": 0.16646811712333062, + "grad_norm": 2.711704969406128, + "learning_rate": 
1.9039431613538047e-05, + "loss": 1.3259, + "step": 2655 + }, + { + "epoch": 0.16653081697912095, + "grad_norm": 3.031031847000122, + "learning_rate": 1.9038562965405006e-05, + "loss": 1.2936, + "step": 2656 + }, + { + "epoch": 0.16659351683491128, + "grad_norm": 2.6242403984069824, + "learning_rate": 1.9037693944521235e-05, + "loss": 1.2226, + "step": 2657 + }, + { + "epoch": 0.1666562166907016, + "grad_norm": 2.9863712787628174, + "learning_rate": 1.9036824550922575e-05, + "loss": 1.3693, + "step": 2658 + }, + { + "epoch": 0.16671891654649193, + "grad_norm": 3.090608596801758, + "learning_rate": 1.9035954784644874e-05, + "loss": 1.1517, + "step": 2659 + }, + { + "epoch": 0.16678161640228228, + "grad_norm": 2.7400264739990234, + "learning_rate": 1.9035084645724003e-05, + "loss": 1.3093, + "step": 2660 + }, + { + "epoch": 0.1668443162580726, + "grad_norm": 2.549685478210449, + "learning_rate": 1.9034214134195846e-05, + "loss": 1.1619, + "step": 2661 + }, + { + "epoch": 0.16690701611386294, + "grad_norm": 2.7075765132904053, + "learning_rate": 1.9033343250096304e-05, + "loss": 1.1756, + "step": 2662 + }, + { + "epoch": 0.16696971596965327, + "grad_norm": 3.1768157482147217, + "learning_rate": 1.903247199346129e-05, + "loss": 1.1376, + "step": 2663 + }, + { + "epoch": 0.1670324158254436, + "grad_norm": 2.737856149673462, + "learning_rate": 1.903160036432674e-05, + "loss": 1.067, + "step": 2664 + }, + { + "epoch": 0.16709511568123395, + "grad_norm": 3.070871114730835, + "learning_rate": 1.9030728362728596e-05, + "loss": 1.1338, + "step": 2665 + }, + { + "epoch": 0.16715781553702427, + "grad_norm": 2.988913059234619, + "learning_rate": 1.9029855988702815e-05, + "loss": 1.2752, + "step": 2666 + }, + { + "epoch": 0.1672205153928146, + "grad_norm": 2.755075693130493, + "learning_rate": 1.9028983242285383e-05, + "loss": 1.4432, + "step": 2667 + }, + { + "epoch": 0.16728321524860493, + "grad_norm": 2.531670093536377, + "learning_rate": 1.902811012351229e-05, + "loss": 0.9782, + "step": 2668 + }, + { + "epoch": 0.16734591510439525, + "grad_norm": 2.7924556732177734, + "learning_rate": 1.9027236632419533e-05, + "loss": 1.1905, + "step": 2669 + }, + { + "epoch": 0.16740861496018558, + "grad_norm": 2.9443492889404297, + "learning_rate": 1.902636276904315e-05, + "loss": 1.323, + "step": 2670 + }, + { + "epoch": 0.16747131481597594, + "grad_norm": 2.8527188301086426, + "learning_rate": 1.902548853341917e-05, + "loss": 1.0556, + "step": 2671 + }, + { + "epoch": 0.16753401467176626, + "grad_norm": 2.8454105854034424, + "learning_rate": 1.9024613925583652e-05, + "loss": 1.2144, + "step": 2672 + }, + { + "epoch": 0.1675967145275566, + "grad_norm": 2.7167351245880127, + "learning_rate": 1.902373894557266e-05, + "loss": 1.1679, + "step": 2673 + }, + { + "epoch": 0.16765941438334692, + "grad_norm": 2.73966646194458, + "learning_rate": 1.902286359342228e-05, + "loss": 1.1101, + "step": 2674 + }, + { + "epoch": 0.16772211423913724, + "grad_norm": 2.578110694885254, + "learning_rate": 1.9021987869168608e-05, + "loss": 1.0965, + "step": 2675 + }, + { + "epoch": 0.16778481409492757, + "grad_norm": 3.0119080543518066, + "learning_rate": 1.9021111772847765e-05, + "loss": 1.1364, + "step": 2676 + }, + { + "epoch": 0.16784751395071792, + "grad_norm": 2.8952043056488037, + "learning_rate": 1.9020235304495877e-05, + "loss": 1.2, + "step": 2677 + }, + { + "epoch": 0.16791021380650825, + "grad_norm": 2.793642520904541, + "learning_rate": 1.9019358464149093e-05, + "loss": 1.1496, + "step": 2678 + }, + { + "epoch": 
0.16797291366229858, + "grad_norm": 2.725207805633545, + "learning_rate": 1.901848125184357e-05, + "loss": 1.1881, + "step": 2679 + }, + { + "epoch": 0.1680356135180889, + "grad_norm": 2.729750394821167, + "learning_rate": 1.9017603667615486e-05, + "loss": 1.3783, + "step": 2680 + }, + { + "epoch": 0.16809831337387923, + "grad_norm": 2.8725693225860596, + "learning_rate": 1.9016725711501037e-05, + "loss": 1.451, + "step": 2681 + }, + { + "epoch": 0.16816101322966956, + "grad_norm": 2.7671074867248535, + "learning_rate": 1.9015847383536424e-05, + "loss": 1.1654, + "step": 2682 + }, + { + "epoch": 0.1682237130854599, + "grad_norm": 2.9867827892303467, + "learning_rate": 1.901496868375787e-05, + "loss": 1.0686, + "step": 2683 + }, + { + "epoch": 0.16828641294125024, + "grad_norm": 3.278604030609131, + "learning_rate": 1.9014089612201612e-05, + "loss": 1.2134, + "step": 2684 + }, + { + "epoch": 0.16834911279704057, + "grad_norm": 2.7501580715179443, + "learning_rate": 1.901321016890391e-05, + "loss": 1.1653, + "step": 2685 + }, + { + "epoch": 0.1684118126528309, + "grad_norm": 3.101644515991211, + "learning_rate": 1.9012330353901024e-05, + "loss": 1.0495, + "step": 2686 + }, + { + "epoch": 0.16847451250862122, + "grad_norm": 2.9226150512695312, + "learning_rate": 1.9011450167229243e-05, + "loss": 1.0695, + "step": 2687 + }, + { + "epoch": 0.16853721236441155, + "grad_norm": 2.9409234523773193, + "learning_rate": 1.901056960892486e-05, + "loss": 1.0497, + "step": 2688 + }, + { + "epoch": 0.1685999122202019, + "grad_norm": 2.868196487426758, + "learning_rate": 1.900968867902419e-05, + "loss": 1.064, + "step": 2689 + }, + { + "epoch": 0.16866261207599223, + "grad_norm": 3.1354923248291016, + "learning_rate": 1.9008807377563573e-05, + "loss": 1.1228, + "step": 2690 + }, + { + "epoch": 0.16872531193178256, + "grad_norm": 2.7755990028381348, + "learning_rate": 1.9007925704579346e-05, + "loss": 1.0123, + "step": 2691 + }, + { + "epoch": 0.16878801178757288, + "grad_norm": 3.390023708343506, + "learning_rate": 1.9007043660107864e-05, + "loss": 1.2754, + "step": 2692 + }, + { + "epoch": 0.1688507116433632, + "grad_norm": 3.025682210922241, + "learning_rate": 1.9006161244185512e-05, + "loss": 1.1659, + "step": 2693 + }, + { + "epoch": 0.16891341149915357, + "grad_norm": 2.6929843425750732, + "learning_rate": 1.9005278456848676e-05, + "loss": 1.1091, + "step": 2694 + }, + { + "epoch": 0.1689761113549439, + "grad_norm": 2.9739346504211426, + "learning_rate": 1.900439529813376e-05, + "loss": 1.1538, + "step": 2695 + }, + { + "epoch": 0.16903881121073422, + "grad_norm": 2.6528666019439697, + "learning_rate": 1.9003511768077193e-05, + "loss": 1.1262, + "step": 2696 + }, + { + "epoch": 0.16910151106652455, + "grad_norm": 2.747525453567505, + "learning_rate": 1.9002627866715405e-05, + "loss": 1.1324, + "step": 2697 + }, + { + "epoch": 0.16916421092231487, + "grad_norm": 2.689943313598633, + "learning_rate": 1.9001743594084852e-05, + "loss": 1.4273, + "step": 2698 + }, + { + "epoch": 0.1692269107781052, + "grad_norm": 3.18796968460083, + "learning_rate": 1.9000858950222e-05, + "loss": 1.2178, + "step": 2699 + }, + { + "epoch": 0.16928961063389555, + "grad_norm": 2.9647607803344727, + "learning_rate": 1.899997393516333e-05, + "loss": 1.1027, + "step": 2700 + }, + { + "epoch": 0.16935231048968588, + "grad_norm": 3.027115821838379, + "learning_rate": 1.8999088548945342e-05, + "loss": 1.1963, + "step": 2701 + }, + { + "epoch": 0.1694150103454762, + "grad_norm": 2.807645797729492, + "learning_rate": 
1.8998202791604554e-05, + "loss": 1.256, + "step": 2702 + }, + { + "epoch": 0.16947771020126653, + "grad_norm": 3.0145938396453857, + "learning_rate": 1.8997316663177486e-05, + "loss": 1.1943, + "step": 2703 + }, + { + "epoch": 0.16954041005705686, + "grad_norm": 2.8790359497070312, + "learning_rate": 1.8996430163700686e-05, + "loss": 1.2411, + "step": 2704 + }, + { + "epoch": 0.1696031099128472, + "grad_norm": 3.074158191680908, + "learning_rate": 1.8995543293210713e-05, + "loss": 1.2032, + "step": 2705 + }, + { + "epoch": 0.16966580976863754, + "grad_norm": 2.9535999298095703, + "learning_rate": 1.899465605174414e-05, + "loss": 1.0686, + "step": 2706 + }, + { + "epoch": 0.16972850962442787, + "grad_norm": 3.0198512077331543, + "learning_rate": 1.8993768439337565e-05, + "loss": 1.1143, + "step": 2707 + }, + { + "epoch": 0.1697912094802182, + "grad_norm": 2.903451681137085, + "learning_rate": 1.899288045602758e-05, + "loss": 1.0922, + "step": 2708 + }, + { + "epoch": 0.16985390933600852, + "grad_norm": 2.9563183784484863, + "learning_rate": 1.8991992101850818e-05, + "loss": 1.2028, + "step": 2709 + }, + { + "epoch": 0.16991660919179885, + "grad_norm": 3.0272319316864014, + "learning_rate": 1.8991103376843905e-05, + "loss": 1.1773, + "step": 2710 + }, + { + "epoch": 0.16997930904758918, + "grad_norm": 2.6141843795776367, + "learning_rate": 1.8990214281043497e-05, + "loss": 1.1614, + "step": 2711 + }, + { + "epoch": 0.17004200890337953, + "grad_norm": 2.971191644668579, + "learning_rate": 1.898932481448626e-05, + "loss": 1.0507, + "step": 2712 + }, + { + "epoch": 0.17010470875916986, + "grad_norm": 2.9903457164764404, + "learning_rate": 1.8988434977208876e-05, + "loss": 1.1907, + "step": 2713 + }, + { + "epoch": 0.17016740861496019, + "grad_norm": 3.144174575805664, + "learning_rate": 1.898754476924804e-05, + "loss": 1.2021, + "step": 2714 + }, + { + "epoch": 0.1702301084707505, + "grad_norm": 3.2488479614257812, + "learning_rate": 1.8986654190640462e-05, + "loss": 1.1262, + "step": 2715 + }, + { + "epoch": 0.17029280832654084, + "grad_norm": 2.9210011959075928, + "learning_rate": 1.8985763241422878e-05, + "loss": 1.1988, + "step": 2716 + }, + { + "epoch": 0.17035550818233117, + "grad_norm": 3.0148704051971436, + "learning_rate": 1.8984871921632022e-05, + "loss": 1.2277, + "step": 2717 + }, + { + "epoch": 0.17041820803812152, + "grad_norm": 3.1559596061706543, + "learning_rate": 1.898398023130466e-05, + "loss": 1.1164, + "step": 2718 + }, + { + "epoch": 0.17048090789391185, + "grad_norm": 2.8620333671569824, + "learning_rate": 1.8983088170477556e-05, + "loss": 1.2227, + "step": 2719 + }, + { + "epoch": 0.17054360774970218, + "grad_norm": 2.922215461730957, + "learning_rate": 1.8982195739187505e-05, + "loss": 1.2233, + "step": 2720 + }, + { + "epoch": 0.1706063076054925, + "grad_norm": 2.8852837085723877, + "learning_rate": 1.898130293747131e-05, + "loss": 1.2929, + "step": 2721 + }, + { + "epoch": 0.17066900746128283, + "grad_norm": 3.2843403816223145, + "learning_rate": 1.8980409765365788e-05, + "loss": 1.0448, + "step": 2722 + }, + { + "epoch": 0.17073170731707318, + "grad_norm": 2.8485605716705322, + "learning_rate": 1.8979516222907776e-05, + "loss": 1.2881, + "step": 2723 + }, + { + "epoch": 0.1707944071728635, + "grad_norm": 3.216569185256958, + "learning_rate": 1.8978622310134126e-05, + "loss": 1.2073, + "step": 2724 + }, + { + "epoch": 0.17085710702865384, + "grad_norm": 2.6641485691070557, + "learning_rate": 1.8977728027081696e-05, + "loss": 1.2256, + "step": 2725 + }, + { + "epoch": 
0.17091980688444416, + "grad_norm": 2.8564200401306152, + "learning_rate": 1.897683337378737e-05, + "loss": 1.1708, + "step": 2726 + }, + { + "epoch": 0.1709825067402345, + "grad_norm": 3.1733837127685547, + "learning_rate": 1.8975938350288046e-05, + "loss": 1.1628, + "step": 2727 + }, + { + "epoch": 0.17104520659602482, + "grad_norm": 2.7199904918670654, + "learning_rate": 1.8975042956620632e-05, + "loss": 1.304, + "step": 2728 + }, + { + "epoch": 0.17110790645181517, + "grad_norm": 2.6176869869232178, + "learning_rate": 1.8974147192822053e-05, + "loss": 1.2087, + "step": 2729 + }, + { + "epoch": 0.1711706063076055, + "grad_norm": 3.021177053451538, + "learning_rate": 1.897325105892925e-05, + "loss": 1.2101, + "step": 2730 + }, + { + "epoch": 0.17123330616339583, + "grad_norm": 2.894465446472168, + "learning_rate": 1.8972354554979188e-05, + "loss": 1.3407, + "step": 2731 + }, + { + "epoch": 0.17129600601918615, + "grad_norm": 2.7253975868225098, + "learning_rate": 1.8971457681008827e-05, + "loss": 1.3692, + "step": 2732 + }, + { + "epoch": 0.17135870587497648, + "grad_norm": 2.8234810829162598, + "learning_rate": 1.8970560437055162e-05, + "loss": 1.1237, + "step": 2733 + }, + { + "epoch": 0.1714214057307668, + "grad_norm": 2.9131417274475098, + "learning_rate": 1.8969662823155192e-05, + "loss": 1.1446, + "step": 2734 + }, + { + "epoch": 0.17148410558655716, + "grad_norm": 2.876309633255005, + "learning_rate": 1.8968764839345935e-05, + "loss": 1.085, + "step": 2735 + }, + { + "epoch": 0.1715468054423475, + "grad_norm": 2.645297050476074, + "learning_rate": 1.8967866485664425e-05, + "loss": 1.2109, + "step": 2736 + }, + { + "epoch": 0.17160950529813782, + "grad_norm": 3.04596209526062, + "learning_rate": 1.896696776214771e-05, + "loss": 1.2076, + "step": 2737 + }, + { + "epoch": 0.17167220515392814, + "grad_norm": 2.993608236312866, + "learning_rate": 1.8966068668832852e-05, + "loss": 1.2141, + "step": 2738 + }, + { + "epoch": 0.17173490500971847, + "grad_norm": 2.864475727081299, + "learning_rate": 1.896516920575693e-05, + "loss": 1.1466, + "step": 2739 + }, + { + "epoch": 0.1717976048655088, + "grad_norm": 2.6355535984039307, + "learning_rate": 1.896426937295704e-05, + "loss": 1.2163, + "step": 2740 + }, + { + "epoch": 0.17186030472129915, + "grad_norm": 2.769090414047241, + "learning_rate": 1.896336917047029e-05, + "loss": 1.2181, + "step": 2741 + }, + { + "epoch": 0.17192300457708948, + "grad_norm": 2.9678244590759277, + "learning_rate": 1.89624685983338e-05, + "loss": 1.2514, + "step": 2742 + }, + { + "epoch": 0.1719857044328798, + "grad_norm": 2.7453622817993164, + "learning_rate": 1.8961567656584716e-05, + "loss": 1.3396, + "step": 2743 + }, + { + "epoch": 0.17204840428867013, + "grad_norm": 2.801621913909912, + "learning_rate": 1.8960666345260194e-05, + "loss": 1.2818, + "step": 2744 + }, + { + "epoch": 0.17211110414446046, + "grad_norm": 2.8262038230895996, + "learning_rate": 1.8959764664397396e-05, + "loss": 1.1166, + "step": 2745 + }, + { + "epoch": 0.1721738040002508, + "grad_norm": 2.7081587314605713, + "learning_rate": 1.895886261403351e-05, + "loss": 1.2203, + "step": 2746 + }, + { + "epoch": 0.17223650385604114, + "grad_norm": 2.721705198287964, + "learning_rate": 1.8957960194205743e-05, + "loss": 1.2457, + "step": 2747 + }, + { + "epoch": 0.17229920371183147, + "grad_norm": 2.997276544570923, + "learning_rate": 1.8957057404951308e-05, + "loss": 1.2531, + "step": 2748 + }, + { + "epoch": 0.1723619035676218, + "grad_norm": 2.831190586090088, + "learning_rate": 
1.895615424630743e-05, + "loss": 1.0912, + "step": 2749 + }, + { + "epoch": 0.17242460342341212, + "grad_norm": 2.7170708179473877, + "learning_rate": 1.895525071831136e-05, + "loss": 1.1665, + "step": 2750 + }, + { + "epoch": 0.17248730327920245, + "grad_norm": 2.9839630126953125, + "learning_rate": 1.8954346821000364e-05, + "loss": 1.3047, + "step": 2751 + }, + { + "epoch": 0.1725500031349928, + "grad_norm": 2.7156124114990234, + "learning_rate": 1.895344255441171e-05, + "loss": 1.2513, + "step": 2752 + }, + { + "epoch": 0.17261270299078313, + "grad_norm": 3.0374231338500977, + "learning_rate": 1.8952537918582694e-05, + "loss": 1.1869, + "step": 2753 + }, + { + "epoch": 0.17267540284657346, + "grad_norm": 2.7530572414398193, + "learning_rate": 1.8951632913550625e-05, + "loss": 1.4416, + "step": 2754 + }, + { + "epoch": 0.17273810270236378, + "grad_norm": 2.801767349243164, + "learning_rate": 1.8950727539352822e-05, + "loss": 1.1561, + "step": 2755 + }, + { + "epoch": 0.1728008025581541, + "grad_norm": 3.0775840282440186, + "learning_rate": 1.8949821796026626e-05, + "loss": 1.3023, + "step": 2756 + }, + { + "epoch": 0.17286350241394444, + "grad_norm": 2.8160829544067383, + "learning_rate": 1.8948915683609387e-05, + "loss": 1.2001, + "step": 2757 + }, + { + "epoch": 0.1729262022697348, + "grad_norm": 2.961881637573242, + "learning_rate": 1.8948009202138477e-05, + "loss": 1.2388, + "step": 2758 + }, + { + "epoch": 0.17298890212552512, + "grad_norm": 2.9470200538635254, + "learning_rate": 1.8947102351651273e-05, + "loss": 1.091, + "step": 2759 + }, + { + "epoch": 0.17305160198131544, + "grad_norm": 3.113398313522339, + "learning_rate": 1.894619513218518e-05, + "loss": 1.1219, + "step": 2760 + }, + { + "epoch": 0.17311430183710577, + "grad_norm": 2.7649755477905273, + "learning_rate": 1.894528754377761e-05, + "loss": 1.1089, + "step": 2761 + }, + { + "epoch": 0.1731770016928961, + "grad_norm": 2.6532680988311768, + "learning_rate": 1.8944379586465986e-05, + "loss": 1.1108, + "step": 2762 + }, + { + "epoch": 0.17323970154868643, + "grad_norm": 2.8037846088409424, + "learning_rate": 1.8943471260287763e-05, + "loss": 1.1446, + "step": 2763 + }, + { + "epoch": 0.17330240140447678, + "grad_norm": 2.961660146713257, + "learning_rate": 1.894256256528039e-05, + "loss": 1.122, + "step": 2764 + }, + { + "epoch": 0.1733651012602671, + "grad_norm": 2.6646223068237305, + "learning_rate": 1.894165350148135e-05, + "loss": 1.0273, + "step": 2765 + }, + { + "epoch": 0.17342780111605743, + "grad_norm": 2.697222948074341, + "learning_rate": 1.8940744068928126e-05, + "loss": 1.3298, + "step": 2766 + }, + { + "epoch": 0.17349050097184776, + "grad_norm": 2.7711102962493896, + "learning_rate": 1.8939834267658228e-05, + "loss": 1.0286, + "step": 2767 + }, + { + "epoch": 0.1735532008276381, + "grad_norm": 2.9094626903533936, + "learning_rate": 1.8938924097709175e-05, + "loss": 1.1414, + "step": 2768 + }, + { + "epoch": 0.17361590068342841, + "grad_norm": 2.895867347717285, + "learning_rate": 1.89380135591185e-05, + "loss": 1.2056, + "step": 2769 + }, + { + "epoch": 0.17367860053921877, + "grad_norm": 3.373023509979248, + "learning_rate": 1.8937102651923757e-05, + "loss": 1.1284, + "step": 2770 + }, + { + "epoch": 0.1737413003950091, + "grad_norm": 3.139241933822632, + "learning_rate": 1.893619137616251e-05, + "loss": 1.287, + "step": 2771 + }, + { + "epoch": 0.17380400025079942, + "grad_norm": 2.722507953643799, + "learning_rate": 1.893527973187234e-05, + "loss": 1.2551, + "step": 2772 + }, + { + "epoch": 
0.17386670010658975, + "grad_norm": 3.0342001914978027, + "learning_rate": 1.8934367719090842e-05, + "loss": 1.2449, + "step": 2773 + }, + { + "epoch": 0.17392939996238008, + "grad_norm": 3.0632762908935547, + "learning_rate": 1.8933455337855633e-05, + "loss": 1.3014, + "step": 2774 + }, + { + "epoch": 0.17399209981817043, + "grad_norm": 2.913644313812256, + "learning_rate": 1.8932542588204334e-05, + "loss": 1.1016, + "step": 2775 + }, + { + "epoch": 0.17405479967396076, + "grad_norm": 3.1172475814819336, + "learning_rate": 1.8931629470174585e-05, + "loss": 1.2689, + "step": 2776 + }, + { + "epoch": 0.17411749952975109, + "grad_norm": 2.547609567642212, + "learning_rate": 1.8930715983804054e-05, + "loss": 1.1136, + "step": 2777 + }, + { + "epoch": 0.1741801993855414, + "grad_norm": 2.87872314453125, + "learning_rate": 1.8929802129130398e-05, + "loss": 1.1176, + "step": 2778 + }, + { + "epoch": 0.17424289924133174, + "grad_norm": 2.7386531829833984, + "learning_rate": 1.892888790619132e-05, + "loss": 1.1664, + "step": 2779 + }, + { + "epoch": 0.17430559909712207, + "grad_norm": 2.8294100761413574, + "learning_rate": 1.8927973315024508e-05, + "loss": 1.1457, + "step": 2780 + }, + { + "epoch": 0.17436829895291242, + "grad_norm": 3.041231155395508, + "learning_rate": 1.892705835566769e-05, + "loss": 1.2184, + "step": 2781 + }, + { + "epoch": 0.17443099880870275, + "grad_norm": 2.7401955127716064, + "learning_rate": 1.8926143028158597e-05, + "loss": 1.0145, + "step": 2782 + }, + { + "epoch": 0.17449369866449307, + "grad_norm": 3.050311803817749, + "learning_rate": 1.892522733253497e-05, + "loss": 1.2287, + "step": 2783 + }, + { + "epoch": 0.1745563985202834, + "grad_norm": 3.1074516773223877, + "learning_rate": 1.8924311268834584e-05, + "loss": 0.9278, + "step": 2784 + }, + { + "epoch": 0.17461909837607373, + "grad_norm": 2.7620186805725098, + "learning_rate": 1.8923394837095207e-05, + "loss": 1.1573, + "step": 2785 + }, + { + "epoch": 0.17468179823186405, + "grad_norm": 2.8111486434936523, + "learning_rate": 1.892247803735464e-05, + "loss": 1.3534, + "step": 2786 + }, + { + "epoch": 0.1747444980876544, + "grad_norm": 2.720156192779541, + "learning_rate": 1.892156086965069e-05, + "loss": 1.3052, + "step": 2787 + }, + { + "epoch": 0.17480719794344474, + "grad_norm": 2.7748618125915527, + "learning_rate": 1.892064333402118e-05, + "loss": 1.1744, + "step": 2788 + }, + { + "epoch": 0.17486989779923506, + "grad_norm": 2.898322820663452, + "learning_rate": 1.8919725430503946e-05, + "loss": 1.1427, + "step": 2789 + }, + { + "epoch": 0.1749325976550254, + "grad_norm": 2.9672677516937256, + "learning_rate": 1.891880715913685e-05, + "loss": 1.2517, + "step": 2790 + }, + { + "epoch": 0.17499529751081572, + "grad_norm": 2.7148098945617676, + "learning_rate": 1.8917888519957756e-05, + "loss": 1.2974, + "step": 2791 + }, + { + "epoch": 0.17505799736660604, + "grad_norm": 3.1995620727539062, + "learning_rate": 1.8916969513004546e-05, + "loss": 1.0627, + "step": 2792 + }, + { + "epoch": 0.1751206972223964, + "grad_norm": 3.132359743118286, + "learning_rate": 1.8916050138315127e-05, + "loss": 1.3226, + "step": 2793 + }, + { + "epoch": 0.17518339707818673, + "grad_norm": 3.0997564792633057, + "learning_rate": 1.891513039592741e-05, + "loss": 1.1899, + "step": 2794 + }, + { + "epoch": 0.17524609693397705, + "grad_norm": 2.739711046218872, + "learning_rate": 1.891421028587933e-05, + "loss": 1.1555, + "step": 2795 + }, + { + "epoch": 0.17530879678976738, + "grad_norm": 2.787647008895874, + "learning_rate": 
1.8913289808208823e-05, + "loss": 1.2065, + "step": 2796 + }, + { + "epoch": 0.1753714966455577, + "grad_norm": 3.1336863040924072, + "learning_rate": 1.891236896295386e-05, + "loss": 1.1123, + "step": 2797 + }, + { + "epoch": 0.17543419650134803, + "grad_norm": 2.909325122833252, + "learning_rate": 1.8911447750152407e-05, + "loss": 1.1907, + "step": 2798 + }, + { + "epoch": 0.1754968963571384, + "grad_norm": 3.004171133041382, + "learning_rate": 1.891052616984246e-05, + "loss": 1.1071, + "step": 2799 + }, + { + "epoch": 0.17555959621292871, + "grad_norm": 2.6269941329956055, + "learning_rate": 1.8909604222062027e-05, + "loss": 1.2115, + "step": 2800 + }, + { + "epoch": 0.17562229606871904, + "grad_norm": 2.6703240871429443, + "learning_rate": 1.8908681906849125e-05, + "loss": 1.3936, + "step": 2801 + }, + { + "epoch": 0.17568499592450937, + "grad_norm": 2.862783193588257, + "learning_rate": 1.8907759224241795e-05, + "loss": 1.1824, + "step": 2802 + }, + { + "epoch": 0.1757476957802997, + "grad_norm": 2.9030139446258545, + "learning_rate": 1.8906836174278088e-05, + "loss": 1.1942, + "step": 2803 + }, + { + "epoch": 0.17581039563609005, + "grad_norm": 2.7227783203125, + "learning_rate": 1.890591275699606e-05, + "loss": 1.1415, + "step": 2804 + }, + { + "epoch": 0.17587309549188038, + "grad_norm": 2.996716260910034, + "learning_rate": 1.8904988972433807e-05, + "loss": 1.0451, + "step": 2805 + }, + { + "epoch": 0.1759357953476707, + "grad_norm": 2.744638681411743, + "learning_rate": 1.8904064820629418e-05, + "loss": 1.1452, + "step": 2806 + }, + { + "epoch": 0.17599849520346103, + "grad_norm": 2.9094326496124268, + "learning_rate": 1.8903140301621007e-05, + "loss": 1.191, + "step": 2807 + }, + { + "epoch": 0.17606119505925136, + "grad_norm": 2.7718887329101562, + "learning_rate": 1.89022154154467e-05, + "loss": 1.2497, + "step": 2808 + }, + { + "epoch": 0.17612389491504168, + "grad_norm": 2.9387755393981934, + "learning_rate": 1.8901290162144645e-05, + "loss": 1.0601, + "step": 2809 + }, + { + "epoch": 0.17618659477083204, + "grad_norm": 2.8881618976593018, + "learning_rate": 1.890036454175299e-05, + "loss": 1.0203, + "step": 2810 + }, + { + "epoch": 0.17624929462662237, + "grad_norm": 2.9676194190979004, + "learning_rate": 1.8899438554309916e-05, + "loss": 1.1266, + "step": 2811 + }, + { + "epoch": 0.1763119944824127, + "grad_norm": 2.7369816303253174, + "learning_rate": 1.8898512199853604e-05, + "loss": 1.2022, + "step": 2812 + }, + { + "epoch": 0.17637469433820302, + "grad_norm": 2.9564011096954346, + "learning_rate": 1.8897585478422263e-05, + "loss": 1.325, + "step": 2813 + }, + { + "epoch": 0.17643739419399335, + "grad_norm": 2.8316428661346436, + "learning_rate": 1.8896658390054105e-05, + "loss": 1.2603, + "step": 2814 + }, + { + "epoch": 0.17650009404978367, + "grad_norm": 3.3397531509399414, + "learning_rate": 1.889573093478737e-05, + "loss": 1.2012, + "step": 2815 + }, + { + "epoch": 0.17656279390557403, + "grad_norm": 3.0888848304748535, + "learning_rate": 1.8894803112660304e-05, + "loss": 1.2721, + "step": 2816 + }, + { + "epoch": 0.17662549376136435, + "grad_norm": 2.97981858253479, + "learning_rate": 1.8893874923711165e-05, + "loss": 1.13, + "step": 2817 + }, + { + "epoch": 0.17668819361715468, + "grad_norm": 2.566985607147217, + "learning_rate": 1.8892946367978237e-05, + "loss": 1.4088, + "step": 2818 + }, + { + "epoch": 0.176750893472945, + "grad_norm": 2.8213136196136475, + "learning_rate": 1.8892017445499812e-05, + "loss": 1.1564, + "step": 2819 + }, + { + "epoch": 
0.17681359332873534, + "grad_norm": 2.885575532913208, + "learning_rate": 1.88910881563142e-05, + "loss": 1.1738, + "step": 2820 + }, + { + "epoch": 0.17687629318452566, + "grad_norm": 2.7460994720458984, + "learning_rate": 1.8890158500459726e-05, + "loss": 1.3966, + "step": 2821 + }, + { + "epoch": 0.17693899304031602, + "grad_norm": 2.710650682449341, + "learning_rate": 1.8889228477974728e-05, + "loss": 1.3638, + "step": 2822 + }, + { + "epoch": 0.17700169289610634, + "grad_norm": 3.1509039402008057, + "learning_rate": 1.8888298088897556e-05, + "loss": 1.178, + "step": 2823 + }, + { + "epoch": 0.17706439275189667, + "grad_norm": 2.712672710418701, + "learning_rate": 1.888736733326658e-05, + "loss": 1.286, + "step": 2824 + }, + { + "epoch": 0.177127092607687, + "grad_norm": 3.0522239208221436, + "learning_rate": 1.8886436211120195e-05, + "loss": 1.1891, + "step": 2825 + }, + { + "epoch": 0.17718979246347732, + "grad_norm": 3.1839187145233154, + "learning_rate": 1.8885504722496785e-05, + "loss": 1.2311, + "step": 2826 + }, + { + "epoch": 0.17725249231926768, + "grad_norm": 2.777743339538574, + "learning_rate": 1.8884572867434775e-05, + "loss": 1.1776, + "step": 2827 + }, + { + "epoch": 0.177315192175058, + "grad_norm": 2.8245036602020264, + "learning_rate": 1.8883640645972594e-05, + "loss": 1.1762, + "step": 2828 + }, + { + "epoch": 0.17737789203084833, + "grad_norm": 2.7891335487365723, + "learning_rate": 1.8882708058148683e-05, + "loss": 1.3521, + "step": 2829 + }, + { + "epoch": 0.17744059188663866, + "grad_norm": 2.8249833583831787, + "learning_rate": 1.8881775104001505e-05, + "loss": 1.0333, + "step": 2830 + }, + { + "epoch": 0.177503291742429, + "grad_norm": 2.8832030296325684, + "learning_rate": 1.888084178356953e-05, + "loss": 1.0756, + "step": 2831 + }, + { + "epoch": 0.1775659915982193, + "grad_norm": 3.0738682746887207, + "learning_rate": 1.8879908096891256e-05, + "loss": 1.335, + "step": 2832 + }, + { + "epoch": 0.17762869145400967, + "grad_norm": 2.6453871726989746, + "learning_rate": 1.8878974044005183e-05, + "loss": 1.2754, + "step": 2833 + }, + { + "epoch": 0.1776913913098, + "grad_norm": 3.050286054611206, + "learning_rate": 1.8878039624949834e-05, + "loss": 1.063, + "step": 2834 + }, + { + "epoch": 0.17775409116559032, + "grad_norm": 3.021402597427368, + "learning_rate": 1.887710483976374e-05, + "loss": 1.2321, + "step": 2835 + }, + { + "epoch": 0.17781679102138065, + "grad_norm": 2.9464588165283203, + "learning_rate": 1.8876169688485458e-05, + "loss": 1.2288, + "step": 2836 + }, + { + "epoch": 0.17787949087717098, + "grad_norm": 2.909228801727295, + "learning_rate": 1.887523417115355e-05, + "loss": 1.1803, + "step": 2837 + }, + { + "epoch": 0.1779421907329613, + "grad_norm": 2.9016292095184326, + "learning_rate": 1.8874298287806597e-05, + "loss": 1.1439, + "step": 2838 + }, + { + "epoch": 0.17800489058875166, + "grad_norm": 2.9646642208099365, + "learning_rate": 1.887336203848319e-05, + "loss": 1.111, + "step": 2839 + }, + { + "epoch": 0.17806759044454198, + "grad_norm": 2.7661125659942627, + "learning_rate": 1.8872425423221954e-05, + "loss": 1.1798, + "step": 2840 + }, + { + "epoch": 0.1781302903003323, + "grad_norm": 2.6378400325775146, + "learning_rate": 1.8871488442061502e-05, + "loss": 1.1071, + "step": 2841 + }, + { + "epoch": 0.17819299015612264, + "grad_norm": 2.593076705932617, + "learning_rate": 1.8870551095040476e-05, + "loss": 1.377, + "step": 2842 + }, + { + "epoch": 0.17825569001191297, + "grad_norm": 2.6056764125823975, + "learning_rate": 
1.886961338219754e-05, + "loss": 1.2816, + "step": 2843 + }, + { + "epoch": 0.1783183898677033, + "grad_norm": 2.9345057010650635, + "learning_rate": 1.8868675303571356e-05, + "loss": 1.3012, + "step": 2844 + }, + { + "epoch": 0.17838108972349365, + "grad_norm": 2.716686725616455, + "learning_rate": 1.886773685920062e-05, + "loss": 1.106, + "step": 2845 + }, + { + "epoch": 0.17844378957928397, + "grad_norm": 3.0038998126983643, + "learning_rate": 1.8866798049124026e-05, + "loss": 1.2459, + "step": 2846 + }, + { + "epoch": 0.1785064894350743, + "grad_norm": 2.786943197250366, + "learning_rate": 1.8865858873380297e-05, + "loss": 1.207, + "step": 2847 + }, + { + "epoch": 0.17856918929086463, + "grad_norm": 3.1004951000213623, + "learning_rate": 1.886491933200816e-05, + "loss": 1.1141, + "step": 2848 + }, + { + "epoch": 0.17863188914665495, + "grad_norm": 3.0831298828125, + "learning_rate": 1.8863979425046358e-05, + "loss": 1.2644, + "step": 2849 + }, + { + "epoch": 0.17869458900244528, + "grad_norm": 2.7675893306732178, + "learning_rate": 1.886303915253366e-05, + "loss": 1.089, + "step": 2850 + }, + { + "epoch": 0.17875728885823564, + "grad_norm": 2.7534079551696777, + "learning_rate": 1.886209851450884e-05, + "loss": 1.0625, + "step": 2851 + }, + { + "epoch": 0.17881998871402596, + "grad_norm": 2.9956798553466797, + "learning_rate": 1.8861157511010692e-05, + "loss": 1.0519, + "step": 2852 + }, + { + "epoch": 0.1788826885698163, + "grad_norm": 3.0112602710723877, + "learning_rate": 1.886021614207802e-05, + "loss": 1.2923, + "step": 2853 + }, + { + "epoch": 0.17894538842560662, + "grad_norm": 2.70306396484375, + "learning_rate": 1.8859274407749646e-05, + "loss": 1.1573, + "step": 2854 + }, + { + "epoch": 0.17900808828139694, + "grad_norm": 3.143372058868408, + "learning_rate": 1.885833230806441e-05, + "loss": 1.0272, + "step": 2855 + }, + { + "epoch": 0.1790707881371873, + "grad_norm": 3.0222811698913574, + "learning_rate": 1.8857389843061162e-05, + "loss": 1.2308, + "step": 2856 + }, + { + "epoch": 0.17913348799297762, + "grad_norm": 2.919097661972046, + "learning_rate": 1.885644701277877e-05, + "loss": 1.0091, + "step": 2857 + }, + { + "epoch": 0.17919618784876795, + "grad_norm": 2.581102132797241, + "learning_rate": 1.8855503817256118e-05, + "loss": 1.2278, + "step": 2858 + }, + { + "epoch": 0.17925888770455828, + "grad_norm": 2.644347667694092, + "learning_rate": 1.8854560256532098e-05, + "loss": 1.1768, + "step": 2859 + }, + { + "epoch": 0.1793215875603486, + "grad_norm": 2.708163022994995, + "learning_rate": 1.8853616330645632e-05, + "loss": 1.3526, + "step": 2860 + }, + { + "epoch": 0.17938428741613893, + "grad_norm": 2.8539540767669678, + "learning_rate": 1.8852672039635636e-05, + "loss": 1.2065, + "step": 2861 + }, + { + "epoch": 0.1794469872719293, + "grad_norm": 3.0937869548797607, + "learning_rate": 1.8851727383541064e-05, + "loss": 1.261, + "step": 2862 + }, + { + "epoch": 0.1795096871277196, + "grad_norm": 2.856311559677124, + "learning_rate": 1.8850782362400868e-05, + "loss": 1.1802, + "step": 2863 + }, + { + "epoch": 0.17957238698350994, + "grad_norm": 2.9557974338531494, + "learning_rate": 1.8849836976254016e-05, + "loss": 1.301, + "step": 2864 + }, + { + "epoch": 0.17963508683930027, + "grad_norm": 3.017350435256958, + "learning_rate": 1.8848891225139505e-05, + "loss": 1.2354, + "step": 2865 + }, + { + "epoch": 0.1796977866950906, + "grad_norm": 2.852926015853882, + "learning_rate": 1.8847945109096332e-05, + "loss": 1.1858, + "step": 2866 + }, + { + "epoch": 
0.17976048655088092, + "grad_norm": 2.8488662242889404, + "learning_rate": 1.8846998628163515e-05, + "loss": 1.1456, + "step": 2867 + }, + { + "epoch": 0.17982318640667128, + "grad_norm": 2.886720657348633, + "learning_rate": 1.884605178238009e-05, + "loss": 1.1511, + "step": 2868 + }, + { + "epoch": 0.1798858862624616, + "grad_norm": 2.7643580436706543, + "learning_rate": 1.88451045717851e-05, + "loss": 1.2165, + "step": 2869 + }, + { + "epoch": 0.17994858611825193, + "grad_norm": 2.75429630279541, + "learning_rate": 1.8844156996417614e-05, + "loss": 1.1296, + "step": 2870 + }, + { + "epoch": 0.18001128597404226, + "grad_norm": 3.118835210800171, + "learning_rate": 1.884320905631671e-05, + "loss": 1.0061, + "step": 2871 + }, + { + "epoch": 0.18007398582983258, + "grad_norm": 2.95624041557312, + "learning_rate": 1.8842260751521473e-05, + "loss": 1.2342, + "step": 2872 + }, + { + "epoch": 0.1801366856856229, + "grad_norm": 2.79201340675354, + "learning_rate": 1.884131208207102e-05, + "loss": 1.1889, + "step": 2873 + }, + { + "epoch": 0.18019938554141327, + "grad_norm": 2.732693910598755, + "learning_rate": 1.884036304800447e-05, + "loss": 1.2284, + "step": 2874 + }, + { + "epoch": 0.1802620853972036, + "grad_norm": 3.2487175464630127, + "learning_rate": 1.8839413649360957e-05, + "loss": 1.1697, + "step": 2875 + }, + { + "epoch": 0.18032478525299392, + "grad_norm": 3.062136650085449, + "learning_rate": 1.8838463886179647e-05, + "loss": 1.3518, + "step": 2876 + }, + { + "epoch": 0.18038748510878425, + "grad_norm": 2.6209964752197266, + "learning_rate": 1.8837513758499698e-05, + "loss": 1.0777, + "step": 2877 + }, + { + "epoch": 0.18045018496457457, + "grad_norm": 2.680304765701294, + "learning_rate": 1.8836563266360292e-05, + "loss": 1.1802, + "step": 2878 + }, + { + "epoch": 0.1805128848203649, + "grad_norm": 2.7982938289642334, + "learning_rate": 1.8835612409800634e-05, + "loss": 1.0969, + "step": 2879 + }, + { + "epoch": 0.18057558467615525, + "grad_norm": 2.8288073539733887, + "learning_rate": 1.8834661188859932e-05, + "loss": 1.2069, + "step": 2880 + }, + { + "epoch": 0.18063828453194558, + "grad_norm": 2.6468467712402344, + "learning_rate": 1.883370960357742e-05, + "loss": 1.1821, + "step": 2881 + }, + { + "epoch": 0.1807009843877359, + "grad_norm": 2.9256622791290283, + "learning_rate": 1.8832757653992335e-05, + "loss": 1.2022, + "step": 2882 + }, + { + "epoch": 0.18076368424352623, + "grad_norm": 3.089616298675537, + "learning_rate": 1.883180534014394e-05, + "loss": 1.332, + "step": 2883 + }, + { + "epoch": 0.18082638409931656, + "grad_norm": 2.906003952026367, + "learning_rate": 1.8830852662071507e-05, + "loss": 1.2568, + "step": 2884 + }, + { + "epoch": 0.18088908395510692, + "grad_norm": 2.6964898109436035, + "learning_rate": 1.8829899619814328e-05, + "loss": 1.1911, + "step": 2885 + }, + { + "epoch": 0.18095178381089724, + "grad_norm": 2.792837142944336, + "learning_rate": 1.8828946213411697e-05, + "loss": 1.3189, + "step": 2886 + }, + { + "epoch": 0.18101448366668757, + "grad_norm": 2.8725197315216064, + "learning_rate": 1.882799244290294e-05, + "loss": 1.0846, + "step": 2887 + }, + { + "epoch": 0.1810771835224779, + "grad_norm": 2.986074924468994, + "learning_rate": 1.882703830832739e-05, + "loss": 1.2289, + "step": 2888 + }, + { + "epoch": 0.18113988337826822, + "grad_norm": 2.9162747859954834, + "learning_rate": 1.8826083809724398e-05, + "loss": 1.1292, + "step": 2889 + }, + { + "epoch": 0.18120258323405855, + "grad_norm": 2.9902055263519287, + "learning_rate": 
1.8825128947133323e-05, + "loss": 1.08, + "step": 2890 + }, + { + "epoch": 0.1812652830898489, + "grad_norm": 3.118096351623535, + "learning_rate": 1.8824173720593545e-05, + "loss": 1.1757, + "step": 2891 + }, + { + "epoch": 0.18132798294563923, + "grad_norm": 3.381824254989624, + "learning_rate": 1.8823218130144453e-05, + "loss": 0.9564, + "step": 2892 + }, + { + "epoch": 0.18139068280142956, + "grad_norm": 3.3165335655212402, + "learning_rate": 1.8822262175825463e-05, + "loss": 1.2569, + "step": 2893 + }, + { + "epoch": 0.18145338265721989, + "grad_norm": 3.058852434158325, + "learning_rate": 1.8821305857675997e-05, + "loss": 1.0449, + "step": 2894 + }, + { + "epoch": 0.1815160825130102, + "grad_norm": 3.32616925239563, + "learning_rate": 1.8820349175735488e-05, + "loss": 1.3624, + "step": 2895 + }, + { + "epoch": 0.18157878236880054, + "grad_norm": 3.385446548461914, + "learning_rate": 1.8819392130043396e-05, + "loss": 1.3023, + "step": 2896 + }, + { + "epoch": 0.1816414822245909, + "grad_norm": 2.9729597568511963, + "learning_rate": 1.8818434720639188e-05, + "loss": 1.0958, + "step": 2897 + }, + { + "epoch": 0.18170418208038122, + "grad_norm": 3.076848030090332, + "learning_rate": 1.8817476947562346e-05, + "loss": 1.1605, + "step": 2898 + }, + { + "epoch": 0.18176688193617155, + "grad_norm": 3.0801587104797363, + "learning_rate": 1.881651881085237e-05, + "loss": 1.335, + "step": 2899 + }, + { + "epoch": 0.18182958179196188, + "grad_norm": 2.929202079772949, + "learning_rate": 1.8815560310548778e-05, + "loss": 1.1824, + "step": 2900 + }, + { + "epoch": 0.1818922816477522, + "grad_norm": 2.9286422729492188, + "learning_rate": 1.881460144669109e-05, + "loss": 1.1495, + "step": 2901 + }, + { + "epoch": 0.18195498150354253, + "grad_norm": 3.211233377456665, + "learning_rate": 1.881364221931885e-05, + "loss": 1.0742, + "step": 2902 + }, + { + "epoch": 0.18201768135933288, + "grad_norm": 3.1734187602996826, + "learning_rate": 1.8812682628471624e-05, + "loss": 1.2641, + "step": 2903 + }, + { + "epoch": 0.1820803812151232, + "grad_norm": 2.984389543533325, + "learning_rate": 1.881172267418898e-05, + "loss": 1.1618, + "step": 2904 + }, + { + "epoch": 0.18214308107091354, + "grad_norm": 2.907313585281372, + "learning_rate": 1.881076235651051e-05, + "loss": 1.2251, + "step": 2905 + }, + { + "epoch": 0.18220578092670386, + "grad_norm": 3.13529896736145, + "learning_rate": 1.8809801675475812e-05, + "loss": 1.2843, + "step": 2906 + }, + { + "epoch": 0.1822684807824942, + "grad_norm": 2.82145357131958, + "learning_rate": 1.880884063112451e-05, + "loss": 1.0852, + "step": 2907 + }, + { + "epoch": 0.18233118063828455, + "grad_norm": 3.0501716136932373, + "learning_rate": 1.8807879223496236e-05, + "loss": 1.3375, + "step": 2908 + }, + { + "epoch": 0.18239388049407487, + "grad_norm": 2.959868907928467, + "learning_rate": 1.880691745263064e-05, + "loss": 1.2533, + "step": 2909 + }, + { + "epoch": 0.1824565803498652, + "grad_norm": 3.137331485748291, + "learning_rate": 1.880595531856738e-05, + "loss": 1.2708, + "step": 2910 + }, + { + "epoch": 0.18251928020565553, + "grad_norm": 3.069136619567871, + "learning_rate": 1.8804992821346143e-05, + "loss": 1.0584, + "step": 2911 + }, + { + "epoch": 0.18258198006144585, + "grad_norm": 3.1663005352020264, + "learning_rate": 1.8804029961006615e-05, + "loss": 1.2865, + "step": 2912 + }, + { + "epoch": 0.18264467991723618, + "grad_norm": 3.1188223361968994, + "learning_rate": 1.8803066737588504e-05, + "loss": 1.0756, + "step": 2913 + }, + { + "epoch": 
0.18270737977302653, + "grad_norm": 2.9772231578826904, + "learning_rate": 1.8802103151131537e-05, + "loss": 1.0874, + "step": 2914 + }, + { + "epoch": 0.18277007962881686, + "grad_norm": 3.2049570083618164, + "learning_rate": 1.8801139201675457e-05, + "loss": 1.2477, + "step": 2915 + }, + { + "epoch": 0.1828327794846072, + "grad_norm": 3.1263227462768555, + "learning_rate": 1.880017488926001e-05, + "loss": 1.2329, + "step": 2916 + }, + { + "epoch": 0.18289547934039752, + "grad_norm": 3.0901527404785156, + "learning_rate": 1.8799210213924963e-05, + "loss": 1.2097, + "step": 2917 + }, + { + "epoch": 0.18295817919618784, + "grad_norm": 2.9838736057281494, + "learning_rate": 1.8798245175710102e-05, + "loss": 1.0492, + "step": 2918 + }, + { + "epoch": 0.18302087905197817, + "grad_norm": 3.131883382797241, + "learning_rate": 1.879727977465523e-05, + "loss": 1.276, + "step": 2919 + }, + { + "epoch": 0.18308357890776852, + "grad_norm": 3.008759021759033, + "learning_rate": 1.8796314010800154e-05, + "loss": 1.1803, + "step": 2920 + }, + { + "epoch": 0.18314627876355885, + "grad_norm": 2.6786975860595703, + "learning_rate": 1.87953478841847e-05, + "loss": 1.2323, + "step": 2921 + }, + { + "epoch": 0.18320897861934918, + "grad_norm": 2.9841108322143555, + "learning_rate": 1.879438139484872e-05, + "loss": 1.1453, + "step": 2922 + }, + { + "epoch": 0.1832716784751395, + "grad_norm": 3.0182154178619385, + "learning_rate": 1.879341454283207e-05, + "loss": 1.1703, + "step": 2923 + }, + { + "epoch": 0.18333437833092983, + "grad_norm": 2.841233015060425, + "learning_rate": 1.8792447328174613e-05, + "loss": 1.397, + "step": 2924 + }, + { + "epoch": 0.18339707818672016, + "grad_norm": 2.88016414642334, + "learning_rate": 1.879147975091625e-05, + "loss": 1.2233, + "step": 2925 + }, + { + "epoch": 0.1834597780425105, + "grad_norm": 2.7666847705841064, + "learning_rate": 1.8790511811096876e-05, + "loss": 1.1791, + "step": 2926 + }, + { + "epoch": 0.18352247789830084, + "grad_norm": 3.0029330253601074, + "learning_rate": 1.878954350875641e-05, + "loss": 1.005, + "step": 2927 + }, + { + "epoch": 0.18358517775409117, + "grad_norm": 3.010132312774658, + "learning_rate": 1.878857484393479e-05, + "loss": 1.1202, + "step": 2928 + }, + { + "epoch": 0.1836478776098815, + "grad_norm": 2.8456194400787354, + "learning_rate": 1.8787605816671956e-05, + "loss": 1.3028, + "step": 2929 + }, + { + "epoch": 0.18371057746567182, + "grad_norm": 2.8411288261413574, + "learning_rate": 1.8786636427007875e-05, + "loss": 1.2181, + "step": 2930 + }, + { + "epoch": 0.18377327732146215, + "grad_norm": 2.7271149158477783, + "learning_rate": 1.878566667498252e-05, + "loss": 1.2145, + "step": 2931 + }, + { + "epoch": 0.1838359771772525, + "grad_norm": 3.2262661457061768, + "learning_rate": 1.8784696560635894e-05, + "loss": 1.0782, + "step": 2932 + }, + { + "epoch": 0.18389867703304283, + "grad_norm": 2.9411470890045166, + "learning_rate": 1.8783726084007995e-05, + "loss": 1.3072, + "step": 2933 + }, + { + "epoch": 0.18396137688883316, + "grad_norm": 2.8622748851776123, + "learning_rate": 1.878275524513885e-05, + "loss": 1.3141, + "step": 2934 + }, + { + "epoch": 0.18402407674462348, + "grad_norm": 2.9947972297668457, + "learning_rate": 1.8781784044068495e-05, + "loss": 1.0299, + "step": 2935 + }, + { + "epoch": 0.1840867766004138, + "grad_norm": 3.0509440898895264, + "learning_rate": 1.878081248083698e-05, + "loss": 1.1509, + "step": 2936 + }, + { + "epoch": 0.18414947645620416, + "grad_norm": 3.0944955348968506, + "learning_rate": 
1.8779840555484375e-05, + "loss": 1.1135, + "step": 2937 + }, + { + "epoch": 0.1842121763119945, + "grad_norm": 3.0690078735351562, + "learning_rate": 1.8778868268050766e-05, + "loss": 1.1624, + "step": 2938 + }, + { + "epoch": 0.18427487616778482, + "grad_norm": 3.1493279933929443, + "learning_rate": 1.8777895618576244e-05, + "loss": 1.2842, + "step": 2939 + }, + { + "epoch": 0.18433757602357514, + "grad_norm": 2.836914539337158, + "learning_rate": 1.8776922607100924e-05, + "loss": 1.2962, + "step": 2940 + }, + { + "epoch": 0.18440027587936547, + "grad_norm": 3.0111351013183594, + "learning_rate": 1.8775949233664933e-05, + "loss": 1.2662, + "step": 2941 + }, + { + "epoch": 0.1844629757351558, + "grad_norm": 2.8850152492523193, + "learning_rate": 1.877497549830841e-05, + "loss": 1.1884, + "step": 2942 + }, + { + "epoch": 0.18452567559094615, + "grad_norm": 2.6440374851226807, + "learning_rate": 1.8774001401071516e-05, + "loss": 1.2397, + "step": 2943 + }, + { + "epoch": 0.18458837544673648, + "grad_norm": 2.769603729248047, + "learning_rate": 1.877302694199442e-05, + "loss": 1.2743, + "step": 2944 + }, + { + "epoch": 0.1846510753025268, + "grad_norm": 3.021319627761841, + "learning_rate": 1.8772052121117314e-05, + "loss": 1.0811, + "step": 2945 + }, + { + "epoch": 0.18471377515831713, + "grad_norm": 3.037628412246704, + "learning_rate": 1.877107693848039e-05, + "loss": 1.2873, + "step": 2946 + }, + { + "epoch": 0.18477647501410746, + "grad_norm": 2.6876840591430664, + "learning_rate": 1.8770101394123872e-05, + "loss": 1.2176, + "step": 2947 + }, + { + "epoch": 0.1848391748698978, + "grad_norm": 2.6496126651763916, + "learning_rate": 1.876912548808799e-05, + "loss": 1.2216, + "step": 2948 + }, + { + "epoch": 0.18490187472568814, + "grad_norm": 2.8271002769470215, + "learning_rate": 1.876814922041299e-05, + "loss": 1.2415, + "step": 2949 + }, + { + "epoch": 0.18496457458147847, + "grad_norm": 2.7897589206695557, + "learning_rate": 1.8767172591139133e-05, + "loss": 1.315, + "step": 2950 + }, + { + "epoch": 0.1850272744372688, + "grad_norm": 2.8026411533355713, + "learning_rate": 1.8766195600306693e-05, + "loss": 1.2364, + "step": 2951 + }, + { + "epoch": 0.18508997429305912, + "grad_norm": 3.045426607131958, + "learning_rate": 1.8765218247955967e-05, + "loss": 1.0751, + "step": 2952 + }, + { + "epoch": 0.18515267414884945, + "grad_norm": 2.8761794567108154, + "learning_rate": 1.8764240534127255e-05, + "loss": 1.1204, + "step": 2953 + }, + { + "epoch": 0.18521537400463978, + "grad_norm": 2.8863790035247803, + "learning_rate": 1.876326245886088e-05, + "loss": 1.0343, + "step": 2954 + }, + { + "epoch": 0.18527807386043013, + "grad_norm": 3.1769230365753174, + "learning_rate": 1.8762284022197184e-05, + "loss": 1.2283, + "step": 2955 + }, + { + "epoch": 0.18534077371622046, + "grad_norm": 3.0386602878570557, + "learning_rate": 1.876130522417651e-05, + "loss": 1.22, + "step": 2956 + }, + { + "epoch": 0.18540347357201079, + "grad_norm": 2.7412989139556885, + "learning_rate": 1.8760326064839222e-05, + "loss": 1.1788, + "step": 2957 + }, + { + "epoch": 0.1854661734278011, + "grad_norm": 2.9935009479522705, + "learning_rate": 1.8759346544225707e-05, + "loss": 1.2423, + "step": 2958 + }, + { + "epoch": 0.18552887328359144, + "grad_norm": 2.7663049697875977, + "learning_rate": 1.875836666237636e-05, + "loss": 1.3284, + "step": 2959 + }, + { + "epoch": 0.18559157313938177, + "grad_norm": 2.797055721282959, + "learning_rate": 1.8757386419331585e-05, + "loss": 1.1597, + "step": 2960 + }, + { + "epoch": 
0.18565427299517212, + "grad_norm": 2.7796037197113037, + "learning_rate": 1.8756405815131815e-05, + "loss": 1.1766, + "step": 2961 + }, + { + "epoch": 0.18571697285096245, + "grad_norm": 3.0670273303985596, + "learning_rate": 1.8755424849817486e-05, + "loss": 1.088, + "step": 2962 + }, + { + "epoch": 0.18577967270675277, + "grad_norm": 2.7576539516448975, + "learning_rate": 1.8754443523429057e-05, + "loss": 1.1501, + "step": 2963 + }, + { + "epoch": 0.1858423725625431, + "grad_norm": 2.7345385551452637, + "learning_rate": 1.875346183600699e-05, + "loss": 1.1825, + "step": 2964 + }, + { + "epoch": 0.18590507241833343, + "grad_norm": 2.8564741611480713, + "learning_rate": 1.8752479787591776e-05, + "loss": 1.2313, + "step": 2965 + }, + { + "epoch": 0.18596777227412378, + "grad_norm": 2.796553134918213, + "learning_rate": 1.8751497378223914e-05, + "loss": 1.1924, + "step": 2966 + }, + { + "epoch": 0.1860304721299141, + "grad_norm": 2.978092908859253, + "learning_rate": 1.8750514607943916e-05, + "loss": 1.3213, + "step": 2967 + }, + { + "epoch": 0.18609317198570444, + "grad_norm": 3.3403923511505127, + "learning_rate": 1.8749531476792317e-05, + "loss": 1.0651, + "step": 2968 + }, + { + "epoch": 0.18615587184149476, + "grad_norm": 2.782648801803589, + "learning_rate": 1.8748547984809656e-05, + "loss": 1.2174, + "step": 2969 + }, + { + "epoch": 0.1862185716972851, + "grad_norm": 2.8963236808776855, + "learning_rate": 1.8747564132036493e-05, + "loss": 1.3404, + "step": 2970 + }, + { + "epoch": 0.18628127155307542, + "grad_norm": 2.659069776535034, + "learning_rate": 1.8746579918513404e-05, + "loss": 1.1733, + "step": 2971 + }, + { + "epoch": 0.18634397140886577, + "grad_norm": 2.708893299102783, + "learning_rate": 1.8745595344280975e-05, + "loss": 1.2562, + "step": 2972 + }, + { + "epoch": 0.1864066712646561, + "grad_norm": 3.2402100563049316, + "learning_rate": 1.8744610409379816e-05, + "loss": 0.9747, + "step": 2973 + }, + { + "epoch": 0.18646937112044643, + "grad_norm": 3.0225229263305664, + "learning_rate": 1.8743625113850542e-05, + "loss": 1.0903, + "step": 2974 + }, + { + "epoch": 0.18653207097623675, + "grad_norm": 2.668515682220459, + "learning_rate": 1.8742639457733785e-05, + "loss": 1.0738, + "step": 2975 + }, + { + "epoch": 0.18659477083202708, + "grad_norm": 2.909639358520508, + "learning_rate": 1.874165344107019e-05, + "loss": 1.2975, + "step": 2976 + }, + { + "epoch": 0.1866574706878174, + "grad_norm": 2.8933491706848145, + "learning_rate": 1.874066706390043e-05, + "loss": 1.1315, + "step": 2977 + }, + { + "epoch": 0.18672017054360776, + "grad_norm": 2.696437358856201, + "learning_rate": 1.873968032626518e-05, + "loss": 1.1361, + "step": 2978 + }, + { + "epoch": 0.1867828703993981, + "grad_norm": 3.0405406951904297, + "learning_rate": 1.873869322820513e-05, + "loss": 1.1159, + "step": 2979 + }, + { + "epoch": 0.18684557025518841, + "grad_norm": 2.949875593185425, + "learning_rate": 1.873770576976099e-05, + "loss": 1.2565, + "step": 2980 + }, + { + "epoch": 0.18690827011097874, + "grad_norm": 3.231696844100952, + "learning_rate": 1.8736717950973485e-05, + "loss": 1.2295, + "step": 2981 + }, + { + "epoch": 0.18697096996676907, + "grad_norm": 2.8545026779174805, + "learning_rate": 1.8735729771883348e-05, + "loss": 0.986, + "step": 2982 + }, + { + "epoch": 0.1870336698225594, + "grad_norm": 3.2104480266571045, + "learning_rate": 1.873474123253133e-05, + "loss": 1.3715, + "step": 2983 + }, + { + "epoch": 0.18709636967834975, + "grad_norm": 3.3073813915252686, + "learning_rate": 
1.8733752332958208e-05, + "loss": 1.1426, + "step": 2984 + }, + { + "epoch": 0.18715906953414008, + "grad_norm": 3.091254472732544, + "learning_rate": 1.8732763073204755e-05, + "loss": 1.2513, + "step": 2985 + }, + { + "epoch": 0.1872217693899304, + "grad_norm": 3.1182923316955566, + "learning_rate": 1.873177345331177e-05, + "loss": 0.9313, + "step": 2986 + }, + { + "epoch": 0.18728446924572073, + "grad_norm": 3.071592330932617, + "learning_rate": 1.873078347332007e-05, + "loss": 1.2223, + "step": 2987 + }, + { + "epoch": 0.18734716910151106, + "grad_norm": 2.79634952545166, + "learning_rate": 1.872979313327048e-05, + "loss": 1.4128, + "step": 2988 + }, + { + "epoch": 0.1874098689573014, + "grad_norm": 2.9753758907318115, + "learning_rate": 1.8728802433203835e-05, + "loss": 1.1257, + "step": 2989 + }, + { + "epoch": 0.18747256881309174, + "grad_norm": 3.2331268787384033, + "learning_rate": 1.8727811373161e-05, + "loss": 1.251, + "step": 2990 + }, + { + "epoch": 0.18753526866888207, + "grad_norm": 3.0814125537872314, + "learning_rate": 1.872681995318284e-05, + "loss": 1.2654, + "step": 2991 + }, + { + "epoch": 0.1875979685246724, + "grad_norm": 3.025905132293701, + "learning_rate": 1.8725828173310242e-05, + "loss": 1.1523, + "step": 2992 + }, + { + "epoch": 0.18766066838046272, + "grad_norm": 2.8316853046417236, + "learning_rate": 1.8724836033584114e-05, + "loss": 1.1913, + "step": 2993 + }, + { + "epoch": 0.18772336823625305, + "grad_norm": 2.764895439147949, + "learning_rate": 1.872384353404537e-05, + "loss": 1.1539, + "step": 2994 + }, + { + "epoch": 0.1877860680920434, + "grad_norm": 2.913048028945923, + "learning_rate": 1.872285067473493e-05, + "loss": 1.1238, + "step": 2995 + }, + { + "epoch": 0.18784876794783373, + "grad_norm": 2.871539831161499, + "learning_rate": 1.8721857455693748e-05, + "loss": 1.1551, + "step": 2996 + }, + { + "epoch": 0.18791146780362405, + "grad_norm": 2.939009189605713, + "learning_rate": 1.8720863876962786e-05, + "loss": 1.1265, + "step": 2997 + }, + { + "epoch": 0.18797416765941438, + "grad_norm": 2.722538471221924, + "learning_rate": 1.8719869938583018e-05, + "loss": 1.1629, + "step": 2998 + }, + { + "epoch": 0.1880368675152047, + "grad_norm": 2.6758785247802734, + "learning_rate": 1.8718875640595432e-05, + "loss": 1.0871, + "step": 2999 + }, + { + "epoch": 0.18809956737099504, + "grad_norm": 2.994798183441162, + "learning_rate": 1.8717880983041032e-05, + "loss": 1.2424, + "step": 3000 + }, + { + "epoch": 0.18809956737099504, + "eval_loss": 1.1930222511291504, + "eval_runtime": 144.1452, + "eval_samples_per_second": 4.371, + "eval_steps_per_second": 1.096, + "step": 3000 + }, + { + "epoch": 0.1881622672267854, + "grad_norm": 2.854672431945801, + "learning_rate": 1.871688596596084e-05, + "loss": 1.2334, + "step": 3001 + }, + { + "epoch": 0.18822496708257572, + "grad_norm": 3.0170578956604004, + "learning_rate": 1.8715890589395893e-05, + "loss": 1.0738, + "step": 3002 + }, + { + "epoch": 0.18828766693836604, + "grad_norm": 3.0617263317108154, + "learning_rate": 1.8714894853387236e-05, + "loss": 1.1515, + "step": 3003 + }, + { + "epoch": 0.18835036679415637, + "grad_norm": 2.7046799659729004, + "learning_rate": 1.8713898757975935e-05, + "loss": 1.2879, + "step": 3004 + }, + { + "epoch": 0.1884130666499467, + "grad_norm": 2.93300724029541, + "learning_rate": 1.8712902303203067e-05, + "loss": 1.0742, + "step": 3005 + }, + { + "epoch": 0.18847576650573702, + "grad_norm": 2.8415825366973877, + "learning_rate": 1.8711905489109726e-05, + "loss": 1.1546, + "step": 
3006 + }, + { + "epoch": 0.18853846636152738, + "grad_norm": 3.024299383163452, + "learning_rate": 1.8710908315737026e-05, + "loss": 1.2117, + "step": 3007 + }, + { + "epoch": 0.1886011662173177, + "grad_norm": 2.8188226222991943, + "learning_rate": 1.8709910783126086e-05, + "loss": 1.1289, + "step": 3008 + }, + { + "epoch": 0.18866386607310803, + "grad_norm": 2.9379477500915527, + "learning_rate": 1.8708912891318045e-05, + "loss": 1.3328, + "step": 3009 + }, + { + "epoch": 0.18872656592889836, + "grad_norm": 2.921104669570923, + "learning_rate": 1.8707914640354054e-05, + "loss": 1.1032, + "step": 3010 + }, + { + "epoch": 0.1887892657846887, + "grad_norm": 2.8659396171569824, + "learning_rate": 1.8706916030275284e-05, + "loss": 1.312, + "step": 3011 + }, + { + "epoch": 0.188851965640479, + "grad_norm": 3.005645751953125, + "learning_rate": 1.8705917061122917e-05, + "loss": 1.1426, + "step": 3012 + }, + { + "epoch": 0.18891466549626937, + "grad_norm": 2.930431365966797, + "learning_rate": 1.8704917732938152e-05, + "loss": 1.1176, + "step": 3013 + }, + { + "epoch": 0.1889773653520597, + "grad_norm": 2.9437079429626465, + "learning_rate": 1.8703918045762197e-05, + "loss": 1.1084, + "step": 3014 + }, + { + "epoch": 0.18904006520785002, + "grad_norm": 2.833941698074341, + "learning_rate": 1.8702917999636283e-05, + "loss": 1.2148, + "step": 3015 + }, + { + "epoch": 0.18910276506364035, + "grad_norm": 3.031247138977051, + "learning_rate": 1.870191759460165e-05, + "loss": 1.1559, + "step": 3016 + }, + { + "epoch": 0.18916546491943068, + "grad_norm": 3.1400787830352783, + "learning_rate": 1.8700916830699556e-05, + "loss": 1.1731, + "step": 3017 + }, + { + "epoch": 0.18922816477522103, + "grad_norm": 2.8750929832458496, + "learning_rate": 1.8699915707971274e-05, + "loss": 1.1208, + "step": 3018 + }, + { + "epoch": 0.18929086463101136, + "grad_norm": 2.7966835498809814, + "learning_rate": 1.8698914226458083e-05, + "loss": 1.1314, + "step": 3019 + }, + { + "epoch": 0.18935356448680168, + "grad_norm": 2.8201889991760254, + "learning_rate": 1.869791238620129e-05, + "loss": 1.3, + "step": 3020 + }, + { + "epoch": 0.189416264342592, + "grad_norm": 3.086127519607544, + "learning_rate": 1.8696910187242214e-05, + "loss": 1.1686, + "step": 3021 + }, + { + "epoch": 0.18947896419838234, + "grad_norm": 2.7834579944610596, + "learning_rate": 1.869590762962218e-05, + "loss": 1.2473, + "step": 3022 + }, + { + "epoch": 0.18954166405417266, + "grad_norm": 2.8086533546447754, + "learning_rate": 1.869490471338254e-05, + "loss": 1.2406, + "step": 3023 + }, + { + "epoch": 0.18960436390996302, + "grad_norm": 2.851789951324463, + "learning_rate": 1.8693901438564644e-05, + "loss": 1.0698, + "step": 3024 + }, + { + "epoch": 0.18966706376575335, + "grad_norm": 2.4854581356048584, + "learning_rate": 1.8692897805209873e-05, + "loss": 1.1576, + "step": 3025 + }, + { + "epoch": 0.18972976362154367, + "grad_norm": 2.8080437183380127, + "learning_rate": 1.869189381335962e-05, + "loss": 1.0713, + "step": 3026 + }, + { + "epoch": 0.189792463477334, + "grad_norm": 2.7358860969543457, + "learning_rate": 1.8690889463055285e-05, + "loss": 1.168, + "step": 3027 + }, + { + "epoch": 0.18985516333312433, + "grad_norm": 3.0789854526519775, + "learning_rate": 1.868988475433829e-05, + "loss": 1.2421, + "step": 3028 + }, + { + "epoch": 0.18991786318891465, + "grad_norm": 2.9445436000823975, + "learning_rate": 1.8688879687250067e-05, + "loss": 1.3691, + "step": 3029 + }, + { + "epoch": 0.189980563044705, + "grad_norm": 2.7637667655944824, + 
"learning_rate": 1.868787426183207e-05, + "loss": 1.1165, + "step": 3030 + }, + { + "epoch": 0.19004326290049534, + "grad_norm": 3.3925564289093018, + "learning_rate": 1.8686868478125753e-05, + "loss": 1.1073, + "step": 3031 + }, + { + "epoch": 0.19010596275628566, + "grad_norm": 3.3171486854553223, + "learning_rate": 1.8685862336172604e-05, + "loss": 1.2063, + "step": 3032 + }, + { + "epoch": 0.190168662612076, + "grad_norm": 3.3510189056396484, + "learning_rate": 1.868485583601411e-05, + "loss": 1.2297, + "step": 3033 + }, + { + "epoch": 0.19023136246786632, + "grad_norm": 3.5778086185455322, + "learning_rate": 1.8683848977691784e-05, + "loss": 1.3335, + "step": 3034 + }, + { + "epoch": 0.19029406232365664, + "grad_norm": 3.035787582397461, + "learning_rate": 1.868284176124715e-05, + "loss": 0.9887, + "step": 3035 + }, + { + "epoch": 0.190356762179447, + "grad_norm": 2.909966230392456, + "learning_rate": 1.868183418672174e-05, + "loss": 1.2332, + "step": 3036 + }, + { + "epoch": 0.19041946203523732, + "grad_norm": 3.0797691345214844, + "learning_rate": 1.8680826254157108e-05, + "loss": 1.2076, + "step": 3037 + }, + { + "epoch": 0.19048216189102765, + "grad_norm": 2.7962450981140137, + "learning_rate": 1.8679817963594825e-05, + "loss": 1.0261, + "step": 3038 + }, + { + "epoch": 0.19054486174681798, + "grad_norm": 2.8526880741119385, + "learning_rate": 1.8678809315076466e-05, + "loss": 1.2425, + "step": 3039 + }, + { + "epoch": 0.1906075616026083, + "grad_norm": 2.841552972793579, + "learning_rate": 1.8677800308643637e-05, + "loss": 1.06, + "step": 3040 + }, + { + "epoch": 0.19067026145839863, + "grad_norm": 2.791628122329712, + "learning_rate": 1.867679094433794e-05, + "loss": 1.1911, + "step": 3041 + }, + { + "epoch": 0.190732961314189, + "grad_norm": 2.7732911109924316, + "learning_rate": 1.8675781222201008e-05, + "loss": 1.1919, + "step": 3042 + }, + { + "epoch": 0.1907956611699793, + "grad_norm": 2.7192301750183105, + "learning_rate": 1.8674771142274478e-05, + "loss": 1.1283, + "step": 3043 + }, + { + "epoch": 0.19085836102576964, + "grad_norm": 2.732762575149536, + "learning_rate": 1.8673760704600008e-05, + "loss": 0.8935, + "step": 3044 + }, + { + "epoch": 0.19092106088155997, + "grad_norm": 2.923813819885254, + "learning_rate": 1.867274990921927e-05, + "loss": 1.1509, + "step": 3045 + }, + { + "epoch": 0.1909837607373503, + "grad_norm": 3.0378544330596924, + "learning_rate": 1.8671738756173946e-05, + "loss": 1.1236, + "step": 3046 + }, + { + "epoch": 0.19104646059314065, + "grad_norm": 3.2009224891662598, + "learning_rate": 1.867072724550573e-05, + "loss": 1.0197, + "step": 3047 + }, + { + "epoch": 0.19110916044893098, + "grad_norm": 2.670966386795044, + "learning_rate": 1.866971537725635e-05, + "loss": 1.1776, + "step": 3048 + }, + { + "epoch": 0.1911718603047213, + "grad_norm": 3.207517385482788, + "learning_rate": 1.8668703151467534e-05, + "loss": 1.2126, + "step": 3049 + }, + { + "epoch": 0.19123456016051163, + "grad_norm": 3.159608840942383, + "learning_rate": 1.8667690568181014e-05, + "loss": 1.0889, + "step": 3050 + }, + { + "epoch": 0.19129726001630196, + "grad_norm": 2.977445363998413, + "learning_rate": 1.8666677627438557e-05, + "loss": 1.1159, + "step": 3051 + }, + { + "epoch": 0.19135995987209228, + "grad_norm": 2.9462497234344482, + "learning_rate": 1.866566432928194e-05, + "loss": 1.0482, + "step": 3052 + }, + { + "epoch": 0.19142265972788264, + "grad_norm": 2.801382541656494, + "learning_rate": 1.8664650673752945e-05, + "loss": 1.1422, + "step": 3053 + }, + { + 
"epoch": 0.19148535958367296, + "grad_norm": 3.2901759147644043, + "learning_rate": 1.8663636660893378e-05, + "loss": 1.1579, + "step": 3054 + }, + { + "epoch": 0.1915480594394633, + "grad_norm": 2.9027671813964844, + "learning_rate": 1.8662622290745055e-05, + "loss": 1.0163, + "step": 3055 + }, + { + "epoch": 0.19161075929525362, + "grad_norm": 2.8579764366149902, + "learning_rate": 1.8661607563349813e-05, + "loss": 1.0106, + "step": 3056 + }, + { + "epoch": 0.19167345915104395, + "grad_norm": 3.2831616401672363, + "learning_rate": 1.8660592478749497e-05, + "loss": 1.2217, + "step": 3057 + }, + { + "epoch": 0.19173615900683427, + "grad_norm": 2.944979429244995, + "learning_rate": 1.865957703698597e-05, + "loss": 1.2503, + "step": 3058 + }, + { + "epoch": 0.19179885886262463, + "grad_norm": 2.8454337120056152, + "learning_rate": 1.8658561238101105e-05, + "loss": 1.3187, + "step": 3059 + }, + { + "epoch": 0.19186155871841495, + "grad_norm": 3.037935256958008, + "learning_rate": 1.8657545082136797e-05, + "loss": 1.2433, + "step": 3060 + }, + { + "epoch": 0.19192425857420528, + "grad_norm": 2.7626214027404785, + "learning_rate": 1.8656528569134952e-05, + "loss": 1.3081, + "step": 3061 + }, + { + "epoch": 0.1919869584299956, + "grad_norm": 3.1522061824798584, + "learning_rate": 1.865551169913749e-05, + "loss": 1.1881, + "step": 3062 + }, + { + "epoch": 0.19204965828578593, + "grad_norm": 3.2458736896514893, + "learning_rate": 1.8654494472186352e-05, + "loss": 1.3244, + "step": 3063 + }, + { + "epoch": 0.19211235814157626, + "grad_norm": 3.135164260864258, + "learning_rate": 1.8653476888323477e-05, + "loss": 1.1138, + "step": 3064 + }, + { + "epoch": 0.19217505799736662, + "grad_norm": 2.8523800373077393, + "learning_rate": 1.8652458947590842e-05, + "loss": 1.1855, + "step": 3065 + }, + { + "epoch": 0.19223775785315694, + "grad_norm": 2.837409257888794, + "learning_rate": 1.8651440650030423e-05, + "loss": 1.0751, + "step": 3066 + }, + { + "epoch": 0.19230045770894727, + "grad_norm": 2.9744460582733154, + "learning_rate": 1.8650421995684208e-05, + "loss": 1.0594, + "step": 3067 + }, + { + "epoch": 0.1923631575647376, + "grad_norm": 2.6807289123535156, + "learning_rate": 1.864940298459422e-05, + "loss": 1.2821, + "step": 3068 + }, + { + "epoch": 0.19242585742052792, + "grad_norm": 2.501909017562866, + "learning_rate": 1.864838361680247e-05, + "loss": 1.2283, + "step": 3069 + }, + { + "epoch": 0.19248855727631825, + "grad_norm": 2.7006866931915283, + "learning_rate": 1.8647363892351e-05, + "loss": 1.1742, + "step": 3070 + }, + { + "epoch": 0.1925512571321086, + "grad_norm": 2.8238437175750732, + "learning_rate": 1.864634381128187e-05, + "loss": 1.1256, + "step": 3071 + }, + { + "epoch": 0.19261395698789893, + "grad_norm": 3.2115895748138428, + "learning_rate": 1.864532337363714e-05, + "loss": 1.0442, + "step": 3072 + }, + { + "epoch": 0.19267665684368926, + "grad_norm": 2.7930684089660645, + "learning_rate": 1.86443025794589e-05, + "loss": 1.1652, + "step": 3073 + }, + { + "epoch": 0.19273935669947959, + "grad_norm": 2.7628800868988037, + "learning_rate": 1.8643281428789243e-05, + "loss": 1.1427, + "step": 3074 + }, + { + "epoch": 0.1928020565552699, + "grad_norm": 2.992377996444702, + "learning_rate": 1.8642259921670283e-05, + "loss": 1.2132, + "step": 3075 + }, + { + "epoch": 0.19286475641106027, + "grad_norm": 2.8517558574676514, + "learning_rate": 1.8641238058144147e-05, + "loss": 1.2703, + "step": 3076 + }, + { + "epoch": 0.1929274562668506, + "grad_norm": 2.857041597366333, + 
"learning_rate": 1.8640215838252972e-05, + "loss": 1.2374, + "step": 3077 + }, + { + "epoch": 0.19299015612264092, + "grad_norm": 2.816401243209839, + "learning_rate": 1.8639193262038923e-05, + "loss": 0.9926, + "step": 3078 + }, + { + "epoch": 0.19305285597843125, + "grad_norm": 3.0934603214263916, + "learning_rate": 1.8638170329544164e-05, + "loss": 1.1655, + "step": 3079 + }, + { + "epoch": 0.19311555583422158, + "grad_norm": 3.188936948776245, + "learning_rate": 1.8637147040810884e-05, + "loss": 1.1101, + "step": 3080 + }, + { + "epoch": 0.1931782556900119, + "grad_norm": 2.844625949859619, + "learning_rate": 1.8636123395881284e-05, + "loss": 1.107, + "step": 3081 + }, + { + "epoch": 0.19324095554580226, + "grad_norm": 3.069951295852661, + "learning_rate": 1.863509939479758e-05, + "loss": 1.0588, + "step": 3082 + }, + { + "epoch": 0.19330365540159258, + "grad_norm": 2.99521803855896, + "learning_rate": 1.8634075037601995e-05, + "loss": 1.059, + "step": 3083 + }, + { + "epoch": 0.1933663552573829, + "grad_norm": 2.821598768234253, + "learning_rate": 1.863305032433678e-05, + "loss": 1.2158, + "step": 3084 + }, + { + "epoch": 0.19342905511317324, + "grad_norm": 3.024358034133911, + "learning_rate": 1.863202525504419e-05, + "loss": 1.1276, + "step": 3085 + }, + { + "epoch": 0.19349175496896356, + "grad_norm": 2.9847376346588135, + "learning_rate": 1.8630999829766505e-05, + "loss": 1.0761, + "step": 3086 + }, + { + "epoch": 0.1935544548247539, + "grad_norm": 3.2863755226135254, + "learning_rate": 1.862997404854601e-05, + "loss": 1.1781, + "step": 3087 + }, + { + "epoch": 0.19361715468054425, + "grad_norm": 2.861799955368042, + "learning_rate": 1.8628947911425008e-05, + "loss": 1.1239, + "step": 3088 + }, + { + "epoch": 0.19367985453633457, + "grad_norm": 3.019453287124634, + "learning_rate": 1.862792141844582e-05, + "loss": 1.1641, + "step": 3089 + }, + { + "epoch": 0.1937425543921249, + "grad_norm": 2.90606689453125, + "learning_rate": 1.862689456965077e-05, + "loss": 1.1776, + "step": 3090 + }, + { + "epoch": 0.19380525424791523, + "grad_norm": 2.619436264038086, + "learning_rate": 1.8625867365082215e-05, + "loss": 1.1659, + "step": 3091 + }, + { + "epoch": 0.19386795410370555, + "grad_norm": 3.191641330718994, + "learning_rate": 1.8624839804782512e-05, + "loss": 1.1076, + "step": 3092 + }, + { + "epoch": 0.19393065395949588, + "grad_norm": 3.1234588623046875, + "learning_rate": 1.862381188879404e-05, + "loss": 1.2824, + "step": 3093 + }, + { + "epoch": 0.19399335381528623, + "grad_norm": 2.6855199337005615, + "learning_rate": 1.862278361715919e-05, + "loss": 1.1209, + "step": 3094 + }, + { + "epoch": 0.19405605367107656, + "grad_norm": 2.6394078731536865, + "learning_rate": 1.8621754989920364e-05, + "loss": 1.3831, + "step": 3095 + }, + { + "epoch": 0.1941187535268669, + "grad_norm": 2.835850477218628, + "learning_rate": 1.8620726007119987e-05, + "loss": 1.1537, + "step": 3096 + }, + { + "epoch": 0.19418145338265722, + "grad_norm": 3.109924793243408, + "learning_rate": 1.8619696668800494e-05, + "loss": 1.2012, + "step": 3097 + }, + { + "epoch": 0.19424415323844754, + "grad_norm": 2.792111396789551, + "learning_rate": 1.861866697500433e-05, + "loss": 1.1729, + "step": 3098 + }, + { + "epoch": 0.1943068530942379, + "grad_norm": 2.7043824195861816, + "learning_rate": 1.8617636925773968e-05, + "loss": 1.26, + "step": 3099 + }, + { + "epoch": 0.19436955295002822, + "grad_norm": 3.0033726692199707, + "learning_rate": 1.861660652115188e-05, + "loss": 1.3504, + "step": 3100 + }, + { + "epoch": 
0.19443225280581855, + "grad_norm": 3.3536715507507324, + "learning_rate": 1.8615575761180568e-05, + "loss": 1.0851, + "step": 3101 + }, + { + "epoch": 0.19449495266160888, + "grad_norm": 2.657038688659668, + "learning_rate": 1.861454464590253e-05, + "loss": 1.2569, + "step": 3102 + }, + { + "epoch": 0.1945576525173992, + "grad_norm": 3.0939579010009766, + "learning_rate": 1.86135131753603e-05, + "loss": 1.1685, + "step": 3103 + }, + { + "epoch": 0.19462035237318953, + "grad_norm": 3.053687572479248, + "learning_rate": 1.8612481349596406e-05, + "loss": 1.1405, + "step": 3104 + }, + { + "epoch": 0.19468305222897989, + "grad_norm": 3.0244059562683105, + "learning_rate": 1.8611449168653404e-05, + "loss": 1.075, + "step": 3105 + }, + { + "epoch": 0.1947457520847702, + "grad_norm": 2.8500821590423584, + "learning_rate": 1.8610416632573866e-05, + "loss": 1.1235, + "step": 3106 + }, + { + "epoch": 0.19480845194056054, + "grad_norm": 3.0078940391540527, + "learning_rate": 1.8609383741400365e-05, + "loss": 1.0247, + "step": 3107 + }, + { + "epoch": 0.19487115179635087, + "grad_norm": 2.7713265419006348, + "learning_rate": 1.8608350495175508e-05, + "loss": 1.2216, + "step": 3108 + }, + { + "epoch": 0.1949338516521412, + "grad_norm": 2.6871073246002197, + "learning_rate": 1.8607316893941897e-05, + "loss": 1.16, + "step": 3109 + }, + { + "epoch": 0.19499655150793152, + "grad_norm": 3.045931577682495, + "learning_rate": 1.8606282937742165e-05, + "loss": 0.9956, + "step": 3110 + }, + { + "epoch": 0.19505925136372188, + "grad_norm": 2.9611828327178955, + "learning_rate": 1.8605248626618942e-05, + "loss": 1.1432, + "step": 3111 + }, + { + "epoch": 0.1951219512195122, + "grad_norm": 2.941784143447876, + "learning_rate": 1.8604213960614898e-05, + "loss": 1.3154, + "step": 3112 + }, + { + "epoch": 0.19518465107530253, + "grad_norm": 2.9398653507232666, + "learning_rate": 1.860317893977269e-05, + "loss": 1.1654, + "step": 3113 + }, + { + "epoch": 0.19524735093109286, + "grad_norm": 2.9591236114501953, + "learning_rate": 1.860214356413501e-05, + "loss": 1.1621, + "step": 3114 + }, + { + "epoch": 0.19531005078688318, + "grad_norm": 2.7604663372039795, + "learning_rate": 1.860110783374455e-05, + "loss": 1.261, + "step": 3115 + }, + { + "epoch": 0.1953727506426735, + "grad_norm": 3.3240766525268555, + "learning_rate": 1.860007174864403e-05, + "loss": 1.1005, + "step": 3116 + }, + { + "epoch": 0.19543545049846386, + "grad_norm": 3.3149619102478027, + "learning_rate": 1.8599035308876173e-05, + "loss": 1.0303, + "step": 3117 + }, + { + "epoch": 0.1954981503542542, + "grad_norm": 3.125269889831543, + "learning_rate": 1.8597998514483724e-05, + "loss": 1.1708, + "step": 3118 + }, + { + "epoch": 0.19556085021004452, + "grad_norm": 3.1431310176849365, + "learning_rate": 1.8596961365509447e-05, + "loss": 1.1167, + "step": 3119 + }, + { + "epoch": 0.19562355006583484, + "grad_norm": 3.0624778270721436, + "learning_rate": 1.8595923861996098e-05, + "loss": 1.1439, + "step": 3120 + }, + { + "epoch": 0.19568624992162517, + "grad_norm": 3.1564838886260986, + "learning_rate": 1.859488600398648e-05, + "loss": 0.9745, + "step": 3121 + }, + { + "epoch": 0.1957489497774155, + "grad_norm": 2.6421868801116943, + "learning_rate": 1.859384779152339e-05, + "loss": 1.2175, + "step": 3122 + }, + { + "epoch": 0.19581164963320585, + "grad_norm": 3.336972236633301, + "learning_rate": 1.859280922464964e-05, + "loss": 1.164, + "step": 3123 + }, + { + "epoch": 0.19587434948899618, + "grad_norm": 2.7997567653656006, + "learning_rate": 
1.8591770303408057e-05, + "loss": 1.2199, + "step": 3124 + }, + { + "epoch": 0.1959370493447865, + "grad_norm": 3.080458402633667, + "learning_rate": 1.8590731027841498e-05, + "loss": 1.143, + "step": 3125 + }, + { + "epoch": 0.19599974920057683, + "grad_norm": 3.2464709281921387, + "learning_rate": 1.8589691397992814e-05, + "loss": 1.1631, + "step": 3126 + }, + { + "epoch": 0.19606244905636716, + "grad_norm": 2.907796859741211, + "learning_rate": 1.8588651413904875e-05, + "loss": 1.0839, + "step": 3127 + }, + { + "epoch": 0.19612514891215752, + "grad_norm": 3.149677038192749, + "learning_rate": 1.8587611075620586e-05, + "loss": 1.1802, + "step": 3128 + }, + { + "epoch": 0.19618784876794784, + "grad_norm": 2.9420557022094727, + "learning_rate": 1.858657038318284e-05, + "loss": 1.0308, + "step": 3129 + }, + { + "epoch": 0.19625054862373817, + "grad_norm": 3.1188788414001465, + "learning_rate": 1.8585529336634552e-05, + "loss": 1.2153, + "step": 3130 + }, + { + "epoch": 0.1963132484795285, + "grad_norm": 2.7862637042999268, + "learning_rate": 1.8584487936018663e-05, + "loss": 1.0988, + "step": 3131 + }, + { + "epoch": 0.19637594833531882, + "grad_norm": 2.7657384872436523, + "learning_rate": 1.8583446181378114e-05, + "loss": 1.1058, + "step": 3132 + }, + { + "epoch": 0.19643864819110915, + "grad_norm": 2.844907760620117, + "learning_rate": 1.8582404072755872e-05, + "loss": 1.2855, + "step": 3133 + }, + { + "epoch": 0.1965013480468995, + "grad_norm": 2.882035732269287, + "learning_rate": 1.858136161019491e-05, + "loss": 1.2051, + "step": 3134 + }, + { + "epoch": 0.19656404790268983, + "grad_norm": 2.874289035797119, + "learning_rate": 1.8580318793738222e-05, + "loss": 1.1696, + "step": 3135 + }, + { + "epoch": 0.19662674775848016, + "grad_norm": 2.6385555267333984, + "learning_rate": 1.857927562342881e-05, + "loss": 1.2737, + "step": 3136 + }, + { + "epoch": 0.19668944761427049, + "grad_norm": 3.0727500915527344, + "learning_rate": 1.85782320993097e-05, + "loss": 1.1856, + "step": 3137 + }, + { + "epoch": 0.1967521474700608, + "grad_norm": 2.9485185146331787, + "learning_rate": 1.857718822142392e-05, + "loss": 1.1514, + "step": 3138 + }, + { + "epoch": 0.19681484732585114, + "grad_norm": 2.819640874862671, + "learning_rate": 1.8576143989814524e-05, + "loss": 1.0717, + "step": 3139 + }, + { + "epoch": 0.1968775471816415, + "grad_norm": 2.783615827560425, + "learning_rate": 1.8575099404524574e-05, + "loss": 1.1621, + "step": 3140 + }, + { + "epoch": 0.19694024703743182, + "grad_norm": 2.790999412536621, + "learning_rate": 1.857405446559715e-05, + "loss": 1.0657, + "step": 3141 + }, + { + "epoch": 0.19700294689322215, + "grad_norm": 2.741725206375122, + "learning_rate": 1.8573009173075348e-05, + "loss": 1.0227, + "step": 3142 + }, + { + "epoch": 0.19706564674901247, + "grad_norm": 3.0519919395446777, + "learning_rate": 1.8571963527002273e-05, + "loss": 1.0776, + "step": 3143 + }, + { + "epoch": 0.1971283466048028, + "grad_norm": 2.8103766441345215, + "learning_rate": 1.857091752742105e-05, + "loss": 1.1362, + "step": 3144 + }, + { + "epoch": 0.19719104646059313, + "grad_norm": 2.761077404022217, + "learning_rate": 1.856987117437481e-05, + "loss": 1.1932, + "step": 3145 + }, + { + "epoch": 0.19725374631638348, + "grad_norm": 3.348557710647583, + "learning_rate": 1.856882446790671e-05, + "loss": 1.2429, + "step": 3146 + }, + { + "epoch": 0.1973164461721738, + "grad_norm": 2.84415864944458, + "learning_rate": 1.8567777408059914e-05, + "loss": 1.2857, + "step": 3147 + }, + { + "epoch": 
0.19737914602796414, + "grad_norm": 3.0770821571350098, + "learning_rate": 1.8566729994877604e-05, + "loss": 1.2513, + "step": 3148 + }, + { + "epoch": 0.19744184588375446, + "grad_norm": 3.184299945831299, + "learning_rate": 1.8565682228402975e-05, + "loss": 1.2563, + "step": 3149 + }, + { + "epoch": 0.1975045457395448, + "grad_norm": 2.7735469341278076, + "learning_rate": 1.8564634108679235e-05, + "loss": 1.2383, + "step": 3150 + }, + { + "epoch": 0.19756724559533512, + "grad_norm": 2.778296709060669, + "learning_rate": 1.8563585635749613e-05, + "loss": 1.15, + "step": 3151 + }, + { + "epoch": 0.19762994545112547, + "grad_norm": 2.9208598136901855, + "learning_rate": 1.8562536809657343e-05, + "loss": 1.3262, + "step": 3152 + }, + { + "epoch": 0.1976926453069158, + "grad_norm": 2.7633209228515625, + "learning_rate": 1.8561487630445684e-05, + "loss": 1.2138, + "step": 3153 + }, + { + "epoch": 0.19775534516270613, + "grad_norm": 2.996898651123047, + "learning_rate": 1.85604380981579e-05, + "loss": 1.1944, + "step": 3154 + }, + { + "epoch": 0.19781804501849645, + "grad_norm": 2.7476329803466797, + "learning_rate": 1.8559388212837277e-05, + "loss": 1.1272, + "step": 3155 + }, + { + "epoch": 0.19788074487428678, + "grad_norm": 2.8795506954193115, + "learning_rate": 1.855833797452711e-05, + "loss": 1.2039, + "step": 3156 + }, + { + "epoch": 0.19794344473007713, + "grad_norm": 3.1929166316986084, + "learning_rate": 1.8557287383270703e-05, + "loss": 1.1811, + "step": 3157 + }, + { + "epoch": 0.19800614458586746, + "grad_norm": 2.608342409133911, + "learning_rate": 1.85562364391114e-05, + "loss": 1.1736, + "step": 3158 + }, + { + "epoch": 0.1980688444416578, + "grad_norm": 2.4947197437286377, + "learning_rate": 1.855518514209253e-05, + "loss": 1.1004, + "step": 3159 + }, + { + "epoch": 0.19813154429744811, + "grad_norm": 2.8265841007232666, + "learning_rate": 1.855413349225745e-05, + "loss": 1.2103, + "step": 3160 + }, + { + "epoch": 0.19819424415323844, + "grad_norm": 2.894364833831787, + "learning_rate": 1.8553081489649528e-05, + "loss": 1.1326, + "step": 3161 + }, + { + "epoch": 0.19825694400902877, + "grad_norm": 2.950350761413574, + "learning_rate": 1.855202913431216e-05, + "loss": 1.0389, + "step": 3162 + }, + { + "epoch": 0.19831964386481912, + "grad_norm": 3.002042055130005, + "learning_rate": 1.855097642628873e-05, + "loss": 1.2007, + "step": 3163 + }, + { + "epoch": 0.19838234372060945, + "grad_norm": 3.0690269470214844, + "learning_rate": 1.8549923365622662e-05, + "loss": 1.2552, + "step": 3164 + }, + { + "epoch": 0.19844504357639978, + "grad_norm": 2.986337661743164, + "learning_rate": 1.854886995235738e-05, + "loss": 1.1518, + "step": 3165 + }, + { + "epoch": 0.1985077434321901, + "grad_norm": 2.8123600482940674, + "learning_rate": 1.854781618653633e-05, + "loss": 1.1414, + "step": 3166 + }, + { + "epoch": 0.19857044328798043, + "grad_norm": 2.7232303619384766, + "learning_rate": 1.8546762068202967e-05, + "loss": 1.1175, + "step": 3167 + }, + { + "epoch": 0.19863314314377076, + "grad_norm": 2.737354278564453, + "learning_rate": 1.854570759740076e-05, + "loss": 1.2235, + "step": 3168 + }, + { + "epoch": 0.1986958429995611, + "grad_norm": 3.055851459503174, + "learning_rate": 1.85446527741732e-05, + "loss": 1.3092, + "step": 3169 + }, + { + "epoch": 0.19875854285535144, + "grad_norm": 3.3401474952697754, + "learning_rate": 1.8543597598563786e-05, + "loss": 1.132, + "step": 3170 + }, + { + "epoch": 0.19882124271114177, + "grad_norm": 2.8156654834747314, + "learning_rate": 
1.8542542070616035e-05, + "loss": 1.2588, + "step": 3171 + }, + { + "epoch": 0.1988839425669321, + "grad_norm": 3.0127742290496826, + "learning_rate": 1.8541486190373474e-05, + "loss": 1.0834, + "step": 3172 + }, + { + "epoch": 0.19894664242272242, + "grad_norm": 2.893566608428955, + "learning_rate": 1.8540429957879652e-05, + "loss": 1.1656, + "step": 3173 + }, + { + "epoch": 0.19900934227851275, + "grad_norm": 2.829457998275757, + "learning_rate": 1.8539373373178126e-05, + "loss": 1.1976, + "step": 3174 + }, + { + "epoch": 0.1990720421343031, + "grad_norm": 2.7360286712646484, + "learning_rate": 1.8538316436312467e-05, + "loss": 1.3098, + "step": 3175 + }, + { + "epoch": 0.19913474199009343, + "grad_norm": 2.8139424324035645, + "learning_rate": 1.8537259147326264e-05, + "loss": 1.1913, + "step": 3176 + }, + { + "epoch": 0.19919744184588375, + "grad_norm": 2.891643524169922, + "learning_rate": 1.8536201506263122e-05, + "loss": 0.935, + "step": 3177 + }, + { + "epoch": 0.19926014170167408, + "grad_norm": 2.958548069000244, + "learning_rate": 1.8535143513166657e-05, + "loss": 1.1257, + "step": 3178 + }, + { + "epoch": 0.1993228415574644, + "grad_norm": 2.8165900707244873, + "learning_rate": 1.8534085168080503e-05, + "loss": 1.1424, + "step": 3179 + }, + { + "epoch": 0.19938554141325476, + "grad_norm": 2.5961484909057617, + "learning_rate": 1.8533026471048305e-05, + "loss": 1.3424, + "step": 3180 + }, + { + "epoch": 0.1994482412690451, + "grad_norm": 3.007044553756714, + "learning_rate": 1.853196742211372e-05, + "loss": 1.1184, + "step": 3181 + }, + { + "epoch": 0.19951094112483542, + "grad_norm": 2.8357014656066895, + "learning_rate": 1.8530908021320427e-05, + "loss": 1.1744, + "step": 3182 + }, + { + "epoch": 0.19957364098062574, + "grad_norm": 3.0921154022216797, + "learning_rate": 1.8529848268712114e-05, + "loss": 1.3286, + "step": 3183 + }, + { + "epoch": 0.19963634083641607, + "grad_norm": 2.9033896923065186, + "learning_rate": 1.8528788164332484e-05, + "loss": 1.2171, + "step": 3184 + }, + { + "epoch": 0.1996990406922064, + "grad_norm": 2.9517710208892822, + "learning_rate": 1.852772770822526e-05, + "loss": 1.1032, + "step": 3185 + }, + { + "epoch": 0.19976174054799675, + "grad_norm": 2.5870678424835205, + "learning_rate": 1.8526666900434174e-05, + "loss": 1.0566, + "step": 3186 + }, + { + "epoch": 0.19982444040378708, + "grad_norm": 2.6599173545837402, + "learning_rate": 1.852560574100297e-05, + "loss": 1.1075, + "step": 3187 + }, + { + "epoch": 0.1998871402595774, + "grad_norm": 2.941699743270874, + "learning_rate": 1.8524544229975414e-05, + "loss": 1.124, + "step": 3188 + }, + { + "epoch": 0.19994984011536773, + "grad_norm": 3.392522096633911, + "learning_rate": 1.8523482367395283e-05, + "loss": 1.1704, + "step": 3189 + }, + { + "epoch": 0.20001253997115806, + "grad_norm": 3.1392526626586914, + "learning_rate": 1.8522420153306366e-05, + "loss": 1.1161, + "step": 3190 + }, + { + "epoch": 0.2000752398269484, + "grad_norm": 2.8686203956604004, + "learning_rate": 1.852135758775247e-05, + "loss": 1.0714, + "step": 3191 + }, + { + "epoch": 0.20013793968273874, + "grad_norm": 2.670738458633423, + "learning_rate": 1.8520294670777412e-05, + "loss": 1.1817, + "step": 3192 + }, + { + "epoch": 0.20020063953852907, + "grad_norm": 2.9207589626312256, + "learning_rate": 1.8519231402425033e-05, + "loss": 1.2678, + "step": 3193 + }, + { + "epoch": 0.2002633393943194, + "grad_norm": 2.7612807750701904, + "learning_rate": 1.8518167782739177e-05, + "loss": 1.1293, + "step": 3194 + }, + { + "epoch": 
0.20032603925010972, + "grad_norm": 2.90456485748291, + "learning_rate": 1.8517103811763713e-05, + "loss": 1.0931, + "step": 3195 + }, + { + "epoch": 0.20038873910590005, + "grad_norm": 3.1990606784820557, + "learning_rate": 1.8516039489542513e-05, + "loss": 0.8737, + "step": 3196 + }, + { + "epoch": 0.20045143896169038, + "grad_norm": 2.9516429901123047, + "learning_rate": 1.851497481611947e-05, + "loss": 1.1237, + "step": 3197 + }, + { + "epoch": 0.20051413881748073, + "grad_norm": 2.75630784034729, + "learning_rate": 1.8513909791538496e-05, + "loss": 1.158, + "step": 3198 + }, + { + "epoch": 0.20057683867327106, + "grad_norm": 2.966130495071411, + "learning_rate": 1.8512844415843514e-05, + "loss": 1.1744, + "step": 3199 + }, + { + "epoch": 0.20063953852906138, + "grad_norm": 2.4602432250976562, + "learning_rate": 1.851177868907845e-05, + "loss": 1.1877, + "step": 3200 + }, + { + "epoch": 0.2007022383848517, + "grad_norm": 2.6351561546325684, + "learning_rate": 1.8510712611287268e-05, + "loss": 1.1609, + "step": 3201 + }, + { + "epoch": 0.20076493824064204, + "grad_norm": 2.887627363204956, + "learning_rate": 1.8509646182513922e-05, + "loss": 1.2417, + "step": 3202 + }, + { + "epoch": 0.20082763809643236, + "grad_norm": 3.1172077655792236, + "learning_rate": 1.8508579402802398e-05, + "loss": 1.1738, + "step": 3203 + }, + { + "epoch": 0.20089033795222272, + "grad_norm": 2.754431962966919, + "learning_rate": 1.850751227219669e-05, + "loss": 1.1028, + "step": 3204 + }, + { + "epoch": 0.20095303780801305, + "grad_norm": 2.9481987953186035, + "learning_rate": 1.8506444790740798e-05, + "loss": 1.2027, + "step": 3205 + }, + { + "epoch": 0.20101573766380337, + "grad_norm": 3.0520994663238525, + "learning_rate": 1.8505376958478754e-05, + "loss": 1.2832, + "step": 3206 + }, + { + "epoch": 0.2010784375195937, + "grad_norm": 2.6503665447235107, + "learning_rate": 1.8504308775454594e-05, + "loss": 1.3008, + "step": 3207 + }, + { + "epoch": 0.20114113737538403, + "grad_norm": 2.990025281906128, + "learning_rate": 1.8503240241712366e-05, + "loss": 1.2999, + "step": 3208 + }, + { + "epoch": 0.20120383723117438, + "grad_norm": 2.8144404888153076, + "learning_rate": 1.8502171357296144e-05, + "loss": 1.1697, + "step": 3209 + }, + { + "epoch": 0.2012665370869647, + "grad_norm": 2.8032901287078857, + "learning_rate": 1.850110212225e-05, + "loss": 1.2324, + "step": 3210 + }, + { + "epoch": 0.20132923694275504, + "grad_norm": 3.0748727321624756, + "learning_rate": 1.8500032536618032e-05, + "loss": 1.2329, + "step": 3211 + }, + { + "epoch": 0.20139193679854536, + "grad_norm": 2.788668632507324, + "learning_rate": 1.8498962600444356e-05, + "loss": 1.3005, + "step": 3212 + }, + { + "epoch": 0.2014546366543357, + "grad_norm": 2.520127534866333, + "learning_rate": 1.849789231377309e-05, + "loss": 1.2609, + "step": 3213 + }, + { + "epoch": 0.20151733651012602, + "grad_norm": 2.745389938354492, + "learning_rate": 1.849682167664837e-05, + "loss": 1.279, + "step": 3214 + }, + { + "epoch": 0.20158003636591637, + "grad_norm": 2.9558792114257812, + "learning_rate": 1.8495750689114357e-05, + "loss": 1.232, + "step": 3215 + }, + { + "epoch": 0.2016427362217067, + "grad_norm": 3.033529758453369, + "learning_rate": 1.8494679351215212e-05, + "loss": 1.0888, + "step": 3216 + }, + { + "epoch": 0.20170543607749702, + "grad_norm": 2.9365313053131104, + "learning_rate": 1.849360766299512e-05, + "loss": 1.0729, + "step": 3217 + }, + { + "epoch": 0.20176813593328735, + "grad_norm": 2.905231475830078, + "learning_rate": 
1.8492535624498278e-05, + "loss": 1.0699, + "step": 3218 + }, + { + "epoch": 0.20183083578907768, + "grad_norm": 2.838350296020508, + "learning_rate": 1.8491463235768896e-05, + "loss": 1.2721, + "step": 3219 + }, + { + "epoch": 0.201893535644868, + "grad_norm": 2.866278648376465, + "learning_rate": 1.8490390496851198e-05, + "loss": 1.1717, + "step": 3220 + }, + { + "epoch": 0.20195623550065836, + "grad_norm": 2.9525973796844482, + "learning_rate": 1.8489317407789426e-05, + "loss": 1.1168, + "step": 3221 + }, + { + "epoch": 0.2020189353564487, + "grad_norm": 2.6558480262756348, + "learning_rate": 1.848824396862783e-05, + "loss": 1.0352, + "step": 3222 + }, + { + "epoch": 0.202081635212239, + "grad_norm": 2.8939616680145264, + "learning_rate": 1.8487170179410688e-05, + "loss": 1.1437, + "step": 3223 + }, + { + "epoch": 0.20214433506802934, + "grad_norm": 2.8431015014648438, + "learning_rate": 1.8486096040182276e-05, + "loss": 1.1762, + "step": 3224 + }, + { + "epoch": 0.20220703492381967, + "grad_norm": 2.6665303707122803, + "learning_rate": 1.848502155098689e-05, + "loss": 1.2478, + "step": 3225 + }, + { + "epoch": 0.20226973477961, + "grad_norm": 3.0406715869903564, + "learning_rate": 1.8483946711868847e-05, + "loss": 1.1847, + "step": 3226 + }, + { + "epoch": 0.20233243463540035, + "grad_norm": 2.8254196643829346, + "learning_rate": 1.848287152287247e-05, + "loss": 1.2961, + "step": 3227 + }, + { + "epoch": 0.20239513449119068, + "grad_norm": 2.830364942550659, + "learning_rate": 1.84817959840421e-05, + "loss": 1.168, + "step": 3228 + }, + { + "epoch": 0.202457834346981, + "grad_norm": 2.8466591835021973, + "learning_rate": 1.8480720095422096e-05, + "loss": 1.2259, + "step": 3229 + }, + { + "epoch": 0.20252053420277133, + "grad_norm": 2.9750845432281494, + "learning_rate": 1.8479643857056823e-05, + "loss": 1.208, + "step": 3230 + }, + { + "epoch": 0.20258323405856166, + "grad_norm": 2.8709850311279297, + "learning_rate": 1.8478567268990667e-05, + "loss": 1.2041, + "step": 3231 + }, + { + "epoch": 0.20264593391435198, + "grad_norm": 2.7509043216705322, + "learning_rate": 1.8477490331268026e-05, + "loss": 1.134, + "step": 3232 + }, + { + "epoch": 0.20270863377014234, + "grad_norm": 3.2113120555877686, + "learning_rate": 1.8476413043933316e-05, + "loss": 1.1069, + "step": 3233 + }, + { + "epoch": 0.20277133362593266, + "grad_norm": 3.077852964401245, + "learning_rate": 1.847533540703096e-05, + "loss": 1.2241, + "step": 3234 + }, + { + "epoch": 0.202834033481723, + "grad_norm": 2.6640708446502686, + "learning_rate": 1.84742574206054e-05, + "loss": 1.2948, + "step": 3235 + }, + { + "epoch": 0.20289673333751332, + "grad_norm": 2.8562936782836914, + "learning_rate": 1.84731790847011e-05, + "loss": 1.0406, + "step": 3236 + }, + { + "epoch": 0.20295943319330365, + "grad_norm": 2.702653169631958, + "learning_rate": 1.8472100399362518e-05, + "loss": 1.2752, + "step": 3237 + }, + { + "epoch": 0.203022133049094, + "grad_norm": 2.8220584392547607, + "learning_rate": 1.847102136463415e-05, + "loss": 1.1813, + "step": 3238 + }, + { + "epoch": 0.20308483290488433, + "grad_norm": 2.7980966567993164, + "learning_rate": 1.8469941980560488e-05, + "loss": 1.2306, + "step": 3239 + }, + { + "epoch": 0.20314753276067465, + "grad_norm": 2.8354859352111816, + "learning_rate": 1.846886224718605e-05, + "loss": 1.0719, + "step": 3240 + }, + { + "epoch": 0.20321023261646498, + "grad_norm": 2.836912155151367, + "learning_rate": 1.846778216455536e-05, + "loss": 1.2627, + "step": 3241 + }, + { + "epoch": 
0.2032729324722553, + "grad_norm": 3.0318105220794678, + "learning_rate": 1.8466701732712966e-05, + "loss": 1.2239, + "step": 3242 + }, + { + "epoch": 0.20333563232804563, + "grad_norm": 3.0469143390655518, + "learning_rate": 1.8465620951703424e-05, + "loss": 1.1359, + "step": 3243 + }, + { + "epoch": 0.203398332183836, + "grad_norm": 3.16339111328125, + "learning_rate": 1.8464539821571302e-05, + "loss": 1.1603, + "step": 3244 + }, + { + "epoch": 0.20346103203962632, + "grad_norm": 2.935377836227417, + "learning_rate": 1.8463458342361192e-05, + "loss": 1.3371, + "step": 3245 + }, + { + "epoch": 0.20352373189541664, + "grad_norm": 3.0769147872924805, + "learning_rate": 1.846237651411769e-05, + "loss": 1.1884, + "step": 3246 + }, + { + "epoch": 0.20358643175120697, + "grad_norm": 2.9547476768493652, + "learning_rate": 1.8461294336885406e-05, + "loss": 1.267, + "step": 3247 + }, + { + "epoch": 0.2036491316069973, + "grad_norm": 2.730314254760742, + "learning_rate": 1.846021181070898e-05, + "loss": 1.3054, + "step": 3248 + }, + { + "epoch": 0.20371183146278762, + "grad_norm": 2.862966775894165, + "learning_rate": 1.8459128935633045e-05, + "loss": 1.2721, + "step": 3249 + }, + { + "epoch": 0.20377453131857798, + "grad_norm": 2.973092794418335, + "learning_rate": 1.8458045711702264e-05, + "loss": 1.2692, + "step": 3250 + }, + { + "epoch": 0.2038372311743683, + "grad_norm": 2.9187068939208984, + "learning_rate": 1.845696213896131e-05, + "loss": 1.1267, + "step": 3251 + }, + { + "epoch": 0.20389993103015863, + "grad_norm": 2.6497445106506348, + "learning_rate": 1.845587821745487e-05, + "loss": 1.192, + "step": 3252 + }, + { + "epoch": 0.20396263088594896, + "grad_norm": 3.2867112159729004, + "learning_rate": 1.8454793947227637e-05, + "loss": 1.1579, + "step": 3253 + }, + { + "epoch": 0.20402533074173929, + "grad_norm": 2.8549840450286865, + "learning_rate": 1.8453709328324337e-05, + "loss": 1.2266, + "step": 3254 + }, + { + "epoch": 0.2040880305975296, + "grad_norm": 2.8376734256744385, + "learning_rate": 1.8452624360789695e-05, + "loss": 1.1817, + "step": 3255 + }, + { + "epoch": 0.20415073045331997, + "grad_norm": 2.743846893310547, + "learning_rate": 1.8451539044668453e-05, + "loss": 1.2818, + "step": 3256 + }, + { + "epoch": 0.2042134303091103, + "grad_norm": 2.7995846271514893, + "learning_rate": 1.8450453380005374e-05, + "loss": 1.1685, + "step": 3257 + }, + { + "epoch": 0.20427613016490062, + "grad_norm": 3.044318437576294, + "learning_rate": 1.844936736684523e-05, + "loss": 1.12, + "step": 3258 + }, + { + "epoch": 0.20433883002069095, + "grad_norm": 3.401219129562378, + "learning_rate": 1.8448281005232802e-05, + "loss": 1.1455, + "step": 3259 + }, + { + "epoch": 0.20440152987648127, + "grad_norm": 3.1590166091918945, + "learning_rate": 1.84471942952129e-05, + "loss": 1.1221, + "step": 3260 + }, + { + "epoch": 0.20446422973227163, + "grad_norm": 2.455139398574829, + "learning_rate": 1.8446107236830336e-05, + "loss": 1.0886, + "step": 3261 + }, + { + "epoch": 0.20452692958806196, + "grad_norm": 3.428532600402832, + "learning_rate": 1.8445019830129936e-05, + "loss": 1.34, + "step": 3262 + }, + { + "epoch": 0.20458962944385228, + "grad_norm": 3.1055638790130615, + "learning_rate": 1.8443932075156555e-05, + "loss": 1.1409, + "step": 3263 + }, + { + "epoch": 0.2046523292996426, + "grad_norm": 2.925560474395752, + "learning_rate": 1.8442843971955043e-05, + "loss": 1.3562, + "step": 3264 + }, + { + "epoch": 0.20471502915543294, + "grad_norm": 3.157670736312866, + "learning_rate": 
1.844175552057028e-05, + "loss": 1.0439, + "step": 3265 + }, + { + "epoch": 0.20477772901122326, + "grad_norm": 2.985049247741699, + "learning_rate": 1.844066672104715e-05, + "loss": 1.1087, + "step": 3266 + }, + { + "epoch": 0.20484042886701362, + "grad_norm": 2.916884660720825, + "learning_rate": 1.8439577573430557e-05, + "loss": 1.1441, + "step": 3267 + }, + { + "epoch": 0.20490312872280395, + "grad_norm": 2.591196298599243, + "learning_rate": 1.8438488077765414e-05, + "loss": 1.2958, + "step": 3268 + }, + { + "epoch": 0.20496582857859427, + "grad_norm": 2.626770257949829, + "learning_rate": 1.8437398234096652e-05, + "loss": 1.0953, + "step": 3269 + }, + { + "epoch": 0.2050285284343846, + "grad_norm": 3.015699625015259, + "learning_rate": 1.8436308042469223e-05, + "loss": 1.1077, + "step": 3270 + }, + { + "epoch": 0.20509122829017493, + "grad_norm": 2.7453181743621826, + "learning_rate": 1.843521750292808e-05, + "loss": 1.2511, + "step": 3271 + }, + { + "epoch": 0.20515392814596525, + "grad_norm": 3.1875627040863037, + "learning_rate": 1.8434126615518197e-05, + "loss": 1.2724, + "step": 3272 + }, + { + "epoch": 0.2052166280017556, + "grad_norm": 2.9198741912841797, + "learning_rate": 1.8433035380284566e-05, + "loss": 1.2961, + "step": 3273 + }, + { + "epoch": 0.20527932785754593, + "grad_norm": 2.831845283508301, + "learning_rate": 1.8431943797272187e-05, + "loss": 1.1032, + "step": 3274 + }, + { + "epoch": 0.20534202771333626, + "grad_norm": 3.0932459831237793, + "learning_rate": 1.8430851866526078e-05, + "loss": 1.1472, + "step": 3275 + }, + { + "epoch": 0.2054047275691266, + "grad_norm": 2.9985692501068115, + "learning_rate": 1.842975958809127e-05, + "loss": 1.0788, + "step": 3276 + }, + { + "epoch": 0.20546742742491692, + "grad_norm": 2.747267246246338, + "learning_rate": 1.842866696201281e-05, + "loss": 1.0881, + "step": 3277 + }, + { + "epoch": 0.20553012728070724, + "grad_norm": 2.648942708969116, + "learning_rate": 1.8427573988335755e-05, + "loss": 1.0825, + "step": 3278 + }, + { + "epoch": 0.2055928271364976, + "grad_norm": 3.0141406059265137, + "learning_rate": 1.8426480667105178e-05, + "loss": 1.1453, + "step": 3279 + }, + { + "epoch": 0.20565552699228792, + "grad_norm": 3.147162914276123, + "learning_rate": 1.8425386998366174e-05, + "loss": 1.1449, + "step": 3280 + }, + { + "epoch": 0.20571822684807825, + "grad_norm": 3.0557703971862793, + "learning_rate": 1.842429298216384e-05, + "loss": 1.0271, + "step": 3281 + }, + { + "epoch": 0.20578092670386858, + "grad_norm": 2.9002037048339844, + "learning_rate": 1.8423198618543297e-05, + "loss": 1.1407, + "step": 3282 + }, + { + "epoch": 0.2058436265596589, + "grad_norm": 3.2952804565429688, + "learning_rate": 1.8422103907549673e-05, + "loss": 1.0696, + "step": 3283 + }, + { + "epoch": 0.20590632641544923, + "grad_norm": 3.278536319732666, + "learning_rate": 1.842100884922812e-05, + "loss": 1.1577, + "step": 3284 + }, + { + "epoch": 0.20596902627123959, + "grad_norm": 3.0794565677642822, + "learning_rate": 1.841991344362379e-05, + "loss": 1.0214, + "step": 3285 + }, + { + "epoch": 0.2060317261270299, + "grad_norm": 2.8321373462677, + "learning_rate": 1.8418817690781864e-05, + "loss": 0.9219, + "step": 3286 + }, + { + "epoch": 0.20609442598282024, + "grad_norm": 2.790985584259033, + "learning_rate": 1.841772159074753e-05, + "loss": 1.2964, + "step": 3287 + }, + { + "epoch": 0.20615712583861057, + "grad_norm": 3.2283308506011963, + "learning_rate": 1.8416625143565987e-05, + "loss": 1.1439, + "step": 3288 + }, + { + "epoch": 
0.2062198256944009, + "grad_norm": 2.8601770401000977, + "learning_rate": 1.841552834928246e-05, + "loss": 1.3017, + "step": 3289 + }, + { + "epoch": 0.20628252555019125, + "grad_norm": 2.688094139099121, + "learning_rate": 1.8414431207942173e-05, + "loss": 1.1998, + "step": 3290 + }, + { + "epoch": 0.20634522540598157, + "grad_norm": 3.1502959728240967, + "learning_rate": 1.8413333719590375e-05, + "loss": 1.2553, + "step": 3291 + }, + { + "epoch": 0.2064079252617719, + "grad_norm": 2.867236614227295, + "learning_rate": 1.8412235884272326e-05, + "loss": 1.2534, + "step": 3292 + }, + { + "epoch": 0.20647062511756223, + "grad_norm": 2.753887176513672, + "learning_rate": 1.8411137702033306e-05, + "loss": 0.8706, + "step": 3293 + }, + { + "epoch": 0.20653332497335256, + "grad_norm": 3.0373077392578125, + "learning_rate": 1.8410039172918596e-05, + "loss": 0.894, + "step": 3294 + }, + { + "epoch": 0.20659602482914288, + "grad_norm": 3.29843807220459, + "learning_rate": 1.8408940296973504e-05, + "loss": 1.1113, + "step": 3295 + }, + { + "epoch": 0.20665872468493324, + "grad_norm": 3.0976829528808594, + "learning_rate": 1.840784107424335e-05, + "loss": 1.1262, + "step": 3296 + }, + { + "epoch": 0.20672142454072356, + "grad_norm": 3.090204954147339, + "learning_rate": 1.8406741504773462e-05, + "loss": 1.3696, + "step": 3297 + }, + { + "epoch": 0.2067841243965139, + "grad_norm": 2.8688735961914062, + "learning_rate": 1.8405641588609183e-05, + "loss": 1.1679, + "step": 3298 + }, + { + "epoch": 0.20684682425230422, + "grad_norm": 3.1474387645721436, + "learning_rate": 1.840454132579588e-05, + "loss": 1.3644, + "step": 3299 + }, + { + "epoch": 0.20690952410809454, + "grad_norm": 2.8033244609832764, + "learning_rate": 1.840344071637893e-05, + "loss": 1.1905, + "step": 3300 + }, + { + "epoch": 0.20697222396388487, + "grad_norm": 3.06135630607605, + "learning_rate": 1.8402339760403715e-05, + "loss": 1.1676, + "step": 3301 + }, + { + "epoch": 0.20703492381967523, + "grad_norm": 2.767702102661133, + "learning_rate": 1.8401238457915637e-05, + "loss": 1.2322, + "step": 3302 + }, + { + "epoch": 0.20709762367546555, + "grad_norm": 2.797138214111328, + "learning_rate": 1.8400136808960126e-05, + "loss": 1.2011, + "step": 3303 + }, + { + "epoch": 0.20716032353125588, + "grad_norm": 2.9008193016052246, + "learning_rate": 1.83990348135826e-05, + "loss": 1.1469, + "step": 3304 + }, + { + "epoch": 0.2072230233870462, + "grad_norm": 2.968782424926758, + "learning_rate": 1.8397932471828515e-05, + "loss": 1.1678, + "step": 3305 + }, + { + "epoch": 0.20728572324283653, + "grad_norm": 2.893033504486084, + "learning_rate": 1.8396829783743326e-05, + "loss": 1.2029, + "step": 3306 + }, + { + "epoch": 0.20734842309862686, + "grad_norm": 3.220829486846924, + "learning_rate": 1.839572674937251e-05, + "loss": 1.41, + "step": 3307 + }, + { + "epoch": 0.20741112295441722, + "grad_norm": 2.8930346965789795, + "learning_rate": 1.8394623368761558e-05, + "loss": 1.2391, + "step": 3308 + }, + { + "epoch": 0.20747382281020754, + "grad_norm": 2.8293607234954834, + "learning_rate": 1.839351964195597e-05, + "loss": 1.0924, + "step": 3309 + }, + { + "epoch": 0.20753652266599787, + "grad_norm": 3.2538483142852783, + "learning_rate": 1.8392415569001265e-05, + "loss": 1.0183, + "step": 3310 + }, + { + "epoch": 0.2075992225217882, + "grad_norm": 2.973764181137085, + "learning_rate": 1.8391311149942976e-05, + "loss": 1.3091, + "step": 3311 + }, + { + "epoch": 0.20766192237757852, + "grad_norm": 2.7031450271606445, + "learning_rate": 
1.839020638482665e-05, + "loss": 1.2177, + "step": 3312 + }, + { + "epoch": 0.20772462223336885, + "grad_norm": 2.789156913757324, + "learning_rate": 1.8389101273697846e-05, + "loss": 1.1883, + "step": 3313 + }, + { + "epoch": 0.2077873220891592, + "grad_norm": 3.144552707672119, + "learning_rate": 1.8387995816602137e-05, + "loss": 1.2071, + "step": 3314 + }, + { + "epoch": 0.20785002194494953, + "grad_norm": 2.675139904022217, + "learning_rate": 1.8386890013585115e-05, + "loss": 1.2049, + "step": 3315 + }, + { + "epoch": 0.20791272180073986, + "grad_norm": 3.1977996826171875, + "learning_rate": 1.8385783864692384e-05, + "loss": 1.1052, + "step": 3316 + }, + { + "epoch": 0.20797542165653018, + "grad_norm": 2.8366870880126953, + "learning_rate": 1.8384677369969562e-05, + "loss": 1.2663, + "step": 3317 + }, + { + "epoch": 0.2080381215123205, + "grad_norm": 2.7766873836517334, + "learning_rate": 1.8383570529462273e-05, + "loss": 1.2469, + "step": 3318 + }, + { + "epoch": 0.20810082136811087, + "grad_norm": 2.71216082572937, + "learning_rate": 1.8382463343216176e-05, + "loss": 1.1331, + "step": 3319 + }, + { + "epoch": 0.2081635212239012, + "grad_norm": 2.6050527095794678, + "learning_rate": 1.838135581127692e-05, + "loss": 1.1266, + "step": 3320 + }, + { + "epoch": 0.20822622107969152, + "grad_norm": 3.347731590270996, + "learning_rate": 1.8380247933690184e-05, + "loss": 1.0376, + "step": 3321 + }, + { + "epoch": 0.20828892093548185, + "grad_norm": 3.0080878734588623, + "learning_rate": 1.837913971050166e-05, + "loss": 1.0194, + "step": 3322 + }, + { + "epoch": 0.20835162079127217, + "grad_norm": 3.020660400390625, + "learning_rate": 1.8378031141757046e-05, + "loss": 1.2334, + "step": 3323 + }, + { + "epoch": 0.2084143206470625, + "grad_norm": 2.835597038269043, + "learning_rate": 1.8376922227502066e-05, + "loss": 1.1004, + "step": 3324 + }, + { + "epoch": 0.20847702050285286, + "grad_norm": 2.9397575855255127, + "learning_rate": 1.8375812967782445e-05, + "loss": 1.2213, + "step": 3325 + }, + { + "epoch": 0.20853972035864318, + "grad_norm": 2.9058728218078613, + "learning_rate": 1.837470336264393e-05, + "loss": 1.2472, + "step": 3326 + }, + { + "epoch": 0.2086024202144335, + "grad_norm": 2.89353346824646, + "learning_rate": 1.8373593412132284e-05, + "loss": 1.06, + "step": 3327 + }, + { + "epoch": 0.20866512007022384, + "grad_norm": 2.8814172744750977, + "learning_rate": 1.837248311629328e-05, + "loss": 1.1158, + "step": 3328 + }, + { + "epoch": 0.20872781992601416, + "grad_norm": 3.4110779762268066, + "learning_rate": 1.8371372475172705e-05, + "loss": 1.435, + "step": 3329 + }, + { + "epoch": 0.2087905197818045, + "grad_norm": 2.866387367248535, + "learning_rate": 1.8370261488816367e-05, + "loss": 1.2649, + "step": 3330 + }, + { + "epoch": 0.20885321963759484, + "grad_norm": 2.839970111846924, + "learning_rate": 1.8369150157270078e-05, + "loss": 1.1512, + "step": 3331 + }, + { + "epoch": 0.20891591949338517, + "grad_norm": 2.8117055892944336, + "learning_rate": 1.8368038480579672e-05, + "loss": 1.2069, + "step": 3332 + }, + { + "epoch": 0.2089786193491755, + "grad_norm": 3.0721535682678223, + "learning_rate": 1.836692645879099e-05, + "loss": 1.2558, + "step": 3333 + }, + { + "epoch": 0.20904131920496583, + "grad_norm": 3.0389404296875, + "learning_rate": 1.83658140919499e-05, + "loss": 1.3915, + "step": 3334 + }, + { + "epoch": 0.20910401906075615, + "grad_norm": 3.1382808685302734, + "learning_rate": 1.8364701380102267e-05, + "loss": 1.1553, + "step": 3335 + }, + { + "epoch": 
0.20916671891654648, + "grad_norm": 2.75372052192688, + "learning_rate": 1.836358832329399e-05, + "loss": 1.2111, + "step": 3336 + }, + { + "epoch": 0.20922941877233683, + "grad_norm": 3.0850601196289062, + "learning_rate": 1.8362474921570957e-05, + "loss": 1.2302, + "step": 3337 + }, + { + "epoch": 0.20929211862812716, + "grad_norm": 2.5467326641082764, + "learning_rate": 1.83613611749791e-05, + "loss": 1.2738, + "step": 3338 + }, + { + "epoch": 0.2093548184839175, + "grad_norm": 2.854994058609009, + "learning_rate": 1.8360247083564343e-05, + "loss": 1.2331, + "step": 3339 + }, + { + "epoch": 0.20941751833970781, + "grad_norm": 3.0319108963012695, + "learning_rate": 1.8359132647372627e-05, + "loss": 1.2325, + "step": 3340 + }, + { + "epoch": 0.20948021819549814, + "grad_norm": 3.014400005340576, + "learning_rate": 1.8358017866449916e-05, + "loss": 1.1203, + "step": 3341 + }, + { + "epoch": 0.2095429180512885, + "grad_norm": 2.788797378540039, + "learning_rate": 1.8356902740842188e-05, + "loss": 1.3155, + "step": 3342 + }, + { + "epoch": 0.20960561790707882, + "grad_norm": 3.1582765579223633, + "learning_rate": 1.835578727059542e-05, + "loss": 1.0537, + "step": 3343 + }, + { + "epoch": 0.20966831776286915, + "grad_norm": 2.9010565280914307, + "learning_rate": 1.8354671455755625e-05, + "loss": 1.1544, + "step": 3344 + }, + { + "epoch": 0.20973101761865948, + "grad_norm": 2.790919065475464, + "learning_rate": 1.835355529636881e-05, + "loss": 1.0196, + "step": 3345 + }, + { + "epoch": 0.2097937174744498, + "grad_norm": 2.8328936100006104, + "learning_rate": 1.835243879248101e-05, + "loss": 1.2184, + "step": 3346 + }, + { + "epoch": 0.20985641733024013, + "grad_norm": 2.7366716861724854, + "learning_rate": 1.835132194413827e-05, + "loss": 1.0858, + "step": 3347 + }, + { + "epoch": 0.20991911718603048, + "grad_norm": 3.0383718013763428, + "learning_rate": 1.835020475138665e-05, + "loss": 1.1542, + "step": 3348 + }, + { + "epoch": 0.2099818170418208, + "grad_norm": 2.990800142288208, + "learning_rate": 1.8349087214272222e-05, + "loss": 1.2234, + "step": 3349 + }, + { + "epoch": 0.21004451689761114, + "grad_norm": 2.817681074142456, + "learning_rate": 1.8347969332841075e-05, + "loss": 1.2367, + "step": 3350 + }, + { + "epoch": 0.21010721675340147, + "grad_norm": 3.2169129848480225, + "learning_rate": 1.83468511071393e-05, + "loss": 1.0032, + "step": 3351 + }, + { + "epoch": 0.2101699166091918, + "grad_norm": 2.89988112449646, + "learning_rate": 1.834573253721303e-05, + "loss": 1.2215, + "step": 3352 + }, + { + "epoch": 0.21023261646498212, + "grad_norm": 2.789646625518799, + "learning_rate": 1.834461362310838e-05, + "loss": 1.1022, + "step": 3353 + }, + { + "epoch": 0.21029531632077247, + "grad_norm": 2.9173834323883057, + "learning_rate": 1.8343494364871502e-05, + "loss": 0.9497, + "step": 3354 + }, + { + "epoch": 0.2103580161765628, + "grad_norm": 2.9185636043548584, + "learning_rate": 1.834237476254855e-05, + "loss": 1.3113, + "step": 3355 + }, + { + "epoch": 0.21042071603235313, + "grad_norm": 3.0377914905548096, + "learning_rate": 1.83412548161857e-05, + "loss": 1.075, + "step": 3356 + }, + { + "epoch": 0.21048341588814345, + "grad_norm": 3.0801279544830322, + "learning_rate": 1.8340134525829137e-05, + "loss": 1.3157, + "step": 3357 + }, + { + "epoch": 0.21054611574393378, + "grad_norm": 2.8521766662597656, + "learning_rate": 1.8339013891525058e-05, + "loss": 1.2423, + "step": 3358 + }, + { + "epoch": 0.2106088155997241, + "grad_norm": 2.9109556674957275, + "learning_rate": 
1.8337892913319687e-05, + "loss": 1.2456, + "step": 3359 + }, + { + "epoch": 0.21067151545551446, + "grad_norm": 2.745912790298462, + "learning_rate": 1.8336771591259247e-05, + "loss": 1.1066, + "step": 3360 + }, + { + "epoch": 0.2107342153113048, + "grad_norm": 2.9738683700561523, + "learning_rate": 1.833564992538998e-05, + "loss": 1.1424, + "step": 3361 + }, + { + "epoch": 0.21079691516709512, + "grad_norm": 3.4681413173675537, + "learning_rate": 1.833452791575815e-05, + "loss": 1.0713, + "step": 3362 + }, + { + "epoch": 0.21085961502288544, + "grad_norm": 2.896726369857788, + "learning_rate": 1.8333405562410026e-05, + "loss": 1.2252, + "step": 3363 + }, + { + "epoch": 0.21092231487867577, + "grad_norm": 3.184450626373291, + "learning_rate": 1.833228286539189e-05, + "loss": 1.1192, + "step": 3364 + }, + { + "epoch": 0.2109850147344661, + "grad_norm": 2.9204089641571045, + "learning_rate": 1.833115982475004e-05, + "loss": 1.1881, + "step": 3365 + }, + { + "epoch": 0.21104771459025645, + "grad_norm": 3.4206230640411377, + "learning_rate": 1.83300364405308e-05, + "loss": 0.9678, + "step": 3366 + }, + { + "epoch": 0.21111041444604678, + "grad_norm": 3.2120916843414307, + "learning_rate": 1.8328912712780495e-05, + "loss": 1.2117, + "step": 3367 + }, + { + "epoch": 0.2111731143018371, + "grad_norm": 3.1224069595336914, + "learning_rate": 1.8327788641545463e-05, + "loss": 1.1418, + "step": 3368 + }, + { + "epoch": 0.21123581415762743, + "grad_norm": 3.035413980484009, + "learning_rate": 1.8326664226872063e-05, + "loss": 1.075, + "step": 3369 + }, + { + "epoch": 0.21129851401341776, + "grad_norm": 3.0682733058929443, + "learning_rate": 1.832553946880667e-05, + "loss": 1.1565, + "step": 3370 + }, + { + "epoch": 0.21136121386920811, + "grad_norm": 3.0132558345794678, + "learning_rate": 1.8324414367395664e-05, + "loss": 1.1951, + "step": 3371 + }, + { + "epoch": 0.21142391372499844, + "grad_norm": 3.2547852993011475, + "learning_rate": 1.8323288922685446e-05, + "loss": 1.0672, + "step": 3372 + }, + { + "epoch": 0.21148661358078877, + "grad_norm": 3.1034250259399414, + "learning_rate": 1.832216313472243e-05, + "loss": 1.109, + "step": 3373 + }, + { + "epoch": 0.2115493134365791, + "grad_norm": 3.350806713104248, + "learning_rate": 1.8321037003553043e-05, + "loss": 1.2431, + "step": 3374 + }, + { + "epoch": 0.21161201329236942, + "grad_norm": 3.187114715576172, + "learning_rate": 1.8319910529223727e-05, + "loss": 0.8997, + "step": 3375 + }, + { + "epoch": 0.21167471314815975, + "grad_norm": 3.2070999145507812, + "learning_rate": 1.8318783711780934e-05, + "loss": 1.1908, + "step": 3376 + }, + { + "epoch": 0.2117374130039501, + "grad_norm": 3.038073778152466, + "learning_rate": 1.8317656551271138e-05, + "loss": 1.3367, + "step": 3377 + }, + { + "epoch": 0.21180011285974043, + "grad_norm": 3.4439685344696045, + "learning_rate": 1.8316529047740826e-05, + "loss": 1.134, + "step": 3378 + }, + { + "epoch": 0.21186281271553076, + "grad_norm": 2.666834592819214, + "learning_rate": 1.8315401201236492e-05, + "loss": 1.3837, + "step": 3379 + }, + { + "epoch": 0.21192551257132108, + "grad_norm": 2.772656202316284, + "learning_rate": 1.831427301180465e-05, + "loss": 1.2333, + "step": 3380 + }, + { + "epoch": 0.2119882124271114, + "grad_norm": 3.104719877243042, + "learning_rate": 1.8313144479491822e-05, + "loss": 1.1544, + "step": 3381 + }, + { + "epoch": 0.21205091228290174, + "grad_norm": 2.6532888412475586, + "learning_rate": 1.8312015604344555e-05, + "loss": 1.1742, + "step": 3382 + }, + { + "epoch": 
0.2121136121386921, + "grad_norm": 3.066275119781494, + "learning_rate": 1.83108863864094e-05, + "loss": 1.2208, + "step": 3383 + }, + { + "epoch": 0.21217631199448242, + "grad_norm": 2.909572124481201, + "learning_rate": 1.830975682573293e-05, + "loss": 1.0507, + "step": 3384 + }, + { + "epoch": 0.21223901185027275, + "grad_norm": 2.861375093460083, + "learning_rate": 1.8308626922361726e-05, + "loss": 1.1891, + "step": 3385 + }, + { + "epoch": 0.21230171170606307, + "grad_norm": 2.8681154251098633, + "learning_rate": 1.8307496676342384e-05, + "loss": 1.1747, + "step": 3386 + }, + { + "epoch": 0.2123644115618534, + "grad_norm": 2.7000656127929688, + "learning_rate": 1.8306366087721515e-05, + "loss": 1.2184, + "step": 3387 + }, + { + "epoch": 0.21242711141764373, + "grad_norm": 2.686201333999634, + "learning_rate": 1.8305235156545747e-05, + "loss": 1.2707, + "step": 3388 + }, + { + "epoch": 0.21248981127343408, + "grad_norm": 2.5150434970855713, + "learning_rate": 1.830410388286172e-05, + "loss": 1.1405, + "step": 3389 + }, + { + "epoch": 0.2125525111292244, + "grad_norm": 2.9072623252868652, + "learning_rate": 1.830297226671608e-05, + "loss": 0.9942, + "step": 3390 + }, + { + "epoch": 0.21261521098501474, + "grad_norm": 2.795544385910034, + "learning_rate": 1.8301840308155507e-05, + "loss": 1.1315, + "step": 3391 + }, + { + "epoch": 0.21267791084080506, + "grad_norm": 2.7770543098449707, + "learning_rate": 1.8300708007226676e-05, + "loss": 1.1646, + "step": 3392 + }, + { + "epoch": 0.2127406106965954, + "grad_norm": 3.015138864517212, + "learning_rate": 1.8299575363976287e-05, + "loss": 1.3961, + "step": 3393 + }, + { + "epoch": 0.21280331055238572, + "grad_norm": 2.6969523429870605, + "learning_rate": 1.8298442378451044e-05, + "loss": 1.2872, + "step": 3394 + }, + { + "epoch": 0.21286601040817607, + "grad_norm": 3.0905282497406006, + "learning_rate": 1.8297309050697673e-05, + "loss": 1.2294, + "step": 3395 + }, + { + "epoch": 0.2129287102639664, + "grad_norm": 2.9849414825439453, + "learning_rate": 1.8296175380762917e-05, + "loss": 1.1344, + "step": 3396 + }, + { + "epoch": 0.21299141011975672, + "grad_norm": 3.0944161415100098, + "learning_rate": 1.8295041368693525e-05, + "loss": 1.1765, + "step": 3397 + }, + { + "epoch": 0.21305410997554705, + "grad_norm": 2.711550712585449, + "learning_rate": 1.8293907014536265e-05, + "loss": 1.1172, + "step": 3398 + }, + { + "epoch": 0.21311680983133738, + "grad_norm": 2.6522445678710938, + "learning_rate": 1.8292772318337918e-05, + "loss": 1.3004, + "step": 3399 + }, + { + "epoch": 0.21317950968712773, + "grad_norm": 3.0467419624328613, + "learning_rate": 1.829163728014528e-05, + "loss": 1.1039, + "step": 3400 + }, + { + "epoch": 0.21324220954291806, + "grad_norm": 2.9804179668426514, + "learning_rate": 1.8290501900005154e-05, + "loss": 1.0879, + "step": 3401 + }, + { + "epoch": 0.2133049093987084, + "grad_norm": 3.2205519676208496, + "learning_rate": 1.8289366177964375e-05, + "loss": 1.0821, + "step": 3402 + }, + { + "epoch": 0.2133676092544987, + "grad_norm": 2.64505934715271, + "learning_rate": 1.828823011406977e-05, + "loss": 1.3092, + "step": 3403 + }, + { + "epoch": 0.21343030911028904, + "grad_norm": 2.9717440605163574, + "learning_rate": 1.828709370836819e-05, + "loss": 1.0951, + "step": 3404 + }, + { + "epoch": 0.21349300896607937, + "grad_norm": 2.7309072017669678, + "learning_rate": 1.8285956960906502e-05, + "loss": 1.0963, + "step": 3405 + }, + { + "epoch": 0.21355570882186972, + "grad_norm": 3.2046501636505127, + "learning_rate": 
1.828481987173159e-05, + "loss": 1.2861, + "step": 3406 + }, + { + "epoch": 0.21361840867766005, + "grad_norm": 2.77400803565979, + "learning_rate": 1.8283682440890346e-05, + "loss": 1.1746, + "step": 3407 + }, + { + "epoch": 0.21368110853345038, + "grad_norm": 2.717869997024536, + "learning_rate": 1.8282544668429674e-05, + "loss": 1.1956, + "step": 3408 + }, + { + "epoch": 0.2137438083892407, + "grad_norm": 3.1989307403564453, + "learning_rate": 1.8281406554396502e-05, + "loss": 1.1809, + "step": 3409 + }, + { + "epoch": 0.21380650824503103, + "grad_norm": 2.7072160243988037, + "learning_rate": 1.8280268098837758e-05, + "loss": 1.2158, + "step": 3410 + }, + { + "epoch": 0.21386920810082136, + "grad_norm": 3.0089616775512695, + "learning_rate": 1.82791293018004e-05, + "loss": 1.072, + "step": 3411 + }, + { + "epoch": 0.2139319079566117, + "grad_norm": 2.9259283542633057, + "learning_rate": 1.8277990163331384e-05, + "loss": 1.0517, + "step": 3412 + }, + { + "epoch": 0.21399460781240204, + "grad_norm": 2.961582899093628, + "learning_rate": 1.8276850683477693e-05, + "loss": 1.0955, + "step": 3413 + }, + { + "epoch": 0.21405730766819236, + "grad_norm": 3.0390686988830566, + "learning_rate": 1.827571086228632e-05, + "loss": 1.1599, + "step": 3414 + }, + { + "epoch": 0.2141200075239827, + "grad_norm": 2.8761000633239746, + "learning_rate": 1.827457069980427e-05, + "loss": 1.203, + "step": 3415 + }, + { + "epoch": 0.21418270737977302, + "grad_norm": 2.997746229171753, + "learning_rate": 1.8273430196078558e-05, + "loss": 1.1458, + "step": 3416 + }, + { + "epoch": 0.21424540723556335, + "grad_norm": 3.2973501682281494, + "learning_rate": 1.827228935115623e-05, + "loss": 1.1481, + "step": 3417 + }, + { + "epoch": 0.2143081070913537, + "grad_norm": 2.7970361709594727, + "learning_rate": 1.8271148165084323e-05, + "loss": 1.0762, + "step": 3418 + }, + { + "epoch": 0.21437080694714403, + "grad_norm": 3.0043742656707764, + "learning_rate": 1.8270006637909907e-05, + "loss": 1.0652, + "step": 3419 + }, + { + "epoch": 0.21443350680293435, + "grad_norm": 2.850764513015747, + "learning_rate": 1.8268864769680054e-05, + "loss": 1.1263, + "step": 3420 + }, + { + "epoch": 0.21449620665872468, + "grad_norm": 2.68994140625, + "learning_rate": 1.826772256044186e-05, + "loss": 1.1583, + "step": 3421 + }, + { + "epoch": 0.214558906514515, + "grad_norm": 3.0823936462402344, + "learning_rate": 1.826658001024242e-05, + "loss": 1.1925, + "step": 3422 + }, + { + "epoch": 0.21462160637030533, + "grad_norm": 3.179596424102783, + "learning_rate": 1.826543711912887e-05, + "loss": 1.1227, + "step": 3423 + }, + { + "epoch": 0.2146843062260957, + "grad_norm": 2.7538645267486572, + "learning_rate": 1.8264293887148324e-05, + "loss": 1.1441, + "step": 3424 + }, + { + "epoch": 0.21474700608188602, + "grad_norm": 2.721005916595459, + "learning_rate": 1.826315031434794e-05, + "loss": 1.0382, + "step": 3425 + }, + { + "epoch": 0.21480970593767634, + "grad_norm": 2.9871463775634766, + "learning_rate": 1.8262006400774878e-05, + "loss": 1.3151, + "step": 3426 + }, + { + "epoch": 0.21487240579346667, + "grad_norm": 2.78096342086792, + "learning_rate": 1.826086214647631e-05, + "loss": 1.1223, + "step": 3427 + }, + { + "epoch": 0.214935105649257, + "grad_norm": 3.00140380859375, + "learning_rate": 1.8259717551499422e-05, + "loss": 1.17, + "step": 3428 + }, + { + "epoch": 0.21499780550504735, + "grad_norm": 2.809117317199707, + "learning_rate": 1.8258572615891427e-05, + "loss": 1.3145, + "step": 3429 + }, + { + "epoch": 0.21506050536083768, + 
"grad_norm": 2.901545286178589, + "learning_rate": 1.8257427339699534e-05, + "loss": 1.1431, + "step": 3430 + }, + { + "epoch": 0.215123205216628, + "grad_norm": 2.575582265853882, + "learning_rate": 1.8256281722970977e-05, + "loss": 1.2072, + "step": 3431 + }, + { + "epoch": 0.21518590507241833, + "grad_norm": 3.191389322280884, + "learning_rate": 1.8255135765753e-05, + "loss": 1.2318, + "step": 3432 + }, + { + "epoch": 0.21524860492820866, + "grad_norm": 2.5262725353240967, + "learning_rate": 1.8253989468092865e-05, + "loss": 1.1715, + "step": 3433 + }, + { + "epoch": 0.21531130478399899, + "grad_norm": 2.9502041339874268, + "learning_rate": 1.8252842830037845e-05, + "loss": 1.2691, + "step": 3434 + }, + { + "epoch": 0.21537400463978934, + "grad_norm": 3.019813060760498, + "learning_rate": 1.8251695851635226e-05, + "loss": 1.1317, + "step": 3435 + }, + { + "epoch": 0.21543670449557967, + "grad_norm": 2.6265642642974854, + "learning_rate": 1.825054853293231e-05, + "loss": 1.0488, + "step": 3436 + }, + { + "epoch": 0.21549940435137, + "grad_norm": 2.790546178817749, + "learning_rate": 1.824940087397641e-05, + "loss": 1.0202, + "step": 3437 + }, + { + "epoch": 0.21556210420716032, + "grad_norm": 2.8564376831054688, + "learning_rate": 1.824825287481486e-05, + "loss": 1.3371, + "step": 3438 + }, + { + "epoch": 0.21562480406295065, + "grad_norm": 3.2304935455322266, + "learning_rate": 1.8247104535495e-05, + "loss": 1.0526, + "step": 3439 + }, + { + "epoch": 0.21568750391874097, + "grad_norm": 3.1486024856567383, + "learning_rate": 1.824595585606419e-05, + "loss": 1.1264, + "step": 3440 + }, + { + "epoch": 0.21575020377453133, + "grad_norm": 2.983316421508789, + "learning_rate": 1.8244806836569798e-05, + "loss": 1.1425, + "step": 3441 + }, + { + "epoch": 0.21581290363032166, + "grad_norm": 3.176135540008545, + "learning_rate": 1.8243657477059214e-05, + "loss": 1.0348, + "step": 3442 + }, + { + "epoch": 0.21587560348611198, + "grad_norm": 2.9420180320739746, + "learning_rate": 1.8242507777579833e-05, + "loss": 1.4064, + "step": 3443 + }, + { + "epoch": 0.2159383033419023, + "grad_norm": 2.860900402069092, + "learning_rate": 1.8241357738179076e-05, + "loss": 1.087, + "step": 3444 + }, + { + "epoch": 0.21600100319769264, + "grad_norm": 3.320638656616211, + "learning_rate": 1.824020735890436e-05, + "loss": 1.3103, + "step": 3445 + }, + { + "epoch": 0.21606370305348296, + "grad_norm": 3.1689045429229736, + "learning_rate": 1.8239056639803138e-05, + "loss": 1.168, + "step": 3446 + }, + { + "epoch": 0.21612640290927332, + "grad_norm": 3.2299976348876953, + "learning_rate": 1.823790558092286e-05, + "loss": 1.1176, + "step": 3447 + }, + { + "epoch": 0.21618910276506365, + "grad_norm": 2.638195753097534, + "learning_rate": 1.8236754182310993e-05, + "loss": 1.1513, + "step": 3448 + }, + { + "epoch": 0.21625180262085397, + "grad_norm": 3.3303959369659424, + "learning_rate": 1.8235602444015028e-05, + "loss": 1.246, + "step": 3449 + }, + { + "epoch": 0.2163145024766443, + "grad_norm": 2.6950125694274902, + "learning_rate": 1.8234450366082456e-05, + "loss": 1.0602, + "step": 3450 + }, + { + "epoch": 0.21637720233243463, + "grad_norm": 2.7798190116882324, + "learning_rate": 1.823329794856079e-05, + "loss": 1.2046, + "step": 3451 + }, + { + "epoch": 0.21643990218822498, + "grad_norm": 3.1202821731567383, + "learning_rate": 1.8232145191497555e-05, + "loss": 0.9852, + "step": 3452 + }, + { + "epoch": 0.2165026020440153, + "grad_norm": 2.8287532329559326, + "learning_rate": 1.8230992094940298e-05, + "loss": 
1.2043, + "step": 3453 + }, + { + "epoch": 0.21656530189980563, + "grad_norm": 2.726102113723755, + "learning_rate": 1.8229838658936566e-05, + "loss": 1.2567, + "step": 3454 + }, + { + "epoch": 0.21662800175559596, + "grad_norm": 2.8064825534820557, + "learning_rate": 1.8228684883533928e-05, + "loss": 1.1503, + "step": 3455 + }, + { + "epoch": 0.2166907016113863, + "grad_norm": 2.795233726501465, + "learning_rate": 1.8227530768779968e-05, + "loss": 1.2507, + "step": 3456 + }, + { + "epoch": 0.21675340146717662, + "grad_norm": 2.650527000427246, + "learning_rate": 1.8226376314722274e-05, + "loss": 1.235, + "step": 3457 + }, + { + "epoch": 0.21681610132296697, + "grad_norm": 3.12528133392334, + "learning_rate": 1.8225221521408465e-05, + "loss": 1.2896, + "step": 3458 + }, + { + "epoch": 0.2168788011787573, + "grad_norm": 2.9217212200164795, + "learning_rate": 1.8224066388886163e-05, + "loss": 1.0821, + "step": 3459 + }, + { + "epoch": 0.21694150103454762, + "grad_norm": 2.8614370822906494, + "learning_rate": 1.8222910917203002e-05, + "loss": 1.2112, + "step": 3460 + }, + { + "epoch": 0.21700420089033795, + "grad_norm": 2.7186472415924072, + "learning_rate": 1.8221755106406636e-05, + "loss": 1.1216, + "step": 3461 + }, + { + "epoch": 0.21706690074612828, + "grad_norm": 2.851667642593384, + "learning_rate": 1.822059895654473e-05, + "loss": 1.2486, + "step": 3462 + }, + { + "epoch": 0.2171296006019186, + "grad_norm": 2.940608501434326, + "learning_rate": 1.8219442467664964e-05, + "loss": 1.2404, + "step": 3463 + }, + { + "epoch": 0.21719230045770896, + "grad_norm": 3.0502936840057373, + "learning_rate": 1.8218285639815034e-05, + "loss": 1.2614, + "step": 3464 + }, + { + "epoch": 0.21725500031349929, + "grad_norm": 3.077563524246216, + "learning_rate": 1.8217128473042645e-05, + "loss": 1.109, + "step": 3465 + }, + { + "epoch": 0.2173177001692896, + "grad_norm": 2.8069350719451904, + "learning_rate": 1.8215970967395517e-05, + "loss": 1.1477, + "step": 3466 + }, + { + "epoch": 0.21738040002507994, + "grad_norm": 2.670307159423828, + "learning_rate": 1.821481312292139e-05, + "loss": 1.2728, + "step": 3467 + }, + { + "epoch": 0.21744309988087027, + "grad_norm": 2.7720727920532227, + "learning_rate": 1.8213654939668007e-05, + "loss": 1.1983, + "step": 3468 + }, + { + "epoch": 0.2175057997366606, + "grad_norm": 2.8227763175964355, + "learning_rate": 1.8212496417683135e-05, + "loss": 1.1672, + "step": 3469 + }, + { + "epoch": 0.21756849959245095, + "grad_norm": 2.4586305618286133, + "learning_rate": 1.8211337557014556e-05, + "loss": 1.2657, + "step": 3470 + }, + { + "epoch": 0.21763119944824127, + "grad_norm": 2.8274495601654053, + "learning_rate": 1.8210178357710057e-05, + "loss": 1.0766, + "step": 3471 + }, + { + "epoch": 0.2176938993040316, + "grad_norm": 2.7498583793640137, + "learning_rate": 1.8209018819817444e-05, + "loss": 1.1837, + "step": 3472 + }, + { + "epoch": 0.21775659915982193, + "grad_norm": 3.236403226852417, + "learning_rate": 1.8207858943384535e-05, + "loss": 1.0774, + "step": 3473 + }, + { + "epoch": 0.21781929901561226, + "grad_norm": 3.0355584621429443, + "learning_rate": 1.8206698728459167e-05, + "loss": 1.2083, + "step": 3474 + }, + { + "epoch": 0.21788199887140258, + "grad_norm": 3.0671274662017822, + "learning_rate": 1.8205538175089182e-05, + "loss": 1.3193, + "step": 3475 + }, + { + "epoch": 0.21794469872719294, + "grad_norm": 2.746685743331909, + "learning_rate": 1.8204377283322446e-05, + "loss": 1.084, + "step": 3476 + }, + { + "epoch": 0.21800739858298326, + "grad_norm": 
3.232438325881958, + "learning_rate": 1.8203216053206835e-05, + "loss": 1.1023, + "step": 3477 + }, + { + "epoch": 0.2180700984387736, + "grad_norm": 3.001978635787964, + "learning_rate": 1.820205448479023e-05, + "loss": 1.2357, + "step": 3478 + }, + { + "epoch": 0.21813279829456392, + "grad_norm": 3.0080020427703857, + "learning_rate": 1.8200892578120544e-05, + "loss": 1.1436, + "step": 3479 + }, + { + "epoch": 0.21819549815035424, + "grad_norm": 3.0861423015594482, + "learning_rate": 1.819973033324569e-05, + "loss": 1.1863, + "step": 3480 + }, + { + "epoch": 0.2182581980061446, + "grad_norm": 2.767040252685547, + "learning_rate": 1.8198567750213597e-05, + "loss": 1.0428, + "step": 3481 + }, + { + "epoch": 0.21832089786193493, + "grad_norm": 3.15714955329895, + "learning_rate": 1.8197404829072214e-05, + "loss": 1.0627, + "step": 3482 + }, + { + "epoch": 0.21838359771772525, + "grad_norm": 3.1300365924835205, + "learning_rate": 1.8196241569869495e-05, + "loss": 1.1053, + "step": 3483 + }, + { + "epoch": 0.21844629757351558, + "grad_norm": 3.3604283332824707, + "learning_rate": 1.8195077972653417e-05, + "loss": 1.1605, + "step": 3484 + }, + { + "epoch": 0.2185089974293059, + "grad_norm": 3.2311594486236572, + "learning_rate": 1.8193914037471965e-05, + "loss": 1.2184, + "step": 3485 + }, + { + "epoch": 0.21857169728509623, + "grad_norm": 2.8588130474090576, + "learning_rate": 1.8192749764373143e-05, + "loss": 1.1153, + "step": 3486 + }, + { + "epoch": 0.2186343971408866, + "grad_norm": 2.621732473373413, + "learning_rate": 1.819158515340496e-05, + "loss": 1.1905, + "step": 3487 + }, + { + "epoch": 0.21869709699667692, + "grad_norm": 2.920182228088379, + "learning_rate": 1.819042020461545e-05, + "loss": 1.2738, + "step": 3488 + }, + { + "epoch": 0.21875979685246724, + "grad_norm": 3.3447177410125732, + "learning_rate": 1.818925491805265e-05, + "loss": 1.1415, + "step": 3489 + }, + { + "epoch": 0.21882249670825757, + "grad_norm": 2.7022831439971924, + "learning_rate": 1.8188089293764622e-05, + "loss": 1.1901, + "step": 3490 + }, + { + "epoch": 0.2188851965640479, + "grad_norm": 3.2078630924224854, + "learning_rate": 1.8186923331799434e-05, + "loss": 1.0555, + "step": 3491 + }, + { + "epoch": 0.21894789641983822, + "grad_norm": 2.8380954265594482, + "learning_rate": 1.8185757032205172e-05, + "loss": 1.2118, + "step": 3492 + }, + { + "epoch": 0.21901059627562858, + "grad_norm": 2.6186671257019043, + "learning_rate": 1.8184590395029927e-05, + "loss": 1.3463, + "step": 3493 + }, + { + "epoch": 0.2190732961314189, + "grad_norm": 2.8827946186065674, + "learning_rate": 1.8183423420321823e-05, + "loss": 1.2981, + "step": 3494 + }, + { + "epoch": 0.21913599598720923, + "grad_norm": 3.3144476413726807, + "learning_rate": 1.8182256108128973e-05, + "loss": 1.3052, + "step": 3495 + }, + { + "epoch": 0.21919869584299956, + "grad_norm": 3.056907892227173, + "learning_rate": 1.818108845849953e-05, + "loss": 1.191, + "step": 3496 + }, + { + "epoch": 0.21926139569878988, + "grad_norm": 2.755643367767334, + "learning_rate": 1.817992047148164e-05, + "loss": 1.2318, + "step": 3497 + }, + { + "epoch": 0.2193240955545802, + "grad_norm": 2.6254141330718994, + "learning_rate": 1.817875214712347e-05, + "loss": 1.2741, + "step": 3498 + }, + { + "epoch": 0.21938679541037057, + "grad_norm": 2.5746748447418213, + "learning_rate": 1.8177583485473204e-05, + "loss": 1.2924, + "step": 3499 + }, + { + "epoch": 0.2194494952661609, + "grad_norm": 2.738935947418213, + "learning_rate": 1.817641448657904e-05, + "loss": 1.1279, + 
"step": 3500 + }, + { + "epoch": 0.21951219512195122, + "grad_norm": 2.864065170288086, + "learning_rate": 1.8175245150489188e-05, + "loss": 1.2074, + "step": 3501 + }, + { + "epoch": 0.21957489497774155, + "grad_norm": 2.9558682441711426, + "learning_rate": 1.8174075477251865e-05, + "loss": 1.0429, + "step": 3502 + }, + { + "epoch": 0.21963759483353187, + "grad_norm": 3.5027759075164795, + "learning_rate": 1.8172905466915315e-05, + "loss": 1.1788, + "step": 3503 + }, + { + "epoch": 0.2197002946893222, + "grad_norm": 2.762580394744873, + "learning_rate": 1.8171735119527784e-05, + "loss": 1.053, + "step": 3504 + }, + { + "epoch": 0.21976299454511256, + "grad_norm": 2.771946907043457, + "learning_rate": 1.8170564435137542e-05, + "loss": 1.0595, + "step": 3505 + }, + { + "epoch": 0.21982569440090288, + "grad_norm": 2.998465061187744, + "learning_rate": 1.8169393413792864e-05, + "loss": 1.0339, + "step": 3506 + }, + { + "epoch": 0.2198883942566932, + "grad_norm": 2.9647207260131836, + "learning_rate": 1.8168222055542048e-05, + "loss": 1.1536, + "step": 3507 + }, + { + "epoch": 0.21995109411248354, + "grad_norm": 2.915910243988037, + "learning_rate": 1.8167050360433394e-05, + "loss": 1.1133, + "step": 3508 + }, + { + "epoch": 0.22001379396827386, + "grad_norm": 3.318826675415039, + "learning_rate": 1.8165878328515228e-05, + "loss": 1.2224, + "step": 3509 + }, + { + "epoch": 0.22007649382406422, + "grad_norm": 3.0046298503875732, + "learning_rate": 1.8164705959835882e-05, + "loss": 1.2103, + "step": 3510 + }, + { + "epoch": 0.22013919367985454, + "grad_norm": 2.6634392738342285, + "learning_rate": 1.8163533254443707e-05, + "loss": 1.0839, + "step": 3511 + }, + { + "epoch": 0.22020189353564487, + "grad_norm": 3.0964224338531494, + "learning_rate": 1.8162360212387065e-05, + "loss": 1.0971, + "step": 3512 + }, + { + "epoch": 0.2202645933914352, + "grad_norm": 3.3177077770233154, + "learning_rate": 1.816118683371433e-05, + "loss": 1.1693, + "step": 3513 + }, + { + "epoch": 0.22032729324722553, + "grad_norm": 3.356462001800537, + "learning_rate": 1.8160013118473893e-05, + "loss": 0.9914, + "step": 3514 + }, + { + "epoch": 0.22038999310301585, + "grad_norm": 2.9065821170806885, + "learning_rate": 1.815883906671416e-05, + "loss": 1.1695, + "step": 3515 + }, + { + "epoch": 0.2204526929588062, + "grad_norm": 3.0364532470703125, + "learning_rate": 1.8157664678483547e-05, + "loss": 1.1402, + "step": 3516 + }, + { + "epoch": 0.22051539281459653, + "grad_norm": 3.2316689491271973, + "learning_rate": 1.8156489953830488e-05, + "loss": 1.278, + "step": 3517 + }, + { + "epoch": 0.22057809267038686, + "grad_norm": 3.0465900897979736, + "learning_rate": 1.8155314892803427e-05, + "loss": 1.1134, + "step": 3518 + }, + { + "epoch": 0.2206407925261772, + "grad_norm": 3.165236234664917, + "learning_rate": 1.815413949545082e-05, + "loss": 1.2428, + "step": 3519 + }, + { + "epoch": 0.22070349238196751, + "grad_norm": 2.966722249984741, + "learning_rate": 1.8152963761821147e-05, + "loss": 1.0822, + "step": 3520 + }, + { + "epoch": 0.22076619223775784, + "grad_norm": 2.64937162399292, + "learning_rate": 1.815178769196289e-05, + "loss": 1.1135, + "step": 3521 + }, + { + "epoch": 0.2208288920935482, + "grad_norm": 3.297508955001831, + "learning_rate": 1.8150611285924556e-05, + "loss": 1.2181, + "step": 3522 + }, + { + "epoch": 0.22089159194933852, + "grad_norm": 2.887660503387451, + "learning_rate": 1.8149434543754653e-05, + "loss": 1.0749, + "step": 3523 + }, + { + "epoch": 0.22095429180512885, + "grad_norm": 
3.0880675315856934, + "learning_rate": 1.8148257465501718e-05, + "loss": 1.2163, + "step": 3524 + }, + { + "epoch": 0.22101699166091918, + "grad_norm": 2.9483907222747803, + "learning_rate": 1.814708005121429e-05, + "loss": 1.1847, + "step": 3525 + }, + { + "epoch": 0.2210796915167095, + "grad_norm": 3.0010783672332764, + "learning_rate": 1.8145902300940917e-05, + "loss": 1.0146, + "step": 3526 + }, + { + "epoch": 0.22114239137249983, + "grad_norm": 2.7564449310302734, + "learning_rate": 1.8144724214730182e-05, + "loss": 1.2997, + "step": 3527 + }, + { + "epoch": 0.22120509122829018, + "grad_norm": 3.0326125621795654, + "learning_rate": 1.8143545792630667e-05, + "loss": 1.0801, + "step": 3528 + }, + { + "epoch": 0.2212677910840805, + "grad_norm": 2.600937604904175, + "learning_rate": 1.8142367034690967e-05, + "loss": 1.2085, + "step": 3529 + }, + { + "epoch": 0.22133049093987084, + "grad_norm": 2.8377468585968018, + "learning_rate": 1.814118794095969e-05, + "loss": 1.2035, + "step": 3530 + }, + { + "epoch": 0.22139319079566117, + "grad_norm": 2.797037363052368, + "learning_rate": 1.8140008511485474e-05, + "loss": 1.1849, + "step": 3531 + }, + { + "epoch": 0.2214558906514515, + "grad_norm": 2.588040590286255, + "learning_rate": 1.813882874631695e-05, + "loss": 1.1927, + "step": 3532 + }, + { + "epoch": 0.22151859050724185, + "grad_norm": 2.7694005966186523, + "learning_rate": 1.8137648645502767e-05, + "loss": 1.1427, + "step": 3533 + }, + { + "epoch": 0.22158129036303217, + "grad_norm": 2.8629770278930664, + "learning_rate": 1.81364682090916e-05, + "loss": 1.158, + "step": 3534 + }, + { + "epoch": 0.2216439902188225, + "grad_norm": 2.79976487159729, + "learning_rate": 1.8135287437132135e-05, + "loss": 1.0214, + "step": 3535 + }, + { + "epoch": 0.22170669007461283, + "grad_norm": 3.2052481174468994, + "learning_rate": 1.8134106329673056e-05, + "loss": 1.0721, + "step": 3536 + }, + { + "epoch": 0.22176938993040315, + "grad_norm": 2.9562549591064453, + "learning_rate": 1.8132924886763078e-05, + "loss": 1.1711, + "step": 3537 + }, + { + "epoch": 0.22183208978619348, + "grad_norm": 2.9687440395355225, + "learning_rate": 1.8131743108450923e-05, + "loss": 1.1232, + "step": 3538 + }, + { + "epoch": 0.22189478964198384, + "grad_norm": 2.829939603805542, + "learning_rate": 1.8130560994785325e-05, + "loss": 1.1447, + "step": 3539 + }, + { + "epoch": 0.22195748949777416, + "grad_norm": 3.027038097381592, + "learning_rate": 1.812937854581504e-05, + "loss": 1.0854, + "step": 3540 + }, + { + "epoch": 0.2220201893535645, + "grad_norm": 2.989236354827881, + "learning_rate": 1.8128195761588827e-05, + "loss": 1.0552, + "step": 3541 + }, + { + "epoch": 0.22208288920935482, + "grad_norm": 2.981571912765503, + "learning_rate": 1.8127012642155465e-05, + "loss": 1.0966, + "step": 3542 + }, + { + "epoch": 0.22214558906514514, + "grad_norm": 2.8743913173675537, + "learning_rate": 1.8125829187563745e-05, + "loss": 1.2425, + "step": 3543 + }, + { + "epoch": 0.22220828892093547, + "grad_norm": 2.853938102722168, + "learning_rate": 1.812464539786248e-05, + "loss": 1.1795, + "step": 3544 + }, + { + "epoch": 0.22227098877672583, + "grad_norm": 3.176154851913452, + "learning_rate": 1.812346127310048e-05, + "loss": 0.9951, + "step": 3545 + }, + { + "epoch": 0.22233368863251615, + "grad_norm": 3.0732507705688477, + "learning_rate": 1.812227681332658e-05, + "loss": 1.1703, + "step": 3546 + }, + { + "epoch": 0.22239638848830648, + "grad_norm": 2.976820945739746, + "learning_rate": 1.8121092018589635e-05, + "loss": 1.3957, + 
"step": 3547 + }, + { + "epoch": 0.2224590883440968, + "grad_norm": 3.0445666313171387, + "learning_rate": 1.81199068889385e-05, + "loss": 1.2381, + "step": 3548 + }, + { + "epoch": 0.22252178819988713, + "grad_norm": 2.8176958560943604, + "learning_rate": 1.8118721424422044e-05, + "loss": 1.1953, + "step": 3549 + }, + { + "epoch": 0.22258448805567746, + "grad_norm": 3.1085050106048584, + "learning_rate": 1.811753562508917e-05, + "loss": 1.3343, + "step": 3550 + }, + { + "epoch": 0.22264718791146781, + "grad_norm": 2.990354061126709, + "learning_rate": 1.8116349490988766e-05, + "loss": 1.1707, + "step": 3551 + }, + { + "epoch": 0.22270988776725814, + "grad_norm": 3.108550548553467, + "learning_rate": 1.811516302216976e-05, + "loss": 1.1717, + "step": 3552 + }, + { + "epoch": 0.22277258762304847, + "grad_norm": 3.048961639404297, + "learning_rate": 1.8113976218681072e-05, + "loss": 1.2634, + "step": 3553 + }, + { + "epoch": 0.2228352874788388, + "grad_norm": 3.0345022678375244, + "learning_rate": 1.8112789080571655e-05, + "loss": 1.1028, + "step": 3554 + }, + { + "epoch": 0.22289798733462912, + "grad_norm": 2.5298426151275635, + "learning_rate": 1.8111601607890458e-05, + "loss": 1.2436, + "step": 3555 + }, + { + "epoch": 0.22296068719041945, + "grad_norm": 3.120645761489868, + "learning_rate": 1.8110413800686456e-05, + "loss": 1.1497, + "step": 3556 + }, + { + "epoch": 0.2230233870462098, + "grad_norm": 2.6972708702087402, + "learning_rate": 1.810922565900864e-05, + "loss": 1.091, + "step": 3557 + }, + { + "epoch": 0.22308608690200013, + "grad_norm": 2.839916944503784, + "learning_rate": 1.8108037182906e-05, + "loss": 1.1607, + "step": 3558 + }, + { + "epoch": 0.22314878675779046, + "grad_norm": 2.7976813316345215, + "learning_rate": 1.810684837242755e-05, + "loss": 1.1639, + "step": 3559 + }, + { + "epoch": 0.22321148661358078, + "grad_norm": 2.7381718158721924, + "learning_rate": 1.810565922762232e-05, + "loss": 1.2491, + "step": 3560 + }, + { + "epoch": 0.2232741864693711, + "grad_norm": 2.8832619190216064, + "learning_rate": 1.8104469748539354e-05, + "loss": 1.248, + "step": 3561 + }, + { + "epoch": 0.22333688632516147, + "grad_norm": 2.9808406829833984, + "learning_rate": 1.8103279935227694e-05, + "loss": 1.0971, + "step": 3562 + }, + { + "epoch": 0.2233995861809518, + "grad_norm": 3.119597911834717, + "learning_rate": 1.8102089787736423e-05, + "loss": 1.1883, + "step": 3563 + }, + { + "epoch": 0.22346228603674212, + "grad_norm": 2.741804838180542, + "learning_rate": 1.8100899306114616e-05, + "loss": 1.4323, + "step": 3564 + }, + { + "epoch": 0.22352498589253245, + "grad_norm": 3.357074737548828, + "learning_rate": 1.8099708490411362e-05, + "loss": 1.1911, + "step": 3565 + }, + { + "epoch": 0.22358768574832277, + "grad_norm": 2.6632301807403564, + "learning_rate": 1.8098517340675777e-05, + "loss": 1.2866, + "step": 3566 + }, + { + "epoch": 0.2236503856041131, + "grad_norm": 3.2329330444335938, + "learning_rate": 1.8097325856956987e-05, + "loss": 1.3981, + "step": 3567 + }, + { + "epoch": 0.22371308545990345, + "grad_norm": 2.836639881134033, + "learning_rate": 1.8096134039304122e-05, + "loss": 1.3676, + "step": 3568 + }, + { + "epoch": 0.22377578531569378, + "grad_norm": 2.975579261779785, + "learning_rate": 1.8094941887766335e-05, + "loss": 1.2939, + "step": 3569 + }, + { + "epoch": 0.2238384851714841, + "grad_norm": 3.0368759632110596, + "learning_rate": 1.8093749402392797e-05, + "loss": 1.1915, + "step": 3570 + }, + { + "epoch": 0.22390118502727444, + "grad_norm": 
2.6917636394500732, + "learning_rate": 1.8092556583232674e-05, + "loss": 1.0224, + "step": 3571 + }, + { + "epoch": 0.22396388488306476, + "grad_norm": 2.910423994064331, + "learning_rate": 1.8091363430335167e-05, + "loss": 1.1237, + "step": 3572 + }, + { + "epoch": 0.2240265847388551, + "grad_norm": 2.9281411170959473, + "learning_rate": 1.8090169943749477e-05, + "loss": 0.9764, + "step": 3573 + }, + { + "epoch": 0.22408928459464544, + "grad_norm": 2.9743144512176514, + "learning_rate": 1.8088976123524827e-05, + "loss": 1.2018, + "step": 3574 + }, + { + "epoch": 0.22415198445043577, + "grad_norm": 2.9399046897888184, + "learning_rate": 1.8087781969710445e-05, + "loss": 1.1229, + "step": 3575 + }, + { + "epoch": 0.2242146843062261, + "grad_norm": 2.6536636352539062, + "learning_rate": 1.8086587482355585e-05, + "loss": 1.2697, + "step": 3576 + }, + { + "epoch": 0.22427738416201642, + "grad_norm": 2.9566242694854736, + "learning_rate": 1.80853926615095e-05, + "loss": 1.1836, + "step": 3577 + }, + { + "epoch": 0.22434008401780675, + "grad_norm": 2.959028482437134, + "learning_rate": 1.8084197507221473e-05, + "loss": 1.0571, + "step": 3578 + }, + { + "epoch": 0.22440278387359708, + "grad_norm": 3.2681009769439697, + "learning_rate": 1.8083002019540784e-05, + "loss": 1.1025, + "step": 3579 + }, + { + "epoch": 0.22446548372938743, + "grad_norm": 2.956946849822998, + "learning_rate": 1.8081806198516744e-05, + "loss": 0.9472, + "step": 3580 + }, + { + "epoch": 0.22452818358517776, + "grad_norm": 2.913679599761963, + "learning_rate": 1.808061004419866e-05, + "loss": 1.2225, + "step": 3581 + }, + { + "epoch": 0.2245908834409681, + "grad_norm": 3.02230167388916, + "learning_rate": 1.8079413556635864e-05, + "loss": 1.2167, + "step": 3582 + }, + { + "epoch": 0.2246535832967584, + "grad_norm": 2.956846237182617, + "learning_rate": 1.8078216735877704e-05, + "loss": 1.1871, + "step": 3583 + }, + { + "epoch": 0.22471628315254874, + "grad_norm": 2.89901065826416, + "learning_rate": 1.807701958197353e-05, + "loss": 1.1346, + "step": 3584 + }, + { + "epoch": 0.22477898300833907, + "grad_norm": 3.4894659519195557, + "learning_rate": 1.8075822094972716e-05, + "loss": 1.2752, + "step": 3585 + }, + { + "epoch": 0.22484168286412942, + "grad_norm": 3.2771265506744385, + "learning_rate": 1.8074624274924645e-05, + "loss": 1.1788, + "step": 3586 + }, + { + "epoch": 0.22490438271991975, + "grad_norm": 2.6609530448913574, + "learning_rate": 1.8073426121878717e-05, + "loss": 1.1623, + "step": 3587 + }, + { + "epoch": 0.22496708257571008, + "grad_norm": 2.9689862728118896, + "learning_rate": 1.8072227635884344e-05, + "loss": 1.1993, + "step": 3588 + }, + { + "epoch": 0.2250297824315004, + "grad_norm": 2.9919233322143555, + "learning_rate": 1.807102881699095e-05, + "loss": 1.1932, + "step": 3589 + }, + { + "epoch": 0.22509248228729073, + "grad_norm": 2.92185115814209, + "learning_rate": 1.8069829665247975e-05, + "loss": 1.2889, + "step": 3590 + }, + { + "epoch": 0.22515518214308108, + "grad_norm": 2.891576051712036, + "learning_rate": 1.8068630180704874e-05, + "loss": 1.1629, + "step": 3591 + }, + { + "epoch": 0.2252178819988714, + "grad_norm": 3.390608787536621, + "learning_rate": 1.806743036341111e-05, + "loss": 1.3541, + "step": 3592 + }, + { + "epoch": 0.22528058185466174, + "grad_norm": 3.1339166164398193, + "learning_rate": 1.8066230213416167e-05, + "loss": 1.2109, + "step": 3593 + }, + { + "epoch": 0.22534328171045206, + "grad_norm": 3.053074836730957, + "learning_rate": 1.8065029730769534e-05, + "loss": 1.1646, + 
"step": 3594 + }, + { + "epoch": 0.2254059815662424, + "grad_norm": 2.950464963912964, + "learning_rate": 1.806382891552073e-05, + "loss": 1.3166, + "step": 3595 + }, + { + "epoch": 0.22546868142203272, + "grad_norm": 2.9382505416870117, + "learning_rate": 1.806262776771926e-05, + "loss": 1.0894, + "step": 3596 + }, + { + "epoch": 0.22553138127782307, + "grad_norm": 2.9241178035736084, + "learning_rate": 1.8061426287414675e-05, + "loss": 1.2677, + "step": 3597 + }, + { + "epoch": 0.2255940811336134, + "grad_norm": 2.7559101581573486, + "learning_rate": 1.8060224474656517e-05, + "loss": 1.0299, + "step": 3598 + }, + { + "epoch": 0.22565678098940373, + "grad_norm": 2.6897053718566895, + "learning_rate": 1.805902232949435e-05, + "loss": 1.0278, + "step": 3599 + }, + { + "epoch": 0.22571948084519405, + "grad_norm": 3.1657676696777344, + "learning_rate": 1.805781985197775e-05, + "loss": 0.9636, + "step": 3600 + }, + { + "epoch": 0.22578218070098438, + "grad_norm": 3.120570182800293, + "learning_rate": 1.8056617042156307e-05, + "loss": 1.1199, + "step": 3601 + }, + { + "epoch": 0.2258448805567747, + "grad_norm": 3.1383817195892334, + "learning_rate": 1.8055413900079624e-05, + "loss": 1.1832, + "step": 3602 + }, + { + "epoch": 0.22590758041256506, + "grad_norm": 2.9242420196533203, + "learning_rate": 1.805421042579732e-05, + "loss": 1.3401, + "step": 3603 + }, + { + "epoch": 0.2259702802683554, + "grad_norm": 3.0105063915252686, + "learning_rate": 1.805300661935903e-05, + "loss": 1.1057, + "step": 3604 + }, + { + "epoch": 0.22603298012414572, + "grad_norm": 2.8944406509399414, + "learning_rate": 1.8051802480814394e-05, + "loss": 1.2548, + "step": 3605 + }, + { + "epoch": 0.22609567997993604, + "grad_norm": 2.8623287677764893, + "learning_rate": 1.805059801021307e-05, + "loss": 1.181, + "step": 3606 + }, + { + "epoch": 0.22615837983572637, + "grad_norm": 3.057370901107788, + "learning_rate": 1.8049393207604734e-05, + "loss": 1.1325, + "step": 3607 + }, + { + "epoch": 0.2262210796915167, + "grad_norm": 3.3129076957702637, + "learning_rate": 1.804818807303907e-05, + "loss": 0.8461, + "step": 3608 + }, + { + "epoch": 0.22628377954730705, + "grad_norm": 2.9410529136657715, + "learning_rate": 1.8046982606565778e-05, + "loss": 1.1755, + "step": 3609 + }, + { + "epoch": 0.22634647940309738, + "grad_norm": 2.7447917461395264, + "learning_rate": 1.8045776808234573e-05, + "loss": 1.077, + "step": 3610 + }, + { + "epoch": 0.2264091792588877, + "grad_norm": 3.1841228008270264, + "learning_rate": 1.8044570678095178e-05, + "loss": 1.2108, + "step": 3611 + }, + { + "epoch": 0.22647187911467803, + "grad_norm": 3.001784086227417, + "learning_rate": 1.8043364216197342e-05, + "loss": 1.0283, + "step": 3612 + }, + { + "epoch": 0.22653457897046836, + "grad_norm": 2.601618766784668, + "learning_rate": 1.8042157422590813e-05, + "loss": 1.2164, + "step": 3613 + }, + { + "epoch": 0.2265972788262587, + "grad_norm": 2.8666069507598877, + "learning_rate": 1.8040950297325358e-05, + "loss": 1.239, + "step": 3614 + }, + { + "epoch": 0.22665997868204904, + "grad_norm": 3.2170183658599854, + "learning_rate": 1.8039742840450764e-05, + "loss": 1.1708, + "step": 3615 + }, + { + "epoch": 0.22672267853783937, + "grad_norm": 2.9447567462921143, + "learning_rate": 1.803853505201682e-05, + "loss": 1.1381, + "step": 3616 + }, + { + "epoch": 0.2267853783936297, + "grad_norm": 3.1207873821258545, + "learning_rate": 1.8037326932073346e-05, + "loss": 1.209, + "step": 3617 + }, + { + "epoch": 0.22684807824942002, + "grad_norm": 
3.042819023132324, + "learning_rate": 1.8036118480670155e-05, + "loss": 1.1498, + "step": 3618 + }, + { + "epoch": 0.22691077810521035, + "grad_norm": 3.0286102294921875, + "learning_rate": 1.8034909697857086e-05, + "loss": 1.0866, + "step": 3619 + }, + { + "epoch": 0.2269734779610007, + "grad_norm": 2.733035087585449, + "learning_rate": 1.803370058368399e-05, + "loss": 1.2261, + "step": 3620 + }, + { + "epoch": 0.22703617781679103, + "grad_norm": 3.1732780933380127, + "learning_rate": 1.8032491138200733e-05, + "loss": 1.2359, + "step": 3621 + }, + { + "epoch": 0.22709887767258136, + "grad_norm": 2.939980983734131, + "learning_rate": 1.8031281361457188e-05, + "loss": 1.1102, + "step": 3622 + }, + { + "epoch": 0.22716157752837168, + "grad_norm": 2.7137248516082764, + "learning_rate": 1.8030071253503255e-05, + "loss": 1.1916, + "step": 3623 + }, + { + "epoch": 0.227224277384162, + "grad_norm": 2.750880479812622, + "learning_rate": 1.8028860814388826e-05, + "loss": 1.2381, + "step": 3624 + }, + { + "epoch": 0.22728697723995234, + "grad_norm": 3.051243305206299, + "learning_rate": 1.802765004416383e-05, + "loss": 1.1555, + "step": 3625 + }, + { + "epoch": 0.2273496770957427, + "grad_norm": 2.796809196472168, + "learning_rate": 1.8026438942878196e-05, + "loss": 1.0996, + "step": 3626 + }, + { + "epoch": 0.22741237695153302, + "grad_norm": 2.8803603649139404, + "learning_rate": 1.8025227510581867e-05, + "loss": 1.1805, + "step": 3627 + }, + { + "epoch": 0.22747507680732335, + "grad_norm": 2.7037997245788574, + "learning_rate": 1.8024015747324806e-05, + "loss": 1.3168, + "step": 3628 + }, + { + "epoch": 0.22753777666311367, + "grad_norm": 3.312079429626465, + "learning_rate": 1.8022803653156983e-05, + "loss": 1.1377, + "step": 3629 + }, + { + "epoch": 0.227600476518904, + "grad_norm": 2.9159226417541504, + "learning_rate": 1.802159122812839e-05, + "loss": 1.2941, + "step": 3630 + }, + { + "epoch": 0.22766317637469433, + "grad_norm": 2.9653189182281494, + "learning_rate": 1.8020378472289025e-05, + "loss": 1.1594, + "step": 3631 + }, + { + "epoch": 0.22772587623048468, + "grad_norm": 2.852954149246216, + "learning_rate": 1.80191653856889e-05, + "loss": 1.1861, + "step": 3632 + }, + { + "epoch": 0.227788576086275, + "grad_norm": 2.877570867538452, + "learning_rate": 1.8017951968378046e-05, + "loss": 1.1726, + "step": 3633 + }, + { + "epoch": 0.22785127594206533, + "grad_norm": 2.6509335041046143, + "learning_rate": 1.80167382204065e-05, + "loss": 1.3415, + "step": 3634 + }, + { + "epoch": 0.22791397579785566, + "grad_norm": 3.037809133529663, + "learning_rate": 1.801552414182432e-05, + "loss": 1.1539, + "step": 3635 + }, + { + "epoch": 0.227976675653646, + "grad_norm": 2.8155715465545654, + "learning_rate": 1.8014309732681578e-05, + "loss": 1.2575, + "step": 3636 + }, + { + "epoch": 0.22803937550943632, + "grad_norm": 2.6981961727142334, + "learning_rate": 1.8013094993028348e-05, + "loss": 1.2047, + "step": 3637 + }, + { + "epoch": 0.22810207536522667, + "grad_norm": 2.8835628032684326, + "learning_rate": 1.801187992291473e-05, + "loss": 1.1892, + "step": 3638 + }, + { + "epoch": 0.228164775221017, + "grad_norm": 2.805055618286133, + "learning_rate": 1.8010664522390838e-05, + "loss": 1.094, + "step": 3639 + }, + { + "epoch": 0.22822747507680732, + "grad_norm": 2.788329601287842, + "learning_rate": 1.8009448791506792e-05, + "loss": 1.2774, + "step": 3640 + }, + { + "epoch": 0.22829017493259765, + "grad_norm": 3.2359044551849365, + "learning_rate": 1.8008232730312724e-05, + "loss": 1.1786, + "step": 
3641 + }, + { + "epoch": 0.22835287478838798, + "grad_norm": 2.712146759033203, + "learning_rate": 1.8007016338858793e-05, + "loss": 1.2977, + "step": 3642 + }, + { + "epoch": 0.22841557464417833, + "grad_norm": 2.6942105293273926, + "learning_rate": 1.8005799617195155e-05, + "loss": 1.1948, + "step": 3643 + }, + { + "epoch": 0.22847827449996866, + "grad_norm": 3.1172168254852295, + "learning_rate": 1.800458256537199e-05, + "loss": 1.1919, + "step": 3644 + }, + { + "epoch": 0.22854097435575899, + "grad_norm": 3.196993589401245, + "learning_rate": 1.8003365183439495e-05, + "loss": 1.1291, + "step": 3645 + }, + { + "epoch": 0.2286036742115493, + "grad_norm": 2.924241065979004, + "learning_rate": 1.8002147471447866e-05, + "loss": 1.1752, + "step": 3646 + }, + { + "epoch": 0.22866637406733964, + "grad_norm": 2.8910717964172363, + "learning_rate": 1.800092942944733e-05, + "loss": 1.2347, + "step": 3647 + }, + { + "epoch": 0.22872907392312997, + "grad_norm": 2.947491407394409, + "learning_rate": 1.7999711057488112e-05, + "loss": 1.1037, + "step": 3648 + }, + { + "epoch": 0.22879177377892032, + "grad_norm": 2.8996262550354004, + "learning_rate": 1.799849235562046e-05, + "loss": 1.1098, + "step": 3649 + }, + { + "epoch": 0.22885447363471065, + "grad_norm": 3.0380823612213135, + "learning_rate": 1.7997273323894636e-05, + "loss": 1.0305, + "step": 3650 + }, + { + "epoch": 0.22891717349050097, + "grad_norm": 3.057347059249878, + "learning_rate": 1.7996053962360908e-05, + "loss": 1.2986, + "step": 3651 + }, + { + "epoch": 0.2289798733462913, + "grad_norm": 2.831235408782959, + "learning_rate": 1.799483427106957e-05, + "loss": 1.0771, + "step": 3652 + }, + { + "epoch": 0.22904257320208163, + "grad_norm": 2.6839349269866943, + "learning_rate": 1.7993614250070918e-05, + "loss": 1.0984, + "step": 3653 + }, + { + "epoch": 0.22910527305787196, + "grad_norm": 2.9827468395233154, + "learning_rate": 1.799239389941526e-05, + "loss": 1.3362, + "step": 3654 + }, + { + "epoch": 0.2291679729136623, + "grad_norm": 2.7602622509002686, + "learning_rate": 1.799117321915293e-05, + "loss": 1.2545, + "step": 3655 + }, + { + "epoch": 0.22923067276945264, + "grad_norm": 3.183417797088623, + "learning_rate": 1.798995220933427e-05, + "loss": 1.1456, + "step": 3656 + }, + { + "epoch": 0.22929337262524296, + "grad_norm": 2.8442916870117188, + "learning_rate": 1.798873087000963e-05, + "loss": 1.1669, + "step": 3657 + }, + { + "epoch": 0.2293560724810333, + "grad_norm": 2.9376800060272217, + "learning_rate": 1.7987509201229378e-05, + "loss": 1.0009, + "step": 3658 + }, + { + "epoch": 0.22941877233682362, + "grad_norm": 2.793410539627075, + "learning_rate": 1.79862872030439e-05, + "loss": 1.0061, + "step": 3659 + }, + { + "epoch": 0.22948147219261394, + "grad_norm": 3.1511054039001465, + "learning_rate": 1.798506487550359e-05, + "loss": 1.1612, + "step": 3660 + }, + { + "epoch": 0.2295441720484043, + "grad_norm": 2.7782585620880127, + "learning_rate": 1.7983842218658852e-05, + "loss": 1.2229, + "step": 3661 + }, + { + "epoch": 0.22960687190419463, + "grad_norm": 2.930635452270508, + "learning_rate": 1.7982619232560117e-05, + "loss": 1.1334, + "step": 3662 + }, + { + "epoch": 0.22966957175998495, + "grad_norm": 2.906080722808838, + "learning_rate": 1.7981395917257816e-05, + "loss": 1.1877, + "step": 3663 + }, + { + "epoch": 0.22973227161577528, + "grad_norm": 3.366361141204834, + "learning_rate": 1.7980172272802398e-05, + "loss": 1.2529, + "step": 3664 + }, + { + "epoch": 0.2297949714715656, + "grad_norm": 3.0131583213806152, + 
"learning_rate": 1.7978948299244326e-05, + "loss": 1.2449, + "step": 3665 + }, + { + "epoch": 0.22985767132735593, + "grad_norm": 2.739013671875, + "learning_rate": 1.7977723996634075e-05, + "loss": 1.2698, + "step": 3666 + }, + { + "epoch": 0.2299203711831463, + "grad_norm": 2.92710280418396, + "learning_rate": 1.7976499365022144e-05, + "loss": 1.1595, + "step": 3667 + }, + { + "epoch": 0.22998307103893662, + "grad_norm": 2.9555325508117676, + "learning_rate": 1.7975274404459028e-05, + "loss": 1.1041, + "step": 3668 + }, + { + "epoch": 0.23004577089472694, + "grad_norm": 3.2984278202056885, + "learning_rate": 1.7974049114995248e-05, + "loss": 1.1162, + "step": 3669 + }, + { + "epoch": 0.23010847075051727, + "grad_norm": 2.7783758640289307, + "learning_rate": 1.797282349668133e-05, + "loss": 1.1699, + "step": 3670 + }, + { + "epoch": 0.2301711706063076, + "grad_norm": 2.5923593044281006, + "learning_rate": 1.797159754956783e-05, + "loss": 1.1433, + "step": 3671 + }, + { + "epoch": 0.23023387046209795, + "grad_norm": 3.0986385345458984, + "learning_rate": 1.797037127370529e-05, + "loss": 1.1948, + "step": 3672 + }, + { + "epoch": 0.23029657031788828, + "grad_norm": 2.827237367630005, + "learning_rate": 1.7969144669144296e-05, + "loss": 1.1202, + "step": 3673 + }, + { + "epoch": 0.2303592701736786, + "grad_norm": 3.27990460395813, + "learning_rate": 1.796791773593543e-05, + "loss": 1.0265, + "step": 3674 + }, + { + "epoch": 0.23042197002946893, + "grad_norm": 2.7874321937561035, + "learning_rate": 1.7966690474129285e-05, + "loss": 1.3305, + "step": 3675 + }, + { + "epoch": 0.23048466988525926, + "grad_norm": 3.0278806686401367, + "learning_rate": 1.796546288377648e-05, + "loss": 1.1492, + "step": 3676 + }, + { + "epoch": 0.23054736974104958, + "grad_norm": 2.656442403793335, + "learning_rate": 1.7964234964927634e-05, + "loss": 1.1266, + "step": 3677 + }, + { + "epoch": 0.23061006959683994, + "grad_norm": 2.9295742511749268, + "learning_rate": 1.7963006717633395e-05, + "loss": 1.2608, + "step": 3678 + }, + { + "epoch": 0.23067276945263027, + "grad_norm": 3.2355690002441406, + "learning_rate": 1.7961778141944407e-05, + "loss": 1.0364, + "step": 3679 + }, + { + "epoch": 0.2307354693084206, + "grad_norm": 2.6644937992095947, + "learning_rate": 1.796054923791134e-05, + "loss": 1.1361, + "step": 3680 + }, + { + "epoch": 0.23079816916421092, + "grad_norm": 3.026761770248413, + "learning_rate": 1.795932000558488e-05, + "loss": 1.1408, + "step": 3681 + }, + { + "epoch": 0.23086086902000125, + "grad_norm": 2.966275215148926, + "learning_rate": 1.7958090445015707e-05, + "loss": 1.3253, + "step": 3682 + }, + { + "epoch": 0.23092356887579157, + "grad_norm": 2.7552950382232666, + "learning_rate": 1.7956860556254544e-05, + "loss": 1.3092, + "step": 3683 + }, + { + "epoch": 0.23098626873158193, + "grad_norm": 3.0775644779205322, + "learning_rate": 1.7955630339352102e-05, + "loss": 1.1202, + "step": 3684 + }, + { + "epoch": 0.23104896858737226, + "grad_norm": 2.8708980083465576, + "learning_rate": 1.7954399794359115e-05, + "loss": 1.1734, + "step": 3685 + }, + { + "epoch": 0.23111166844316258, + "grad_norm": 3.076138496398926, + "learning_rate": 1.7953168921326338e-05, + "loss": 1.1967, + "step": 3686 + }, + { + "epoch": 0.2311743682989529, + "grad_norm": 3.051985263824463, + "learning_rate": 1.7951937720304524e-05, + "loss": 1.3057, + "step": 3687 + }, + { + "epoch": 0.23123706815474324, + "grad_norm": 2.8222289085388184, + "learning_rate": 1.7950706191344452e-05, + "loss": 1.108, + "step": 3688 + }, + { + 
"epoch": 0.23129976801053356, + "grad_norm": 2.9288032054901123, + "learning_rate": 1.794947433449691e-05, + "loss": 1.2575, + "step": 3689 + }, + { + "epoch": 0.23136246786632392, + "grad_norm": 2.9505739212036133, + "learning_rate": 1.79482421498127e-05, + "loss": 1.217, + "step": 3690 + }, + { + "epoch": 0.23142516772211424, + "grad_norm": 3.0691640377044678, + "learning_rate": 1.7947009637342634e-05, + "loss": 1.235, + "step": 3691 + }, + { + "epoch": 0.23148786757790457, + "grad_norm": 3.029360055923462, + "learning_rate": 1.7945776797137544e-05, + "loss": 1.2346, + "step": 3692 + }, + { + "epoch": 0.2315505674336949, + "grad_norm": 2.9194607734680176, + "learning_rate": 1.7944543629248273e-05, + "loss": 1.1838, + "step": 3693 + }, + { + "epoch": 0.23161326728948523, + "grad_norm": 3.0526087284088135, + "learning_rate": 1.7943310133725674e-05, + "loss": 1.0914, + "step": 3694 + }, + { + "epoch": 0.23167596714527555, + "grad_norm": 3.224966526031494, + "learning_rate": 1.794207631062062e-05, + "loss": 1.1273, + "step": 3695 + }, + { + "epoch": 0.2317386670010659, + "grad_norm": 2.9591007232666016, + "learning_rate": 1.7940842159983993e-05, + "loss": 1.1853, + "step": 3696 + }, + { + "epoch": 0.23180136685685623, + "grad_norm": 2.816899538040161, + "learning_rate": 1.793960768186669e-05, + "loss": 1.2089, + "step": 3697 + }, + { + "epoch": 0.23186406671264656, + "grad_norm": 3.0356369018554688, + "learning_rate": 1.7938372876319614e-05, + "loss": 1.0994, + "step": 3698 + }, + { + "epoch": 0.2319267665684369, + "grad_norm": 2.9502341747283936, + "learning_rate": 1.7937137743393695e-05, + "loss": 1.1628, + "step": 3699 + }, + { + "epoch": 0.23198946642422721, + "grad_norm": 2.741265296936035, + "learning_rate": 1.7935902283139873e-05, + "loss": 1.1584, + "step": 3700 + }, + { + "epoch": 0.23205216628001757, + "grad_norm": 2.9334161281585693, + "learning_rate": 1.793466649560909e-05, + "loss": 1.1957, + "step": 3701 + }, + { + "epoch": 0.2321148661358079, + "grad_norm": 3.170027494430542, + "learning_rate": 1.7933430380852317e-05, + "loss": 1.062, + "step": 3702 + }, + { + "epoch": 0.23217756599159822, + "grad_norm": 2.6776485443115234, + "learning_rate": 1.7932193938920526e-05, + "loss": 1.3416, + "step": 3703 + }, + { + "epoch": 0.23224026584738855, + "grad_norm": 2.8746819496154785, + "learning_rate": 1.7930957169864713e-05, + "loss": 1.152, + "step": 3704 + }, + { + "epoch": 0.23230296570317888, + "grad_norm": 2.950089931488037, + "learning_rate": 1.7929720073735882e-05, + "loss": 1.1289, + "step": 3705 + }, + { + "epoch": 0.2323656655589692, + "grad_norm": 3.2309300899505615, + "learning_rate": 1.7928482650585045e-05, + "loss": 1.1772, + "step": 3706 + }, + { + "epoch": 0.23242836541475956, + "grad_norm": 3.103158473968506, + "learning_rate": 1.7927244900463237e-05, + "loss": 1.1414, + "step": 3707 + }, + { + "epoch": 0.23249106527054988, + "grad_norm": 3.5865421295166016, + "learning_rate": 1.7926006823421508e-05, + "loss": 1.2056, + "step": 3708 + }, + { + "epoch": 0.2325537651263402, + "grad_norm": 3.0684852600097656, + "learning_rate": 1.7924768419510906e-05, + "loss": 1.3824, + "step": 3709 + }, + { + "epoch": 0.23261646498213054, + "grad_norm": 2.9115426540374756, + "learning_rate": 1.792352968878251e-05, + "loss": 1.0792, + "step": 3710 + }, + { + "epoch": 0.23267916483792087, + "grad_norm": 2.864105224609375, + "learning_rate": 1.7922290631287403e-05, + "loss": 1.0722, + "step": 3711 + }, + { + "epoch": 0.2327418646937112, + "grad_norm": 2.895184278488159, + "learning_rate": 
1.7921051247076685e-05, + "loss": 1.2854, + "step": 3712 + }, + { + "epoch": 0.23280456454950155, + "grad_norm": 2.8035459518432617, + "learning_rate": 1.791981153620147e-05, + "loss": 1.1476, + "step": 3713 + }, + { + "epoch": 0.23286726440529187, + "grad_norm": 3.627516508102417, + "learning_rate": 1.791857149871288e-05, + "loss": 1.0395, + "step": 3714 + }, + { + "epoch": 0.2329299642610822, + "grad_norm": 3.044405460357666, + "learning_rate": 1.7917331134662054e-05, + "loss": 1.2561, + "step": 3715 + }, + { + "epoch": 0.23299266411687253, + "grad_norm": 2.8894996643066406, + "learning_rate": 1.7916090444100145e-05, + "loss": 1.1443, + "step": 3716 + }, + { + "epoch": 0.23305536397266285, + "grad_norm": 3.304267644882202, + "learning_rate": 1.7914849427078326e-05, + "loss": 1.3144, + "step": 3717 + }, + { + "epoch": 0.23311806382845318, + "grad_norm": 2.884702444076538, + "learning_rate": 1.7913608083647766e-05, + "loss": 1.2468, + "step": 3718 + }, + { + "epoch": 0.23318076368424354, + "grad_norm": 2.7600901126861572, + "learning_rate": 1.7912366413859663e-05, + "loss": 1.3729, + "step": 3719 + }, + { + "epoch": 0.23324346354003386, + "grad_norm": 3.058915615081787, + "learning_rate": 1.7911124417765226e-05, + "loss": 1.1356, + "step": 3720 + }, + { + "epoch": 0.2333061633958242, + "grad_norm": 3.0757315158843994, + "learning_rate": 1.7909882095415673e-05, + "loss": 1.298, + "step": 3721 + }, + { + "epoch": 0.23336886325161452, + "grad_norm": 2.909719467163086, + "learning_rate": 1.7908639446862236e-05, + "loss": 1.1175, + "step": 3722 + }, + { + "epoch": 0.23343156310740484, + "grad_norm": 3.025355577468872, + "learning_rate": 1.790739647215616e-05, + "loss": 1.1285, + "step": 3723 + }, + { + "epoch": 0.2334942629631952, + "grad_norm": 2.904924154281616, + "learning_rate": 1.7906153171348714e-05, + "loss": 1.1868, + "step": 3724 + }, + { + "epoch": 0.23355696281898553, + "grad_norm": 3.0485634803771973, + "learning_rate": 1.790490954449116e-05, + "loss": 1.3359, + "step": 3725 + }, + { + "epoch": 0.23361966267477585, + "grad_norm": 3.083189010620117, + "learning_rate": 1.7903665591634794e-05, + "loss": 1.2138, + "step": 3726 + }, + { + "epoch": 0.23368236253056618, + "grad_norm": 3.0078585147857666, + "learning_rate": 1.7902421312830915e-05, + "loss": 1.2193, + "step": 3727 + }, + { + "epoch": 0.2337450623863565, + "grad_norm": 2.730030059814453, + "learning_rate": 1.7901176708130835e-05, + "loss": 1.1343, + "step": 3728 + }, + { + "epoch": 0.23380776224214683, + "grad_norm": 2.966252326965332, + "learning_rate": 1.789993177758588e-05, + "loss": 1.2029, + "step": 3729 + }, + { + "epoch": 0.2338704620979372, + "grad_norm": 3.3223538398742676, + "learning_rate": 1.7898686521247396e-05, + "loss": 1.1558, + "step": 3730 + }, + { + "epoch": 0.23393316195372751, + "grad_norm": 3.0343499183654785, + "learning_rate": 1.7897440939166735e-05, + "loss": 1.1848, + "step": 3731 + }, + { + "epoch": 0.23399586180951784, + "grad_norm": 2.900063991546631, + "learning_rate": 1.7896195031395262e-05, + "loss": 1.2793, + "step": 3732 + }, + { + "epoch": 0.23405856166530817, + "grad_norm": 3.04937481880188, + "learning_rate": 1.7894948797984368e-05, + "loss": 1.211, + "step": 3733 + }, + { + "epoch": 0.2341212615210985, + "grad_norm": 3.062575578689575, + "learning_rate": 1.7893702238985433e-05, + "loss": 1.2748, + "step": 3734 + }, + { + "epoch": 0.23418396137688882, + "grad_norm": 2.823662519454956, + "learning_rate": 1.7892455354449877e-05, + "loss": 1.2114, + "step": 3735 + }, + { + "epoch": 
0.23424666123267918, + "grad_norm": 2.9927268028259277, + "learning_rate": 1.7891208144429117e-05, + "loss": 1.2791, + "step": 3736 + }, + { + "epoch": 0.2343093610884695, + "grad_norm": 3.015326738357544, + "learning_rate": 1.788996060897459e-05, + "loss": 1.1789, + "step": 3737 + }, + { + "epoch": 0.23437206094425983, + "grad_norm": 2.9988486766815186, + "learning_rate": 1.788871274813774e-05, + "loss": 1.1169, + "step": 3738 + }, + { + "epoch": 0.23443476080005016, + "grad_norm": 2.9841208457946777, + "learning_rate": 1.7887464561970032e-05, + "loss": 1.1441, + "step": 3739 + }, + { + "epoch": 0.23449746065584048, + "grad_norm": 2.893455743789673, + "learning_rate": 1.7886216050522942e-05, + "loss": 1.3664, + "step": 3740 + }, + { + "epoch": 0.2345601605116308, + "grad_norm": 3.1030335426330566, + "learning_rate": 1.788496721384796e-05, + "loss": 1.1621, + "step": 3741 + }, + { + "epoch": 0.23462286036742117, + "grad_norm": 2.9405531883239746, + "learning_rate": 1.788371805199658e-05, + "loss": 1.1184, + "step": 3742 + }, + { + "epoch": 0.2346855602232115, + "grad_norm": 3.2026307582855225, + "learning_rate": 1.7882468565020327e-05, + "loss": 1.2904, + "step": 3743 + }, + { + "epoch": 0.23474826007900182, + "grad_norm": 2.8419113159179688, + "learning_rate": 1.7881218752970725e-05, + "loss": 1.0108, + "step": 3744 + }, + { + "epoch": 0.23481095993479215, + "grad_norm": 2.838763952255249, + "learning_rate": 1.787996861589932e-05, + "loss": 1.2407, + "step": 3745 + }, + { + "epoch": 0.23487365979058247, + "grad_norm": 2.9557671546936035, + "learning_rate": 1.787871815385766e-05, + "loss": 1.0482, + "step": 3746 + }, + { + "epoch": 0.2349363596463728, + "grad_norm": 3.04819393157959, + "learning_rate": 1.7877467366897325e-05, + "loss": 1.1614, + "step": 3747 + }, + { + "epoch": 0.23499905950216315, + "grad_norm": 2.905240774154663, + "learning_rate": 1.787621625506989e-05, + "loss": 1.1454, + "step": 3748 + }, + { + "epoch": 0.23506175935795348, + "grad_norm": 2.5505011081695557, + "learning_rate": 1.7874964818426953e-05, + "loss": 1.35, + "step": 3749 + }, + { + "epoch": 0.2351244592137438, + "grad_norm": 2.875330686569214, + "learning_rate": 1.787371305702012e-05, + "loss": 1.1069, + "step": 3750 + }, + { + "epoch": 0.23518715906953414, + "grad_norm": 3.0634615421295166, + "learning_rate": 1.787246097090102e-05, + "loss": 1.2624, + "step": 3751 + }, + { + "epoch": 0.23524985892532446, + "grad_norm": 3.01633358001709, + "learning_rate": 1.7871208560121283e-05, + "loss": 1.042, + "step": 3752 + }, + { + "epoch": 0.23531255878111482, + "grad_norm": 2.7879505157470703, + "learning_rate": 1.7869955824732562e-05, + "loss": 1.1703, + "step": 3753 + }, + { + "epoch": 0.23537525863690514, + "grad_norm": 2.737107753753662, + "learning_rate": 1.7868702764786522e-05, + "loss": 1.3279, + "step": 3754 + }, + { + "epoch": 0.23543795849269547, + "grad_norm": 3.049248218536377, + "learning_rate": 1.7867449380334834e-05, + "loss": 1.1815, + "step": 3755 + }, + { + "epoch": 0.2355006583484858, + "grad_norm": 3.139641046524048, + "learning_rate": 1.786619567142919e-05, + "loss": 1.2414, + "step": 3756 + }, + { + "epoch": 0.23556335820427612, + "grad_norm": 3.4642601013183594, + "learning_rate": 1.7864941638121295e-05, + "loss": 1.0312, + "step": 3757 + }, + { + "epoch": 0.23562605806006645, + "grad_norm": 3.266648530960083, + "learning_rate": 1.786368728046286e-05, + "loss": 1.0856, + "step": 3758 + }, + { + "epoch": 0.2356887579158568, + "grad_norm": 2.9351961612701416, + "learning_rate": 
1.7862432598505618e-05, + "loss": 1.2496, + "step": 3759 + }, + { + "epoch": 0.23575145777164713, + "grad_norm": 3.191131353378296, + "learning_rate": 1.786117759230132e-05, + "loss": 1.0432, + "step": 3760 + }, + { + "epoch": 0.23581415762743746, + "grad_norm": 3.027930498123169, + "learning_rate": 1.7859922261901705e-05, + "loss": 1.1205, + "step": 3761 + }, + { + "epoch": 0.2358768574832278, + "grad_norm": 2.536780834197998, + "learning_rate": 1.7858666607358554e-05, + "loss": 1.2973, + "step": 3762 + }, + { + "epoch": 0.2359395573390181, + "grad_norm": 2.9462945461273193, + "learning_rate": 1.7857410628723656e-05, + "loss": 1.0655, + "step": 3763 + }, + { + "epoch": 0.23600225719480844, + "grad_norm": 3.0196619033813477, + "learning_rate": 1.7856154326048793e-05, + "loss": 1.2299, + "step": 3764 + }, + { + "epoch": 0.2360649570505988, + "grad_norm": 2.866791009902954, + "learning_rate": 1.7854897699385786e-05, + "loss": 1.1384, + "step": 3765 + }, + { + "epoch": 0.23612765690638912, + "grad_norm": 3.0794711112976074, + "learning_rate": 1.7853640748786457e-05, + "loss": 1.1531, + "step": 3766 + }, + { + "epoch": 0.23619035676217945, + "grad_norm": 2.6792070865631104, + "learning_rate": 1.7852383474302633e-05, + "loss": 1.1976, + "step": 3767 + }, + { + "epoch": 0.23625305661796978, + "grad_norm": 2.9655563831329346, + "learning_rate": 1.785112587598618e-05, + "loss": 1.1932, + "step": 3768 + }, + { + "epoch": 0.2363157564737601, + "grad_norm": 2.9225683212280273, + "learning_rate": 1.784986795388895e-05, + "loss": 1.195, + "step": 3769 + }, + { + "epoch": 0.23637845632955043, + "grad_norm": 2.9782559871673584, + "learning_rate": 1.784860970806282e-05, + "loss": 1.114, + "step": 3770 + }, + { + "epoch": 0.23644115618534078, + "grad_norm": 3.344970464706421, + "learning_rate": 1.7847351138559686e-05, + "loss": 1.2572, + "step": 3771 + }, + { + "epoch": 0.2365038560411311, + "grad_norm": 3.380640983581543, + "learning_rate": 1.784609224543145e-05, + "loss": 1.1441, + "step": 3772 + }, + { + "epoch": 0.23656655589692144, + "grad_norm": 2.8827316761016846, + "learning_rate": 1.7844833028730022e-05, + "loss": 1.1836, + "step": 3773 + }, + { + "epoch": 0.23662925575271176, + "grad_norm": 3.0622012615203857, + "learning_rate": 1.784357348850734e-05, + "loss": 1.2469, + "step": 3774 + }, + { + "epoch": 0.2366919556085021, + "grad_norm": 2.7302029132843018, + "learning_rate": 1.7842313624815343e-05, + "loss": 1.0617, + "step": 3775 + }, + { + "epoch": 0.23675465546429242, + "grad_norm": 2.9276180267333984, + "learning_rate": 1.7841053437705992e-05, + "loss": 1.294, + "step": 3776 + }, + { + "epoch": 0.23681735532008277, + "grad_norm": 2.9978744983673096, + "learning_rate": 1.7839792927231253e-05, + "loss": 1.1007, + "step": 3777 + }, + { + "epoch": 0.2368800551758731, + "grad_norm": 2.821282148361206, + "learning_rate": 1.7838532093443116e-05, + "loss": 1.1699, + "step": 3778 + }, + { + "epoch": 0.23694275503166343, + "grad_norm": 3.2890923023223877, + "learning_rate": 1.783727093639357e-05, + "loss": 1.2125, + "step": 3779 + }, + { + "epoch": 0.23700545488745375, + "grad_norm": 2.8168537616729736, + "learning_rate": 1.7836009456134626e-05, + "loss": 1.211, + "step": 3780 + }, + { + "epoch": 0.23706815474324408, + "grad_norm": 2.836068630218506, + "learning_rate": 1.7834747652718313e-05, + "loss": 1.192, + "step": 3781 + }, + { + "epoch": 0.23713085459903444, + "grad_norm": 3.060673475265503, + "learning_rate": 1.7833485526196664e-05, + "loss": 1.0883, + "step": 3782 + }, + { + "epoch": 
0.23719355445482476, + "grad_norm": 2.6712474822998047, + "learning_rate": 1.7832223076621728e-05, + "loss": 1.348, + "step": 3783 + }, + { + "epoch": 0.2372562543106151, + "grad_norm": 2.7608304023742676, + "learning_rate": 1.7830960304045573e-05, + "loss": 1.0974, + "step": 3784 + }, + { + "epoch": 0.23731895416640542, + "grad_norm": 3.113784074783325, + "learning_rate": 1.7829697208520272e-05, + "loss": 1.1321, + "step": 3785 + }, + { + "epoch": 0.23738165402219574, + "grad_norm": 3.5236029624938965, + "learning_rate": 1.7828433790097914e-05, + "loss": 1.1749, + "step": 3786 + }, + { + "epoch": 0.23744435387798607, + "grad_norm": 3.117981433868408, + "learning_rate": 1.7827170048830608e-05, + "loss": 1.0232, + "step": 3787 + }, + { + "epoch": 0.23750705373377642, + "grad_norm": 3.039140462875366, + "learning_rate": 1.7825905984770467e-05, + "loss": 1.145, + "step": 3788 + }, + { + "epoch": 0.23756975358956675, + "grad_norm": 2.870912790298462, + "learning_rate": 1.782464159796962e-05, + "loss": 1.2082, + "step": 3789 + }, + { + "epoch": 0.23763245344535708, + "grad_norm": 2.7469046115875244, + "learning_rate": 1.782337688848021e-05, + "loss": 1.2625, + "step": 3790 + }, + { + "epoch": 0.2376951533011474, + "grad_norm": 3.142934799194336, + "learning_rate": 1.78221118563544e-05, + "loss": 1.1413, + "step": 3791 + }, + { + "epoch": 0.23775785315693773, + "grad_norm": 3.148899555206299, + "learning_rate": 1.782084650164435e-05, + "loss": 1.2947, + "step": 3792 + }, + { + "epoch": 0.23782055301272806, + "grad_norm": 2.8828868865966797, + "learning_rate": 1.7819580824402253e-05, + "loss": 1.1228, + "step": 3793 + }, + { + "epoch": 0.2378832528685184, + "grad_norm": 2.776996612548828, + "learning_rate": 1.78183148246803e-05, + "loss": 1.1577, + "step": 3794 + }, + { + "epoch": 0.23794595272430874, + "grad_norm": 2.960860252380371, + "learning_rate": 1.7817048502530702e-05, + "loss": 0.964, + "step": 3795 + }, + { + "epoch": 0.23800865258009907, + "grad_norm": 3.278761863708496, + "learning_rate": 1.7815781858005684e-05, + "loss": 1.1293, + "step": 3796 + }, + { + "epoch": 0.2380713524358894, + "grad_norm": 3.2381582260131836, + "learning_rate": 1.7814514891157477e-05, + "loss": 1.0245, + "step": 3797 + }, + { + "epoch": 0.23813405229167972, + "grad_norm": 3.051020383834839, + "learning_rate": 1.781324760203834e-05, + "loss": 0.9308, + "step": 3798 + }, + { + "epoch": 0.23819675214747005, + "grad_norm": 3.114384412765503, + "learning_rate": 1.7811979990700527e-05, + "loss": 1.1129, + "step": 3799 + }, + { + "epoch": 0.2382594520032604, + "grad_norm": 3.1459314823150635, + "learning_rate": 1.781071205719632e-05, + "loss": 1.1543, + "step": 3800 + }, + { + "epoch": 0.23832215185905073, + "grad_norm": 2.943742275238037, + "learning_rate": 1.7809443801578005e-05, + "loss": 1.2536, + "step": 3801 + }, + { + "epoch": 0.23838485171484106, + "grad_norm": 2.499211072921753, + "learning_rate": 1.7808175223897888e-05, + "loss": 1.0936, + "step": 3802 + }, + { + "epoch": 0.23844755157063138, + "grad_norm": 2.937525987625122, + "learning_rate": 1.780690632420828e-05, + "loss": 1.1097, + "step": 3803 + }, + { + "epoch": 0.2385102514264217, + "grad_norm": 3.1163759231567383, + "learning_rate": 1.7805637102561516e-05, + "loss": 1.2324, + "step": 3804 + }, + { + "epoch": 0.23857295128221206, + "grad_norm": 2.7695181369781494, + "learning_rate": 1.780436755900994e-05, + "loss": 1.1218, + "step": 3805 + }, + { + "epoch": 0.2386356511380024, + "grad_norm": 3.2485063076019287, + "learning_rate": 
1.7803097693605903e-05, + "loss": 1.2811, + "step": 3806 + }, + { + "epoch": 0.23869835099379272, + "grad_norm": 3.062424421310425, + "learning_rate": 1.7801827506401775e-05, + "loss": 1.244, + "step": 3807 + }, + { + "epoch": 0.23876105084958305, + "grad_norm": 3.241854429244995, + "learning_rate": 1.7800556997449946e-05, + "loss": 1.3528, + "step": 3808 + }, + { + "epoch": 0.23882375070537337, + "grad_norm": 2.984174966812134, + "learning_rate": 1.77992861668028e-05, + "loss": 1.0175, + "step": 3809 + }, + { + "epoch": 0.2388864505611637, + "grad_norm": 3.0213236808776855, + "learning_rate": 1.7798015014512753e-05, + "loss": 0.9937, + "step": 3810 + }, + { + "epoch": 0.23894915041695405, + "grad_norm": 3.0197901725769043, + "learning_rate": 1.7796743540632226e-05, + "loss": 1.2391, + "step": 3811 + }, + { + "epoch": 0.23901185027274438, + "grad_norm": 3.081200122833252, + "learning_rate": 1.7795471745213654e-05, + "loss": 1.0948, + "step": 3812 + }, + { + "epoch": 0.2390745501285347, + "grad_norm": 2.7410781383514404, + "learning_rate": 1.7794199628309488e-05, + "loss": 1.102, + "step": 3813 + }, + { + "epoch": 0.23913724998432503, + "grad_norm": 3.0472676753997803, + "learning_rate": 1.779292718997219e-05, + "loss": 1.1283, + "step": 3814 + }, + { + "epoch": 0.23919994984011536, + "grad_norm": 3.036102294921875, + "learning_rate": 1.7791654430254236e-05, + "loss": 1.1608, + "step": 3815 + }, + { + "epoch": 0.2392626496959057, + "grad_norm": 2.741490364074707, + "learning_rate": 1.779038134920811e-05, + "loss": 1.2394, + "step": 3816 + }, + { + "epoch": 0.23932534955169604, + "grad_norm": 3.0357394218444824, + "learning_rate": 1.7789107946886315e-05, + "loss": 1.2015, + "step": 3817 + }, + { + "epoch": 0.23938804940748637, + "grad_norm": 2.907041549682617, + "learning_rate": 1.778783422334137e-05, + "loss": 1.0927, + "step": 3818 + }, + { + "epoch": 0.2394507492632767, + "grad_norm": 2.6892244815826416, + "learning_rate": 1.7786560178625802e-05, + "loss": 1.1842, + "step": 3819 + }, + { + "epoch": 0.23951344911906702, + "grad_norm": 2.739777088165283, + "learning_rate": 1.778528581279215e-05, + "loss": 1.066, + "step": 3820 + }, + { + "epoch": 0.23957614897485735, + "grad_norm": 3.105302095413208, + "learning_rate": 1.7784011125892977e-05, + "loss": 1.1143, + "step": 3821 + }, + { + "epoch": 0.23963884883064768, + "grad_norm": 2.7232019901275635, + "learning_rate": 1.778273611798084e-05, + "loss": 1.1942, + "step": 3822 + }, + { + "epoch": 0.23970154868643803, + "grad_norm": 2.6363883018493652, + "learning_rate": 1.7781460789108328e-05, + "loss": 1.156, + "step": 3823 + }, + { + "epoch": 0.23976424854222836, + "grad_norm": 2.828447103500366, + "learning_rate": 1.7780185139328035e-05, + "loss": 1.3795, + "step": 3824 + }, + { + "epoch": 0.23982694839801869, + "grad_norm": 2.90712308883667, + "learning_rate": 1.7778909168692562e-05, + "loss": 1.2191, + "step": 3825 + }, + { + "epoch": 0.239889648253809, + "grad_norm": 2.6985299587249756, + "learning_rate": 1.777763287725454e-05, + "loss": 1.2425, + "step": 3826 + }, + { + "epoch": 0.23995234810959934, + "grad_norm": 2.757195234298706, + "learning_rate": 1.7776356265066595e-05, + "loss": 1.1683, + "step": 3827 + }, + { + "epoch": 0.24001504796538967, + "grad_norm": 2.9279067516326904, + "learning_rate": 1.777507933218138e-05, + "loss": 1.3255, + "step": 3828 + }, + { + "epoch": 0.24007774782118002, + "grad_norm": 2.874755859375, + "learning_rate": 1.777380207865155e-05, + "loss": 1.1298, + "step": 3829 + }, + { + "epoch": 
0.24014044767697035, + "grad_norm": 3.0784666538238525, + "learning_rate": 1.777252450452979e-05, + "loss": 1.2346, + "step": 3830 + }, + { + "epoch": 0.24020314753276067, + "grad_norm": 2.799327850341797, + "learning_rate": 1.7771246609868776e-05, + "loss": 1.2264, + "step": 3831 + }, + { + "epoch": 0.240265847388551, + "grad_norm": 2.9602949619293213, + "learning_rate": 1.7769968394721213e-05, + "loss": 1.0922, + "step": 3832 + }, + { + "epoch": 0.24032854724434133, + "grad_norm": 3.0109684467315674, + "learning_rate": 1.776868985913981e-05, + "loss": 1.257, + "step": 3833 + }, + { + "epoch": 0.24039124710013168, + "grad_norm": 2.7735660076141357, + "learning_rate": 1.77674110031773e-05, + "loss": 1.1233, + "step": 3834 + }, + { + "epoch": 0.240453946955922, + "grad_norm": 2.7875099182128906, + "learning_rate": 1.7766131826886425e-05, + "loss": 1.0356, + "step": 3835 + }, + { + "epoch": 0.24051664681171234, + "grad_norm": 2.730480909347534, + "learning_rate": 1.776485233031993e-05, + "loss": 1.1813, + "step": 3836 + }, + { + "epoch": 0.24057934666750266, + "grad_norm": 2.9169809818267822, + "learning_rate": 1.7763572513530588e-05, + "loss": 1.1673, + "step": 3837 + }, + { + "epoch": 0.240642046523293, + "grad_norm": 3.2564218044281006, + "learning_rate": 1.7762292376571176e-05, + "loss": 1.2138, + "step": 3838 + }, + { + "epoch": 0.24070474637908332, + "grad_norm": 3.037358522415161, + "learning_rate": 1.776101191949449e-05, + "loss": 1.2639, + "step": 3839 + }, + { + "epoch": 0.24076744623487367, + "grad_norm": 2.852370500564575, + "learning_rate": 1.775973114235333e-05, + "loss": 1.0896, + "step": 3840 + }, + { + "epoch": 0.240830146090664, + "grad_norm": 3.18965220451355, + "learning_rate": 1.775845004520052e-05, + "loss": 1.0637, + "step": 3841 + }, + { + "epoch": 0.24089284594645433, + "grad_norm": 2.8080055713653564, + "learning_rate": 1.775716862808889e-05, + "loss": 1.102, + "step": 3842 + }, + { + "epoch": 0.24095554580224465, + "grad_norm": 2.9632952213287354, + "learning_rate": 1.7755886891071287e-05, + "loss": 1.1236, + "step": 3843 + }, + { + "epoch": 0.24101824565803498, + "grad_norm": 2.7907748222351074, + "learning_rate": 1.7754604834200573e-05, + "loss": 1.0533, + "step": 3844 + }, + { + "epoch": 0.2410809455138253, + "grad_norm": 3.3203043937683105, + "learning_rate": 1.7753322457529615e-05, + "loss": 1.1526, + "step": 3845 + }, + { + "epoch": 0.24114364536961566, + "grad_norm": 3.005897045135498, + "learning_rate": 1.77520397611113e-05, + "loss": 1.255, + "step": 3846 + }, + { + "epoch": 0.241206345225406, + "grad_norm": 2.9681527614593506, + "learning_rate": 1.7750756744998527e-05, + "loss": 1.3275, + "step": 3847 + }, + { + "epoch": 0.24126904508119631, + "grad_norm": 2.706369400024414, + "learning_rate": 1.7749473409244205e-05, + "loss": 1.1973, + "step": 3848 + }, + { + "epoch": 0.24133174493698664, + "grad_norm": 2.9686832427978516, + "learning_rate": 1.7748189753901265e-05, + "loss": 1.2539, + "step": 3849 + }, + { + "epoch": 0.24139444479277697, + "grad_norm": 2.9460716247558594, + "learning_rate": 1.774690577902264e-05, + "loss": 1.1843, + "step": 3850 + }, + { + "epoch": 0.2414571446485673, + "grad_norm": 3.131366491317749, + "learning_rate": 1.7745621484661284e-05, + "loss": 1.0814, + "step": 3851 + }, + { + "epoch": 0.24151984450435765, + "grad_norm": 3.032944440841675, + "learning_rate": 1.7744336870870156e-05, + "loss": 1.172, + "step": 3852 + }, + { + "epoch": 0.24158254436014798, + "grad_norm": 2.947030544281006, + "learning_rate": 1.774305193770224e-05, 
+ "loss": 1.1634, + "step": 3853 + }, + { + "epoch": 0.2416452442159383, + "grad_norm": 2.7615387439727783, + "learning_rate": 1.7741766685210522e-05, + "loss": 1.2107, + "step": 3854 + }, + { + "epoch": 0.24170794407172863, + "grad_norm": 2.8413686752319336, + "learning_rate": 1.774048111344801e-05, + "loss": 1.1307, + "step": 3855 + }, + { + "epoch": 0.24177064392751896, + "grad_norm": 3.1622507572174072, + "learning_rate": 1.773919522246772e-05, + "loss": 1.3324, + "step": 3856 + }, + { + "epoch": 0.24183334378330928, + "grad_norm": 3.117241621017456, + "learning_rate": 1.7737909012322676e-05, + "loss": 1.1698, + "step": 3857 + }, + { + "epoch": 0.24189604363909964, + "grad_norm": 3.2802770137786865, + "learning_rate": 1.7736622483065928e-05, + "loss": 1.1306, + "step": 3858 + }, + { + "epoch": 0.24195874349488997, + "grad_norm": 3.1554884910583496, + "learning_rate": 1.773533563475053e-05, + "loss": 1.0481, + "step": 3859 + }, + { + "epoch": 0.2420214433506803, + "grad_norm": 2.762199878692627, + "learning_rate": 1.7734048467429556e-05, + "loss": 1.2314, + "step": 3860 + }, + { + "epoch": 0.24208414320647062, + "grad_norm": 3.15598201751709, + "learning_rate": 1.7732760981156086e-05, + "loss": 1.14, + "step": 3861 + }, + { + "epoch": 0.24214684306226095, + "grad_norm": 2.7641189098358154, + "learning_rate": 1.7731473175983215e-05, + "loss": 1.1915, + "step": 3862 + }, + { + "epoch": 0.2422095429180513, + "grad_norm": 2.898149251937866, + "learning_rate": 1.773018505196405e-05, + "loss": 1.1649, + "step": 3863 + }, + { + "epoch": 0.24227224277384163, + "grad_norm": 2.844223737716675, + "learning_rate": 1.7728896609151718e-05, + "loss": 1.1255, + "step": 3864 + }, + { + "epoch": 0.24233494262963196, + "grad_norm": 2.6570942401885986, + "learning_rate": 1.772760784759935e-05, + "loss": 1.0464, + "step": 3865 + }, + { + "epoch": 0.24239764248542228, + "grad_norm": 3.1842257976531982, + "learning_rate": 1.77263187673601e-05, + "loss": 1.1142, + "step": 3866 + }, + { + "epoch": 0.2424603423412126, + "grad_norm": 2.8021178245544434, + "learning_rate": 1.7725029368487125e-05, + "loss": 1.1356, + "step": 3867 + }, + { + "epoch": 0.24252304219700294, + "grad_norm": 2.7466626167297363, + "learning_rate": 1.77237396510336e-05, + "loss": 1.2097, + "step": 3868 + }, + { + "epoch": 0.2425857420527933, + "grad_norm": 2.6950647830963135, + "learning_rate": 1.7722449615052716e-05, + "loss": 1.143, + "step": 3869 + }, + { + "epoch": 0.24264844190858362, + "grad_norm": 2.9126808643341064, + "learning_rate": 1.7721159260597672e-05, + "loss": 1.1776, + "step": 3870 + }, + { + "epoch": 0.24271114176437394, + "grad_norm": 3.0827293395996094, + "learning_rate": 1.7719868587721683e-05, + "loss": 1.1592, + "step": 3871 + }, + { + "epoch": 0.24277384162016427, + "grad_norm": 2.453096628189087, + "learning_rate": 1.7718577596477976e-05, + "loss": 1.1702, + "step": 3872 + }, + { + "epoch": 0.2428365414759546, + "grad_norm": 3.061572790145874, + "learning_rate": 1.7717286286919796e-05, + "loss": 0.9744, + "step": 3873 + }, + { + "epoch": 0.24289924133174493, + "grad_norm": 2.8372802734375, + "learning_rate": 1.771599465910039e-05, + "loss": 1.2007, + "step": 3874 + }, + { + "epoch": 0.24296194118753528, + "grad_norm": 2.773909091949463, + "learning_rate": 1.7714702713073024e-05, + "loss": 1.3125, + "step": 3875 + }, + { + "epoch": 0.2430246410433256, + "grad_norm": 3.06315541267395, + "learning_rate": 1.7713410448890985e-05, + "loss": 1.1169, + "step": 3876 + }, + { + "epoch": 0.24308734089911593, + "grad_norm": 
2.9362356662750244, + "learning_rate": 1.771211786660756e-05, + "loss": 1.0069, + "step": 3877 + }, + { + "epoch": 0.24315004075490626, + "grad_norm": 2.980656147003174, + "learning_rate": 1.7710824966276056e-05, + "loss": 1.1256, + "step": 3878 + }, + { + "epoch": 0.2432127406106966, + "grad_norm": 3.2512269020080566, + "learning_rate": 1.7709531747949796e-05, + "loss": 1.25, + "step": 3879 + }, + { + "epoch": 0.24327544046648691, + "grad_norm": 3.137532949447632, + "learning_rate": 1.770823821168211e-05, + "loss": 1.1267, + "step": 3880 + }, + { + "epoch": 0.24333814032227727, + "grad_norm": 3.0280728340148926, + "learning_rate": 1.7706944357526344e-05, + "loss": 1.1415, + "step": 3881 + }, + { + "epoch": 0.2434008401780676, + "grad_norm": 2.8186347484588623, + "learning_rate": 1.7705650185535857e-05, + "loss": 1.2053, + "step": 3882 + }, + { + "epoch": 0.24346354003385792, + "grad_norm": 3.314753293991089, + "learning_rate": 1.7704355695764016e-05, + "loss": 1.1921, + "step": 3883 + }, + { + "epoch": 0.24352623988964825, + "grad_norm": 2.934279680252075, + "learning_rate": 1.7703060888264215e-05, + "loss": 1.0789, + "step": 3884 + }, + { + "epoch": 0.24358893974543858, + "grad_norm": 3.1842706203460693, + "learning_rate": 1.7701765763089846e-05, + "loss": 1.1546, + "step": 3885 + }, + { + "epoch": 0.24365163960122893, + "grad_norm": 3.295222759246826, + "learning_rate": 1.7700470320294317e-05, + "loss": 1.2824, + "step": 3886 + }, + { + "epoch": 0.24371433945701926, + "grad_norm": 3.0356228351593018, + "learning_rate": 1.7699174559931056e-05, + "loss": 0.9917, + "step": 3887 + }, + { + "epoch": 0.24377703931280958, + "grad_norm": 2.887071371078491, + "learning_rate": 1.76978784820535e-05, + "loss": 1.1162, + "step": 3888 + }, + { + "epoch": 0.2438397391685999, + "grad_norm": 3.408728837966919, + "learning_rate": 1.7696582086715103e-05, + "loss": 1.0812, + "step": 3889 + }, + { + "epoch": 0.24390243902439024, + "grad_norm": 3.0996928215026855, + "learning_rate": 1.7695285373969317e-05, + "loss": 1.2981, + "step": 3890 + }, + { + "epoch": 0.24396513888018057, + "grad_norm": 3.4062743186950684, + "learning_rate": 1.7693988343869634e-05, + "loss": 1.0151, + "step": 3891 + }, + { + "epoch": 0.24402783873597092, + "grad_norm": 2.906155824661255, + "learning_rate": 1.769269099646953e-05, + "loss": 1.1796, + "step": 3892 + }, + { + "epoch": 0.24409053859176125, + "grad_norm": 3.1546969413757324, + "learning_rate": 1.7691393331822517e-05, + "loss": 1.2525, + "step": 3893 + }, + { + "epoch": 0.24415323844755157, + "grad_norm": 2.964385747909546, + "learning_rate": 1.769009534998211e-05, + "loss": 1.1928, + "step": 3894 + }, + { + "epoch": 0.2442159383033419, + "grad_norm": 2.8932313919067383, + "learning_rate": 1.768879705100183e-05, + "loss": 1.1325, + "step": 3895 + }, + { + "epoch": 0.24427863815913223, + "grad_norm": 2.866359233856201, + "learning_rate": 1.7687498434935224e-05, + "loss": 1.1755, + "step": 3896 + }, + { + "epoch": 0.24434133801492255, + "grad_norm": 2.9927263259887695, + "learning_rate": 1.7686199501835844e-05, + "loss": 1.2039, + "step": 3897 + }, + { + "epoch": 0.2444040378707129, + "grad_norm": 2.906463861465454, + "learning_rate": 1.7684900251757268e-05, + "loss": 1.1174, + "step": 3898 + }, + { + "epoch": 0.24446673772650324, + "grad_norm": 2.737276077270508, + "learning_rate": 1.7683600684753067e-05, + "loss": 1.1053, + "step": 3899 + }, + { + "epoch": 0.24452943758229356, + "grad_norm": 2.961165189743042, + "learning_rate": 1.7682300800876836e-05, + "loss": 1.3308, + 
"step": 3900 + }, + { + "epoch": 0.2445921374380839, + "grad_norm": 3.0103225708007812, + "learning_rate": 1.7681000600182185e-05, + "loss": 1.1556, + "step": 3901 + }, + { + "epoch": 0.24465483729387422, + "grad_norm": 2.819223642349243, + "learning_rate": 1.7679700082722738e-05, + "loss": 1.1535, + "step": 3902 + }, + { + "epoch": 0.24471753714966454, + "grad_norm": 2.956170082092285, + "learning_rate": 1.7678399248552122e-05, + "loss": 1.1905, + "step": 3903 + }, + { + "epoch": 0.2447802370054549, + "grad_norm": 3.0190324783325195, + "learning_rate": 1.7677098097723985e-05, + "loss": 1.1398, + "step": 3904 + }, + { + "epoch": 0.24484293686124523, + "grad_norm": 2.859529733657837, + "learning_rate": 1.7675796630291985e-05, + "loss": 1.0381, + "step": 3905 + }, + { + "epoch": 0.24490563671703555, + "grad_norm": 2.903684616088867, + "learning_rate": 1.76744948463098e-05, + "loss": 1.1263, + "step": 3906 + }, + { + "epoch": 0.24496833657282588, + "grad_norm": 2.9669573307037354, + "learning_rate": 1.767319274583111e-05, + "loss": 1.1215, + "step": 3907 + }, + { + "epoch": 0.2450310364286162, + "grad_norm": 2.8950839042663574, + "learning_rate": 1.7671890328909617e-05, + "loss": 1.1795, + "step": 3908 + }, + { + "epoch": 0.24509373628440653, + "grad_norm": 3.0871999263763428, + "learning_rate": 1.7670587595599034e-05, + "loss": 1.1205, + "step": 3909 + }, + { + "epoch": 0.2451564361401969, + "grad_norm": 2.9507954120635986, + "learning_rate": 1.7669284545953084e-05, + "loss": 1.1785, + "step": 3910 + }, + { + "epoch": 0.24521913599598721, + "grad_norm": 2.946661949157715, + "learning_rate": 1.7667981180025498e-05, + "loss": 1.2415, + "step": 3911 + }, + { + "epoch": 0.24528183585177754, + "grad_norm": 2.855513572692871, + "learning_rate": 1.766667749787004e-05, + "loss": 1.2055, + "step": 3912 + }, + { + "epoch": 0.24534453570756787, + "grad_norm": 2.842237949371338, + "learning_rate": 1.7665373499540464e-05, + "loss": 1.0773, + "step": 3913 + }, + { + "epoch": 0.2454072355633582, + "grad_norm": 3.068077325820923, + "learning_rate": 1.766406918509055e-05, + "loss": 1.0892, + "step": 3914 + }, + { + "epoch": 0.24546993541914855, + "grad_norm": 3.087428569793701, + "learning_rate": 1.766276455457409e-05, + "loss": 1.0034, + "step": 3915 + }, + { + "epoch": 0.24553263527493888, + "grad_norm": 3.087799549102783, + "learning_rate": 1.7661459608044883e-05, + "loss": 1.1347, + "step": 3916 + }, + { + "epoch": 0.2455953351307292, + "grad_norm": 2.963413953781128, + "learning_rate": 1.7660154345556752e-05, + "loss": 1.2385, + "step": 3917 + }, + { + "epoch": 0.24565803498651953, + "grad_norm": 2.946051597595215, + "learning_rate": 1.7658848767163516e-05, + "loss": 1.1071, + "step": 3918 + }, + { + "epoch": 0.24572073484230986, + "grad_norm": 2.8250203132629395, + "learning_rate": 1.7657542872919023e-05, + "loss": 1.0847, + "step": 3919 + }, + { + "epoch": 0.24578343469810018, + "grad_norm": 2.947087526321411, + "learning_rate": 1.765623666287713e-05, + "loss": 1.222, + "step": 3920 + }, + { + "epoch": 0.24584613455389054, + "grad_norm": 3.368070363998413, + "learning_rate": 1.7654930137091698e-05, + "loss": 1.1175, + "step": 3921 + }, + { + "epoch": 0.24590883440968087, + "grad_norm": 3.134331464767456, + "learning_rate": 1.765362329561662e-05, + "loss": 1.1376, + "step": 3922 + }, + { + "epoch": 0.2459715342654712, + "grad_norm": 2.898284673690796, + "learning_rate": 1.7652316138505775e-05, + "loss": 1.079, + "step": 3923 + }, + { + "epoch": 0.24603423412126152, + "grad_norm": 2.7939834594726562, + 
"learning_rate": 1.7651008665813083e-05, + "loss": 1.0355, + "step": 3924 + }, + { + "epoch": 0.24609693397705185, + "grad_norm": 2.9844391345977783, + "learning_rate": 1.764970087759246e-05, + "loss": 1.112, + "step": 3925 + }, + { + "epoch": 0.24615963383284217, + "grad_norm": 3.1182305812835693, + "learning_rate": 1.7648392773897836e-05, + "loss": 1.3009, + "step": 3926 + }, + { + "epoch": 0.24622233368863253, + "grad_norm": 3.1177783012390137, + "learning_rate": 1.7647084354783164e-05, + "loss": 1.0599, + "step": 3927 + }, + { + "epoch": 0.24628503354442285, + "grad_norm": 3.070773124694824, + "learning_rate": 1.7645775620302395e-05, + "loss": 1.1752, + "step": 3928 + }, + { + "epoch": 0.24634773340021318, + "grad_norm": 3.1038379669189453, + "learning_rate": 1.7644466570509508e-05, + "loss": 1.1149, + "step": 3929 + }, + { + "epoch": 0.2464104332560035, + "grad_norm": 2.687263250350952, + "learning_rate": 1.7643157205458483e-05, + "loss": 1.0652, + "step": 3930 + }, + { + "epoch": 0.24647313311179384, + "grad_norm": 3.210946798324585, + "learning_rate": 1.7641847525203324e-05, + "loss": 1.2477, + "step": 3931 + }, + { + "epoch": 0.24653583296758416, + "grad_norm": 2.754873037338257, + "learning_rate": 1.7640537529798037e-05, + "loss": 1.1875, + "step": 3932 + }, + { + "epoch": 0.24659853282337452, + "grad_norm": 2.919154644012451, + "learning_rate": 1.763922721929665e-05, + "loss": 1.1123, + "step": 3933 + }, + { + "epoch": 0.24666123267916484, + "grad_norm": 3.16851544380188, + "learning_rate": 1.7637916593753203e-05, + "loss": 1.1333, + "step": 3934 + }, + { + "epoch": 0.24672393253495517, + "grad_norm": 2.7668354511260986, + "learning_rate": 1.7636605653221736e-05, + "loss": 1.1112, + "step": 3935 + }, + { + "epoch": 0.2467866323907455, + "grad_norm": 3.4912827014923096, + "learning_rate": 1.7635294397756322e-05, + "loss": 1.1948, + "step": 3936 + }, + { + "epoch": 0.24684933224653582, + "grad_norm": 3.0799193382263184, + "learning_rate": 1.763398282741103e-05, + "loss": 1.1102, + "step": 3937 + }, + { + "epoch": 0.24691203210232615, + "grad_norm": 3.165498733520508, + "learning_rate": 1.7632670942239958e-05, + "loss": 1.3208, + "step": 3938 + }, + { + "epoch": 0.2469747319581165, + "grad_norm": 3.122622489929199, + "learning_rate": 1.76313587422972e-05, + "loss": 1.2198, + "step": 3939 + }, + { + "epoch": 0.24703743181390683, + "grad_norm": 3.077213764190674, + "learning_rate": 1.7630046227636874e-05, + "loss": 1.1087, + "step": 3940 + }, + { + "epoch": 0.24710013166969716, + "grad_norm": 2.7431657314300537, + "learning_rate": 1.7628733398313106e-05, + "loss": 1.2546, + "step": 3941 + }, + { + "epoch": 0.2471628315254875, + "grad_norm": 2.6185762882232666, + "learning_rate": 1.762742025438004e-05, + "loss": 1.2314, + "step": 3942 + }, + { + "epoch": 0.2472255313812778, + "grad_norm": 3.0729379653930664, + "learning_rate": 1.7626106795891832e-05, + "loss": 1.0586, + "step": 3943 + }, + { + "epoch": 0.24728823123706817, + "grad_norm": 2.761321544647217, + "learning_rate": 1.7624793022902648e-05, + "loss": 1.0438, + "step": 3944 + }, + { + "epoch": 0.2473509310928585, + "grad_norm": 2.8622193336486816, + "learning_rate": 1.7623478935466668e-05, + "loss": 1.2011, + "step": 3945 + }, + { + "epoch": 0.24741363094864882, + "grad_norm": 3.3125619888305664, + "learning_rate": 1.762216453363808e-05, + "loss": 1.2644, + "step": 3946 + }, + { + "epoch": 0.24747633080443915, + "grad_norm": 3.5588338375091553, + "learning_rate": 1.7620849817471094e-05, + "loss": 1.1736, + "step": 3947 + }, + { 
+ "epoch": 0.24753903066022948, + "grad_norm": 3.2623701095581055, + "learning_rate": 1.761953478701993e-05, + "loss": 1.2007, + "step": 3948 + }, + { + "epoch": 0.2476017305160198, + "grad_norm": 2.7856783866882324, + "learning_rate": 1.761821944233882e-05, + "loss": 1.2602, + "step": 3949 + }, + { + "epoch": 0.24766443037181016, + "grad_norm": 2.8198764324188232, + "learning_rate": 1.7616903783482002e-05, + "loss": 1.2388, + "step": 3950 + }, + { + "epoch": 0.24772713022760048, + "grad_norm": 2.850133180618286, + "learning_rate": 1.7615587810503742e-05, + "loss": 1.339, + "step": 3951 + }, + { + "epoch": 0.2477898300833908, + "grad_norm": 2.7030367851257324, + "learning_rate": 1.761427152345831e-05, + "loss": 1.0454, + "step": 3952 + }, + { + "epoch": 0.24785252993918114, + "grad_norm": 2.7411766052246094, + "learning_rate": 1.7612954922399987e-05, + "loss": 1.1216, + "step": 3953 + }, + { + "epoch": 0.24791522979497146, + "grad_norm": 2.786980152130127, + "learning_rate": 1.761163800738307e-05, + "loss": 1.076, + "step": 3954 + }, + { + "epoch": 0.2479779296507618, + "grad_norm": 2.9401702880859375, + "learning_rate": 1.761032077846187e-05, + "loss": 1.2001, + "step": 3955 + }, + { + "epoch": 0.24804062950655215, + "grad_norm": 2.6825342178344727, + "learning_rate": 1.7609003235690707e-05, + "loss": 1.1378, + "step": 3956 + }, + { + "epoch": 0.24810332936234247, + "grad_norm": 2.9771568775177, + "learning_rate": 1.760768537912392e-05, + "loss": 1.3334, + "step": 3957 + }, + { + "epoch": 0.2481660292181328, + "grad_norm": 2.9025015830993652, + "learning_rate": 1.7606367208815856e-05, + "loss": 1.0859, + "step": 3958 + }, + { + "epoch": 0.24822872907392313, + "grad_norm": 3.1369330883026123, + "learning_rate": 1.760504872482088e-05, + "loss": 0.959, + "step": 3959 + }, + { + "epoch": 0.24829142892971345, + "grad_norm": 2.896876573562622, + "learning_rate": 1.7603729927193355e-05, + "loss": 1.1727, + "step": 3960 + }, + { + "epoch": 0.24835412878550378, + "grad_norm": 2.636173963546753, + "learning_rate": 1.7602410815987675e-05, + "loss": 1.0854, + "step": 3961 + }, + { + "epoch": 0.24841682864129414, + "grad_norm": 2.65647292137146, + "learning_rate": 1.7601091391258246e-05, + "loss": 1.1294, + "step": 3962 + }, + { + "epoch": 0.24847952849708446, + "grad_norm": 3.0226824283599854, + "learning_rate": 1.7599771653059473e-05, + "loss": 1.1019, + "step": 3963 + }, + { + "epoch": 0.2485422283528748, + "grad_norm": 2.7122437953948975, + "learning_rate": 1.759845160144579e-05, + "loss": 1.1564, + "step": 3964 + }, + { + "epoch": 0.24860492820866512, + "grad_norm": 3.1565232276916504, + "learning_rate": 1.759713123647163e-05, + "loss": 1.0634, + "step": 3965 + }, + { + "epoch": 0.24866762806445544, + "grad_norm": 3.2240657806396484, + "learning_rate": 1.7595810558191442e-05, + "loss": 1.1631, + "step": 3966 + }, + { + "epoch": 0.2487303279202458, + "grad_norm": 3.301238536834717, + "learning_rate": 1.7594489566659698e-05, + "loss": 1.1938, + "step": 3967 + }, + { + "epoch": 0.24879302777603612, + "grad_norm": 3.107604503631592, + "learning_rate": 1.759316826193087e-05, + "loss": 1.093, + "step": 3968 + }, + { + "epoch": 0.24885572763182645, + "grad_norm": 3.1334893703460693, + "learning_rate": 1.7591846644059454e-05, + "loss": 1.1304, + "step": 3969 + }, + { + "epoch": 0.24891842748761678, + "grad_norm": 3.577333450317383, + "learning_rate": 1.7590524713099948e-05, + "loss": 1.0568, + "step": 3970 + }, + { + "epoch": 0.2489811273434071, + "grad_norm": 3.0202388763427734, + "learning_rate": 
1.7589202469106873e-05, + "loss": 1.1323, + "step": 3971 + }, + { + "epoch": 0.24904382719919743, + "grad_norm": 3.148299217224121, + "learning_rate": 1.758787991213476e-05, + "loss": 1.0151, + "step": 3972 + }, + { + "epoch": 0.2491065270549878, + "grad_norm": 3.3377904891967773, + "learning_rate": 1.7586557042238143e-05, + "loss": 1.1425, + "step": 3973 + }, + { + "epoch": 0.2491692269107781, + "grad_norm": 3.3678760528564453, + "learning_rate": 1.758523385947158e-05, + "loss": 1.0534, + "step": 3974 + }, + { + "epoch": 0.24923192676656844, + "grad_norm": 2.81699275970459, + "learning_rate": 1.758391036388965e-05, + "loss": 1.1454, + "step": 3975 + }, + { + "epoch": 0.24929462662235877, + "grad_norm": 3.3136157989501953, + "learning_rate": 1.758258655554692e-05, + "loss": 1.1054, + "step": 3976 + }, + { + "epoch": 0.2493573264781491, + "grad_norm": 3.1645045280456543, + "learning_rate": 1.758126243449799e-05, + "loss": 1.2468, + "step": 3977 + }, + { + "epoch": 0.24942002633393942, + "grad_norm": 2.915182590484619, + "learning_rate": 1.7579938000797466e-05, + "loss": 0.9788, + "step": 3978 + }, + { + "epoch": 0.24948272618972978, + "grad_norm": 3.0280025005340576, + "learning_rate": 1.757861325449997e-05, + "loss": 1.2609, + "step": 3979 + }, + { + "epoch": 0.2495454260455201, + "grad_norm": 3.0601584911346436, + "learning_rate": 1.757728819566013e-05, + "loss": 1.2697, + "step": 3980 + }, + { + "epoch": 0.24960812590131043, + "grad_norm": 2.643019199371338, + "learning_rate": 1.7575962824332595e-05, + "loss": 1.2224, + "step": 3981 + }, + { + "epoch": 0.24967082575710076, + "grad_norm": 3.085860013961792, + "learning_rate": 1.757463714057202e-05, + "loss": 1.0717, + "step": 3982 + }, + { + "epoch": 0.24973352561289108, + "grad_norm": 3.027271270751953, + "learning_rate": 1.757331114443308e-05, + "loss": 1.1065, + "step": 3983 + }, + { + "epoch": 0.2497962254686814, + "grad_norm": 3.0279510021209717, + "learning_rate": 1.757198483597046e-05, + "loss": 1.219, + "step": 3984 + }, + { + "epoch": 0.24985892532447176, + "grad_norm": 2.983430862426758, + "learning_rate": 1.757065821523885e-05, + "loss": 1.0641, + "step": 3985 + }, + { + "epoch": 0.2499216251802621, + "grad_norm": 3.087249755859375, + "learning_rate": 1.7569331282292967e-05, + "loss": 1.0019, + "step": 3986 + }, + { + "epoch": 0.24998432503605242, + "grad_norm": 2.7662246227264404, + "learning_rate": 1.7568004037187534e-05, + "loss": 1.1335, + "step": 3987 + }, + { + "epoch": 0.2500470248918428, + "grad_norm": 2.8193771839141846, + "learning_rate": 1.756667647997728e-05, + "loss": 1.191, + "step": 3988 + }, + { + "epoch": 0.2501097247476331, + "grad_norm": 2.710176467895508, + "learning_rate": 1.7565348610716963e-05, + "loss": 1.0897, + "step": 3989 + }, + { + "epoch": 0.2501724246034234, + "grad_norm": 2.6303508281707764, + "learning_rate": 1.756402042946133e-05, + "loss": 1.1068, + "step": 3990 + }, + { + "epoch": 0.25023512445921375, + "grad_norm": 2.9269447326660156, + "learning_rate": 1.7562691936265174e-05, + "loss": 1.18, + "step": 3991 + }, + { + "epoch": 0.2502978243150041, + "grad_norm": 2.515939712524414, + "learning_rate": 1.7561363131183265e-05, + "loss": 0.9862, + "step": 3992 + }, + { + "epoch": 0.2503605241707944, + "grad_norm": 2.7894160747528076, + "learning_rate": 1.7560034014270412e-05, + "loss": 1.238, + "step": 3993 + }, + { + "epoch": 0.25042322402658473, + "grad_norm": 2.9076039791107178, + "learning_rate": 1.7558704585581428e-05, + "loss": 1.1193, + "step": 3994 + }, + { + "epoch": 0.25048592388237506, 
+ "grad_norm": 2.810685873031616, + "learning_rate": 1.7557374845171136e-05, + "loss": 1.2376, + "step": 3995 + }, + { + "epoch": 0.2505486237381654, + "grad_norm": 2.7092936038970947, + "learning_rate": 1.7556044793094377e-05, + "loss": 1.1163, + "step": 3996 + }, + { + "epoch": 0.2506113235939557, + "grad_norm": 2.7544524669647217, + "learning_rate": 1.7554714429405997e-05, + "loss": 1.1906, + "step": 3997 + }, + { + "epoch": 0.25067402344974604, + "grad_norm": 2.6824772357940674, + "learning_rate": 1.7553383754160864e-05, + "loss": 1.2163, + "step": 3998 + }, + { + "epoch": 0.2507367233055364, + "grad_norm": 2.7236592769622803, + "learning_rate": 1.755205276741386e-05, + "loss": 1.1045, + "step": 3999 + }, + { + "epoch": 0.25079942316132675, + "grad_norm": 3.113518714904785, + "learning_rate": 1.7550721469219866e-05, + "loss": 1.1866, + "step": 4000 + }, + { + "epoch": 0.25079942316132675, + "eval_loss": 1.1720733642578125, + "eval_runtime": 143.7906, + "eval_samples_per_second": 4.381, + "eval_steps_per_second": 1.099, + "step": 4000 + }, + { + "epoch": 0.2508621230171171, + "grad_norm": 3.00426983833313, + "learning_rate": 1.754938985963379e-05, + "loss": 1.2742, + "step": 4001 + }, + { + "epoch": 0.2509248228729074, + "grad_norm": 2.899606227874756, + "learning_rate": 1.7548057938710547e-05, + "loss": 1.2057, + "step": 4002 + }, + { + "epoch": 0.25098752272869773, + "grad_norm": 3.3578357696533203, + "learning_rate": 1.7546725706505063e-05, + "loss": 1.2108, + "step": 4003 + }, + { + "epoch": 0.25105022258448806, + "grad_norm": 2.851710081100464, + "learning_rate": 1.7545393163072285e-05, + "loss": 1.0062, + "step": 4004 + }, + { + "epoch": 0.2511129224402784, + "grad_norm": 3.133490562438965, + "learning_rate": 1.754406030846716e-05, + "loss": 1.1654, + "step": 4005 + }, + { + "epoch": 0.2511756222960687, + "grad_norm": 2.861103057861328, + "learning_rate": 1.7542727142744656e-05, + "loss": 1.175, + "step": 4006 + }, + { + "epoch": 0.25123832215185904, + "grad_norm": 3.083064317703247, + "learning_rate": 1.754139366595976e-05, + "loss": 1.0794, + "step": 4007 + }, + { + "epoch": 0.25130102200764937, + "grad_norm": 2.6803088188171387, + "learning_rate": 1.7540059878167462e-05, + "loss": 1.3378, + "step": 4008 + }, + { + "epoch": 0.2513637218634397, + "grad_norm": 2.7546679973602295, + "learning_rate": 1.753872577942276e-05, + "loss": 0.9709, + "step": 4009 + }, + { + "epoch": 0.25142642171923, + "grad_norm": 2.9646964073181152, + "learning_rate": 1.753739136978068e-05, + "loss": 1.1336, + "step": 4010 + }, + { + "epoch": 0.2514891215750204, + "grad_norm": 3.2337522506713867, + "learning_rate": 1.7536056649296253e-05, + "loss": 1.0539, + "step": 4011 + }, + { + "epoch": 0.25155182143081073, + "grad_norm": 2.8686861991882324, + "learning_rate": 1.7534721618024516e-05, + "loss": 1.1802, + "step": 4012 + }, + { + "epoch": 0.25161452128660106, + "grad_norm": 3.278081178665161, + "learning_rate": 1.753338627602053e-05, + "loss": 1.0309, + "step": 4013 + }, + { + "epoch": 0.2516772211423914, + "grad_norm": 2.7933366298675537, + "learning_rate": 1.753205062333937e-05, + "loss": 0.9271, + "step": 4014 + }, + { + "epoch": 0.2517399209981817, + "grad_norm": 2.9589319229125977, + "learning_rate": 1.7530714660036112e-05, + "loss": 1.2552, + "step": 4015 + }, + { + "epoch": 0.25180262085397204, + "grad_norm": 2.8841516971588135, + "learning_rate": 1.752937838616585e-05, + "loss": 1.2888, + "step": 4016 + }, + { + "epoch": 0.25186532070976236, + "grad_norm": 2.9638378620147705, + "learning_rate": 
1.7528041801783696e-05, + "loss": 1.3098, + "step": 4017 + }, + { + "epoch": 0.2519280205655527, + "grad_norm": 3.1591103076934814, + "learning_rate": 1.752670490694477e-05, + "loss": 1.2283, + "step": 4018 + }, + { + "epoch": 0.251990720421343, + "grad_norm": 3.1263132095336914, + "learning_rate": 1.7525367701704204e-05, + "loss": 1.2155, + "step": 4019 + }, + { + "epoch": 0.25205342027713334, + "grad_norm": 3.228267192840576, + "learning_rate": 1.7524030186117146e-05, + "loss": 1.2963, + "step": 4020 + }, + { + "epoch": 0.25211612013292367, + "grad_norm": 2.8388357162475586, + "learning_rate": 1.7522692360238754e-05, + "loss": 1.1272, + "step": 4021 + }, + { + "epoch": 0.25217881998871405, + "grad_norm": 2.933638334274292, + "learning_rate": 1.7521354224124202e-05, + "loss": 1.3478, + "step": 4022 + }, + { + "epoch": 0.2522415198445044, + "grad_norm": 3.075258493423462, + "learning_rate": 1.752001577782867e-05, + "loss": 1.3397, + "step": 4023 + }, + { + "epoch": 0.2523042197002947, + "grad_norm": 3.039623975753784, + "learning_rate": 1.751867702140736e-05, + "loss": 1.1372, + "step": 4024 + }, + { + "epoch": 0.25236691955608503, + "grad_norm": 2.8698225021362305, + "learning_rate": 1.7517337954915485e-05, + "loss": 1.1077, + "step": 4025 + }, + { + "epoch": 0.25242961941187536, + "grad_norm": 3.211581230163574, + "learning_rate": 1.7515998578408263e-05, + "loss": 1.0357, + "step": 4026 + }, + { + "epoch": 0.2524923192676657, + "grad_norm": 3.0643372535705566, + "learning_rate": 1.751465889194093e-05, + "loss": 1.2549, + "step": 4027 + }, + { + "epoch": 0.252555019123456, + "grad_norm": 2.967808961868286, + "learning_rate": 1.7513318895568734e-05, + "loss": 1.2343, + "step": 4028 + }, + { + "epoch": 0.25261771897924634, + "grad_norm": 2.9336233139038086, + "learning_rate": 1.751197858934694e-05, + "loss": 1.206, + "step": 4029 + }, + { + "epoch": 0.25268041883503667, + "grad_norm": 2.691505193710327, + "learning_rate": 1.7510637973330824e-05, + "loss": 1.0652, + "step": 4030 + }, + { + "epoch": 0.252743118690827, + "grad_norm": 2.7956433296203613, + "learning_rate": 1.750929704757567e-05, + "loss": 1.149, + "step": 4031 + }, + { + "epoch": 0.2528058185466173, + "grad_norm": 3.188903570175171, + "learning_rate": 1.7507955812136775e-05, + "loss": 1.1534, + "step": 4032 + }, + { + "epoch": 0.25286851840240765, + "grad_norm": 3.168375253677368, + "learning_rate": 1.7506614267069452e-05, + "loss": 1.093, + "step": 4033 + }, + { + "epoch": 0.25293121825819803, + "grad_norm": 3.100299835205078, + "learning_rate": 1.7505272412429033e-05, + "loss": 1.0576, + "step": 4034 + }, + { + "epoch": 0.25299391811398836, + "grad_norm": 3.061707019805908, + "learning_rate": 1.750393024827085e-05, + "loss": 1.0621, + "step": 4035 + }, + { + "epoch": 0.2530566179697787, + "grad_norm": 2.7065823078155518, + "learning_rate": 1.750258777465026e-05, + "loss": 1.2331, + "step": 4036 + }, + { + "epoch": 0.253119317825569, + "grad_norm": 2.946404457092285, + "learning_rate": 1.7501244991622618e-05, + "loss": 1.184, + "step": 4037 + }, + { + "epoch": 0.25318201768135934, + "grad_norm": 3.2165355682373047, + "learning_rate": 1.7499901899243307e-05, + "loss": 1.1857, + "step": 4038 + }, + { + "epoch": 0.25324471753714967, + "grad_norm": 3.1896109580993652, + "learning_rate": 1.749855849756771e-05, + "loss": 1.1836, + "step": 4039 + }, + { + "epoch": 0.25330741739294, + "grad_norm": 2.9184107780456543, + "learning_rate": 1.749721478665124e-05, + "loss": 1.1577, + "step": 4040 + }, + { + "epoch": 0.2533701172487303, + 
"grad_norm": 3.0514156818389893, + "learning_rate": 1.74958707665493e-05, + "loss": 1.1931, + "step": 4041 + }, + { + "epoch": 0.25343281710452065, + "grad_norm": 3.1845991611480713, + "learning_rate": 1.749452643731732e-05, + "loss": 1.1908, + "step": 4042 + }, + { + "epoch": 0.253495516960311, + "grad_norm": 3.112719774246216, + "learning_rate": 1.7493181799010748e-05, + "loss": 1.211, + "step": 4043 + }, + { + "epoch": 0.2535582168161013, + "grad_norm": 2.671642541885376, + "learning_rate": 1.7491836851685028e-05, + "loss": 1.1195, + "step": 4044 + }, + { + "epoch": 0.2536209166718916, + "grad_norm": 3.1073672771453857, + "learning_rate": 1.7490491595395634e-05, + "loss": 1.2776, + "step": 4045 + }, + { + "epoch": 0.253683616527682, + "grad_norm": 3.046452045440674, + "learning_rate": 1.7489146030198033e-05, + "loss": 1.0061, + "step": 4046 + }, + { + "epoch": 0.25374631638347234, + "grad_norm": 3.043562412261963, + "learning_rate": 1.7487800156147727e-05, + "loss": 1.1367, + "step": 4047 + }, + { + "epoch": 0.25380901623926266, + "grad_norm": 2.930220603942871, + "learning_rate": 1.7486453973300214e-05, + "loss": 1.2997, + "step": 4048 + }, + { + "epoch": 0.253871716095053, + "grad_norm": 2.480196237564087, + "learning_rate": 1.7485107481711014e-05, + "loss": 1.1862, + "step": 4049 + }, + { + "epoch": 0.2539344159508433, + "grad_norm": 2.920713186264038, + "learning_rate": 1.7483760681435652e-05, + "loss": 1.1959, + "step": 4050 + }, + { + "epoch": 0.25399711580663364, + "grad_norm": 3.256801128387451, + "learning_rate": 1.7482413572529674e-05, + "loss": 1.2097, + "step": 4051 + }, + { + "epoch": 0.25405981566242397, + "grad_norm": 2.7189829349517822, + "learning_rate": 1.748106615504863e-05, + "loss": 1.2269, + "step": 4052 + }, + { + "epoch": 0.2541225155182143, + "grad_norm": 2.931955575942993, + "learning_rate": 1.7479718429048097e-05, + "loss": 1.2357, + "step": 4053 + }, + { + "epoch": 0.2541852153740046, + "grad_norm": 3.0431783199310303, + "learning_rate": 1.7478370394583647e-05, + "loss": 1.2746, + "step": 4054 + }, + { + "epoch": 0.25424791522979495, + "grad_norm": 3.1598803997039795, + "learning_rate": 1.7477022051710873e-05, + "loss": 1.047, + "step": 4055 + }, + { + "epoch": 0.2543106150855853, + "grad_norm": 3.1444754600524902, + "learning_rate": 1.7475673400485386e-05, + "loss": 0.9361, + "step": 4056 + }, + { + "epoch": 0.25437331494137566, + "grad_norm": 2.712735176086426, + "learning_rate": 1.74743244409628e-05, + "loss": 1.1346, + "step": 4057 + }, + { + "epoch": 0.254436014797166, + "grad_norm": 2.9044559001922607, + "learning_rate": 1.7472975173198748e-05, + "loss": 1.2896, + "step": 4058 + }, + { + "epoch": 0.2544987146529563, + "grad_norm": 2.896153211593628, + "learning_rate": 1.7471625597248873e-05, + "loss": 1.1726, + "step": 4059 + }, + { + "epoch": 0.25456141450874664, + "grad_norm": 2.4448394775390625, + "learning_rate": 1.7470275713168835e-05, + "loss": 1.3081, + "step": 4060 + }, + { + "epoch": 0.25462411436453697, + "grad_norm": 2.9300315380096436, + "learning_rate": 1.7468925521014296e-05, + "loss": 1.2042, + "step": 4061 + }, + { + "epoch": 0.2546868142203273, + "grad_norm": 2.8483853340148926, + "learning_rate": 1.7467575020840944e-05, + "loss": 1.2361, + "step": 4062 + }, + { + "epoch": 0.2547495140761176, + "grad_norm": 2.764068365097046, + "learning_rate": 1.7466224212704476e-05, + "loss": 1.1476, + "step": 4063 + }, + { + "epoch": 0.25481221393190795, + "grad_norm": 2.8496408462524414, + "learning_rate": 1.746487309666059e-05, + "loss": 1.0973, + 
"step": 4064 + }, + { + "epoch": 0.2548749137876983, + "grad_norm": 2.879873275756836, + "learning_rate": 1.7463521672765013e-05, + "loss": 1.0447, + "step": 4065 + }, + { + "epoch": 0.2549376136434886, + "grad_norm": 2.7206785678863525, + "learning_rate": 1.7462169941073478e-05, + "loss": 1.2476, + "step": 4066 + }, + { + "epoch": 0.25500031349927893, + "grad_norm": 2.819780111312866, + "learning_rate": 1.7460817901641728e-05, + "loss": 1.0772, + "step": 4067 + }, + { + "epoch": 0.25506301335506926, + "grad_norm": 2.787397861480713, + "learning_rate": 1.745946555452552e-05, + "loss": 1.0328, + "step": 4068 + }, + { + "epoch": 0.25512571321085964, + "grad_norm": 3.2749998569488525, + "learning_rate": 1.7458112899780627e-05, + "loss": 1.1079, + "step": 4069 + }, + { + "epoch": 0.25518841306664997, + "grad_norm": 2.8733174800872803, + "learning_rate": 1.7456759937462835e-05, + "loss": 1.2988, + "step": 4070 + }, + { + "epoch": 0.2552511129224403, + "grad_norm": 3.430624485015869, + "learning_rate": 1.7455406667627936e-05, + "loss": 1.232, + "step": 4071 + }, + { + "epoch": 0.2553138127782306, + "grad_norm": 3.051137685775757, + "learning_rate": 1.7454053090331738e-05, + "loss": 1.149, + "step": 4072 + }, + { + "epoch": 0.25537651263402095, + "grad_norm": 3.3431520462036133, + "learning_rate": 1.745269920563007e-05, + "loss": 1.2922, + "step": 4073 + }, + { + "epoch": 0.2554392124898113, + "grad_norm": 3.13134765625, + "learning_rate": 1.7451345013578758e-05, + "loss": 0.9529, + "step": 4074 + }, + { + "epoch": 0.2555019123456016, + "grad_norm": 3.1808345317840576, + "learning_rate": 1.744999051423365e-05, + "loss": 1.1634, + "step": 4075 + }, + { + "epoch": 0.2555646122013919, + "grad_norm": 2.7267305850982666, + "learning_rate": 1.7448635707650613e-05, + "loss": 1.2399, + "step": 4076 + }, + { + "epoch": 0.25562731205718225, + "grad_norm": 3.0735015869140625, + "learning_rate": 1.7447280593885513e-05, + "loss": 1.1851, + "step": 4077 + }, + { + "epoch": 0.2556900119129726, + "grad_norm": 2.6645243167877197, + "learning_rate": 1.7445925172994235e-05, + "loss": 1.1671, + "step": 4078 + }, + { + "epoch": 0.2557527117687629, + "grad_norm": 3.0670673847198486, + "learning_rate": 1.7444569445032677e-05, + "loss": 1.3991, + "step": 4079 + }, + { + "epoch": 0.2558154116245533, + "grad_norm": 3.3858931064605713, + "learning_rate": 1.7443213410056752e-05, + "loss": 0.9933, + "step": 4080 + }, + { + "epoch": 0.2558781114803436, + "grad_norm": 2.521127700805664, + "learning_rate": 1.7441857068122376e-05, + "loss": 1.1923, + "step": 4081 + }, + { + "epoch": 0.25594081133613394, + "grad_norm": 3.0389602184295654, + "learning_rate": 1.7440500419285497e-05, + "loss": 1.2572, + "step": 4082 + }, + { + "epoch": 0.25600351119192427, + "grad_norm": 2.9409713745117188, + "learning_rate": 1.7439143463602052e-05, + "loss": 1.0677, + "step": 4083 + }, + { + "epoch": 0.2560662110477146, + "grad_norm": 3.1413402557373047, + "learning_rate": 1.7437786201128003e-05, + "loss": 1.2024, + "step": 4084 + }, + { + "epoch": 0.2561289109035049, + "grad_norm": 3.158215284347534, + "learning_rate": 1.743642863191933e-05, + "loss": 1.1631, + "step": 4085 + }, + { + "epoch": 0.25619161075929525, + "grad_norm": 3.1879703998565674, + "learning_rate": 1.7435070756032013e-05, + "loss": 1.1588, + "step": 4086 + }, + { + "epoch": 0.2562543106150856, + "grad_norm": 2.8440346717834473, + "learning_rate": 1.7433712573522056e-05, + "loss": 1.178, + "step": 4087 + }, + { + "epoch": 0.2563170104708759, + "grad_norm": 3.1303040981292725, + 
"learning_rate": 1.7432354084445465e-05, + "loss": 1.1002, + "step": 4088 + }, + { + "epoch": 0.25637971032666623, + "grad_norm": 3.194790840148926, + "learning_rate": 1.7430995288858272e-05, + "loss": 1.3438, + "step": 4089 + }, + { + "epoch": 0.25644241018245656, + "grad_norm": 2.7275338172912598, + "learning_rate": 1.7429636186816503e-05, + "loss": 1.1289, + "step": 4090 + }, + { + "epoch": 0.2565051100382469, + "grad_norm": 2.828784227371216, + "learning_rate": 1.7428276778376216e-05, + "loss": 1.114, + "step": 4091 + }, + { + "epoch": 0.25656780989403727, + "grad_norm": 3.123901128768921, + "learning_rate": 1.742691706359347e-05, + "loss": 1.088, + "step": 4092 + }, + { + "epoch": 0.2566305097498276, + "grad_norm": 3.1965854167938232, + "learning_rate": 1.742555704252434e-05, + "loss": 0.9844, + "step": 4093 + }, + { + "epoch": 0.2566932096056179, + "grad_norm": 3.075395345687866, + "learning_rate": 1.742419671522491e-05, + "loss": 1.1586, + "step": 4094 + }, + { + "epoch": 0.25675590946140825, + "grad_norm": 3.438467502593994, + "learning_rate": 1.7422836081751287e-05, + "loss": 1.0697, + "step": 4095 + }, + { + "epoch": 0.2568186093171986, + "grad_norm": 2.9420366287231445, + "learning_rate": 1.742147514215958e-05, + "loss": 1.3793, + "step": 4096 + }, + { + "epoch": 0.2568813091729889, + "grad_norm": 2.831570625305176, + "learning_rate": 1.7420113896505914e-05, + "loss": 1.1807, + "step": 4097 + }, + { + "epoch": 0.25694400902877923, + "grad_norm": 2.7973976135253906, + "learning_rate": 1.7418752344846427e-05, + "loss": 0.9645, + "step": 4098 + }, + { + "epoch": 0.25700670888456956, + "grad_norm": 2.887399911880493, + "learning_rate": 1.7417390487237268e-05, + "loss": 0.999, + "step": 4099 + }, + { + "epoch": 0.2570694087403599, + "grad_norm": 2.768785238265991, + "learning_rate": 1.74160283237346e-05, + "loss": 1.2079, + "step": 4100 + }, + { + "epoch": 0.2571321085961502, + "grad_norm": 3.2395122051239014, + "learning_rate": 1.74146658543946e-05, + "loss": 1.1318, + "step": 4101 + }, + { + "epoch": 0.25719480845194054, + "grad_norm": 2.550657033920288, + "learning_rate": 1.741330307927346e-05, + "loss": 1.1865, + "step": 4102 + }, + { + "epoch": 0.2572575083077309, + "grad_norm": 3.2560184001922607, + "learning_rate": 1.7411939998427376e-05, + "loss": 0.9714, + "step": 4103 + }, + { + "epoch": 0.25732020816352125, + "grad_norm": 2.763301134109497, + "learning_rate": 1.7410576611912563e-05, + "loss": 1.1657, + "step": 4104 + }, + { + "epoch": 0.2573829080193116, + "grad_norm": 2.7627153396606445, + "learning_rate": 1.7409212919785246e-05, + "loss": 1.0706, + "step": 4105 + }, + { + "epoch": 0.2574456078751019, + "grad_norm": 3.4676995277404785, + "learning_rate": 1.7407848922101663e-05, + "loss": 1.3151, + "step": 4106 + }, + { + "epoch": 0.2575083077308922, + "grad_norm": 2.8094630241394043, + "learning_rate": 1.740648461891807e-05, + "loss": 1.2287, + "step": 4107 + }, + { + "epoch": 0.25757100758668255, + "grad_norm": 3.2514636516571045, + "learning_rate": 1.7405120010290727e-05, + "loss": 1.0666, + "step": 4108 + }, + { + "epoch": 0.2576337074424729, + "grad_norm": 2.820417881011963, + "learning_rate": 1.740375509627591e-05, + "loss": 1.2561, + "step": 4109 + }, + { + "epoch": 0.2576964072982632, + "grad_norm": 3.007873773574829, + "learning_rate": 1.740238987692991e-05, + "loss": 1.1454, + "step": 4110 + }, + { + "epoch": 0.25775910715405354, + "grad_norm": 2.819185495376587, + "learning_rate": 1.740102435230903e-05, + "loss": 1.1577, + "step": 4111 + }, + { + "epoch": 
0.25782180700984386, + "grad_norm": 2.8178226947784424, + "learning_rate": 1.7399658522469584e-05, + "loss": 1.2062, + "step": 4112 + }, + { + "epoch": 0.2578845068656342, + "grad_norm": 3.182579278945923, + "learning_rate": 1.739829238746789e-05, + "loss": 1.1208, + "step": 4113 + }, + { + "epoch": 0.2579472067214245, + "grad_norm": 2.878345251083374, + "learning_rate": 1.73969259473603e-05, + "loss": 1.0733, + "step": 4114 + }, + { + "epoch": 0.2580099065772149, + "grad_norm": 2.7618885040283203, + "learning_rate": 1.7395559202203164e-05, + "loss": 1.2037, + "step": 4115 + }, + { + "epoch": 0.2580726064330052, + "grad_norm": 3.051992416381836, + "learning_rate": 1.7394192152052838e-05, + "loss": 1.0637, + "step": 4116 + }, + { + "epoch": 0.25813530628879555, + "grad_norm": 2.7240798473358154, + "learning_rate": 1.7392824796965703e-05, + "loss": 1.218, + "step": 4117 + }, + { + "epoch": 0.2581980061445859, + "grad_norm": 2.9627251625061035, + "learning_rate": 1.7391457136998157e-05, + "loss": 1.1535, + "step": 4118 + }, + { + "epoch": 0.2582607060003762, + "grad_norm": 3.2017502784729004, + "learning_rate": 1.7390089172206594e-05, + "loss": 1.0756, + "step": 4119 + }, + { + "epoch": 0.25832340585616653, + "grad_norm": 2.5915019512176514, + "learning_rate": 1.738872090264743e-05, + "loss": 1.0848, + "step": 4120 + }, + { + "epoch": 0.25838610571195686, + "grad_norm": 3.049438238143921, + "learning_rate": 1.738735232837709e-05, + "loss": 1.0467, + "step": 4121 + }, + { + "epoch": 0.2584488055677472, + "grad_norm": 2.907139539718628, + "learning_rate": 1.7385983449452022e-05, + "loss": 1.3338, + "step": 4122 + }, + { + "epoch": 0.2585115054235375, + "grad_norm": 2.858581781387329, + "learning_rate": 1.7384614265928675e-05, + "loss": 1.2302, + "step": 4123 + }, + { + "epoch": 0.25857420527932784, + "grad_norm": 3.03839373588562, + "learning_rate": 1.738324477786351e-05, + "loss": 1.2162, + "step": 4124 + }, + { + "epoch": 0.25863690513511817, + "grad_norm": 2.877469539642334, + "learning_rate": 1.7381874985313006e-05, + "loss": 1.0977, + "step": 4125 + }, + { + "epoch": 0.2586996049909085, + "grad_norm": 2.7971203327178955, + "learning_rate": 1.738050488833366e-05, + "loss": 1.0357, + "step": 4126 + }, + { + "epoch": 0.2587623048466989, + "grad_norm": 3.0783870220184326, + "learning_rate": 1.7379134486981966e-05, + "loss": 1.0074, + "step": 4127 + }, + { + "epoch": 0.2588250047024892, + "grad_norm": 3.0033156871795654, + "learning_rate": 1.7377763781314446e-05, + "loss": 1.0903, + "step": 4128 + }, + { + "epoch": 0.25888770455827953, + "grad_norm": 2.8074138164520264, + "learning_rate": 1.7376392771387623e-05, + "loss": 1.0917, + "step": 4129 + }, + { + "epoch": 0.25895040441406986, + "grad_norm": 2.9777615070343018, + "learning_rate": 1.7375021457258044e-05, + "loss": 1.1629, + "step": 4130 + }, + { + "epoch": 0.2590131042698602, + "grad_norm": 3.284410238265991, + "learning_rate": 1.7373649838982255e-05, + "loss": 1.0055, + "step": 4131 + }, + { + "epoch": 0.2590758041256505, + "grad_norm": 3.0246384143829346, + "learning_rate": 1.737227791661683e-05, + "loss": 1.0735, + "step": 4132 + }, + { + "epoch": 0.25913850398144084, + "grad_norm": 3.480741262435913, + "learning_rate": 1.7370905690218336e-05, + "loss": 1.0928, + "step": 4133 + }, + { + "epoch": 0.25920120383723116, + "grad_norm": 3.230555772781372, + "learning_rate": 1.7369533159843368e-05, + "loss": 1.2375, + "step": 4134 + }, + { + "epoch": 0.2592639036930215, + "grad_norm": 2.949148416519165, + "learning_rate": 
1.7368160325548536e-05, + "loss": 0.999, + "step": 4135 + }, + { + "epoch": 0.2593266035488118, + "grad_norm": 2.7337512969970703, + "learning_rate": 1.736678718739045e-05, + "loss": 1.1349, + "step": 4136 + }, + { + "epoch": 0.25938930340460215, + "grad_norm": 3.1946003437042236, + "learning_rate": 1.7365413745425735e-05, + "loss": 1.1232, + "step": 4137 + }, + { + "epoch": 0.2594520032603925, + "grad_norm": 2.964611530303955, + "learning_rate": 1.7364039999711034e-05, + "loss": 1.1827, + "step": 4138 + }, + { + "epoch": 0.25951470311618285, + "grad_norm": 2.8807644844055176, + "learning_rate": 1.7362665950303008e-05, + "loss": 1.2163, + "step": 4139 + }, + { + "epoch": 0.2595774029719732, + "grad_norm": 2.729625701904297, + "learning_rate": 1.736129159725831e-05, + "loss": 1.0959, + "step": 4140 + }, + { + "epoch": 0.2596401028277635, + "grad_norm": 2.803588628768921, + "learning_rate": 1.735991694063363e-05, + "loss": 1.252, + "step": 4141 + }, + { + "epoch": 0.25970280268355384, + "grad_norm": 3.0257439613342285, + "learning_rate": 1.7358541980485653e-05, + "loss": 1.1171, + "step": 4142 + }, + { + "epoch": 0.25976550253934416, + "grad_norm": 3.0111641883850098, + "learning_rate": 1.7357166716871082e-05, + "loss": 1.1213, + "step": 4143 + }, + { + "epoch": 0.2598282023951345, + "grad_norm": 3.067279815673828, + "learning_rate": 1.7355791149846636e-05, + "loss": 1.1867, + "step": 4144 + }, + { + "epoch": 0.2598909022509248, + "grad_norm": 3.2493703365325928, + "learning_rate": 1.735441527946904e-05, + "loss": 1.0411, + "step": 4145 + }, + { + "epoch": 0.25995360210671514, + "grad_norm": 2.8986408710479736, + "learning_rate": 1.735303910579504e-05, + "loss": 1.2863, + "step": 4146 + }, + { + "epoch": 0.26001630196250547, + "grad_norm": 3.235570192337036, + "learning_rate": 1.7351662628881385e-05, + "loss": 1.1495, + "step": 4147 + }, + { + "epoch": 0.2600790018182958, + "grad_norm": 3.040762424468994, + "learning_rate": 1.735028584878484e-05, + "loss": 1.2537, + "step": 4148 + }, + { + "epoch": 0.2601417016740861, + "grad_norm": 2.847273826599121, + "learning_rate": 1.7348908765562185e-05, + "loss": 1.2463, + "step": 4149 + }, + { + "epoch": 0.2602044015298765, + "grad_norm": 2.9086008071899414, + "learning_rate": 1.7347531379270214e-05, + "loss": 1.2002, + "step": 4150 + }, + { + "epoch": 0.26026710138566683, + "grad_norm": 2.9779293537139893, + "learning_rate": 1.734615368996573e-05, + "loss": 1.1531, + "step": 4151 + }, + { + "epoch": 0.26032980124145716, + "grad_norm": 2.6928203105926514, + "learning_rate": 1.734477569770554e-05, + "loss": 1.0269, + "step": 4152 + }, + { + "epoch": 0.2603925010972475, + "grad_norm": 2.9053218364715576, + "learning_rate": 1.7343397402546488e-05, + "loss": 1.0817, + "step": 4153 + }, + { + "epoch": 0.2604552009530378, + "grad_norm": 3.0179970264434814, + "learning_rate": 1.73420188045454e-05, + "loss": 1.234, + "step": 4154 + }, + { + "epoch": 0.26051790080882814, + "grad_norm": 2.9420721530914307, + "learning_rate": 1.7340639903759138e-05, + "loss": 1.1837, + "step": 4155 + }, + { + "epoch": 0.26058060066461847, + "grad_norm": 2.9515535831451416, + "learning_rate": 1.7339260700244566e-05, + "loss": 0.9405, + "step": 4156 + }, + { + "epoch": 0.2606433005204088, + "grad_norm": 2.882797956466675, + "learning_rate": 1.7337881194058562e-05, + "loss": 1.3051, + "step": 4157 + }, + { + "epoch": 0.2607060003761991, + "grad_norm": 2.718079090118408, + "learning_rate": 1.7336501385258017e-05, + "loss": 1.1016, + "step": 4158 + }, + { + "epoch": 
0.26076870023198945, + "grad_norm": 2.771373748779297, + "learning_rate": 1.7335121273899833e-05, + "loss": 1.2759, + "step": 4159 + }, + { + "epoch": 0.2608314000877798, + "grad_norm": 3.2485814094543457, + "learning_rate": 1.733374086004093e-05, + "loss": 1.0186, + "step": 4160 + }, + { + "epoch": 0.26089409994357016, + "grad_norm": 3.043281078338623, + "learning_rate": 1.7332360143738233e-05, + "loss": 1.3226, + "step": 4161 + }, + { + "epoch": 0.2609567997993605, + "grad_norm": 3.1540462970733643, + "learning_rate": 1.7330979125048683e-05, + "loss": 1.1802, + "step": 4162 + }, + { + "epoch": 0.2610194996551508, + "grad_norm": 2.862966775894165, + "learning_rate": 1.7329597804029233e-05, + "loss": 1.2271, + "step": 4163 + }, + { + "epoch": 0.26108219951094114, + "grad_norm": 2.801457166671753, + "learning_rate": 1.7328216180736853e-05, + "loss": 1.1008, + "step": 4164 + }, + { + "epoch": 0.26114489936673146, + "grad_norm": 3.080874443054199, + "learning_rate": 1.7326834255228514e-05, + "loss": 1.0833, + "step": 4165 + }, + { + "epoch": 0.2612075992225218, + "grad_norm": 2.87172269821167, + "learning_rate": 1.732545202756121e-05, + "loss": 1.1909, + "step": 4166 + }, + { + "epoch": 0.2612702990783121, + "grad_norm": 3.1464006900787354, + "learning_rate": 1.7324069497791947e-05, + "loss": 1.2333, + "step": 4167 + }, + { + "epoch": 0.26133299893410245, + "grad_norm": 3.0403313636779785, + "learning_rate": 1.7322686665977738e-05, + "loss": 1.097, + "step": 4168 + }, + { + "epoch": 0.26139569878989277, + "grad_norm": 3.047980546951294, + "learning_rate": 1.732130353217561e-05, + "loss": 1.0729, + "step": 4169 + }, + { + "epoch": 0.2614583986456831, + "grad_norm": 2.9454362392425537, + "learning_rate": 1.7319920096442604e-05, + "loss": 1.1527, + "step": 4170 + }, + { + "epoch": 0.2615210985014734, + "grad_norm": 2.8924753665924072, + "learning_rate": 1.7318536358835773e-05, + "loss": 1.1713, + "step": 4171 + }, + { + "epoch": 0.26158379835726375, + "grad_norm": 2.9430179595947266, + "learning_rate": 1.7317152319412184e-05, + "loss": 0.9799, + "step": 4172 + }, + { + "epoch": 0.26164649821305414, + "grad_norm": 2.8916895389556885, + "learning_rate": 1.7315767978228916e-05, + "loss": 1.1442, + "step": 4173 + }, + { + "epoch": 0.26170919806884446, + "grad_norm": 2.931882619857788, + "learning_rate": 1.7314383335343056e-05, + "loss": 1.1684, + "step": 4174 + }, + { + "epoch": 0.2617718979246348, + "grad_norm": 2.6893839836120605, + "learning_rate": 1.7312998390811704e-05, + "loss": 1.284, + "step": 4175 + }, + { + "epoch": 0.2618345977804251, + "grad_norm": 2.750725746154785, + "learning_rate": 1.731161314469198e-05, + "loss": 1.182, + "step": 4176 + }, + { + "epoch": 0.26189729763621544, + "grad_norm": 2.989229440689087, + "learning_rate": 1.7310227597041015e-05, + "loss": 1.2908, + "step": 4177 + }, + { + "epoch": 0.26195999749200577, + "grad_norm": 3.036567449569702, + "learning_rate": 1.7308841747915942e-05, + "loss": 1.1252, + "step": 4178 + }, + { + "epoch": 0.2620226973477961, + "grad_norm": 2.8105523586273193, + "learning_rate": 1.7307455597373916e-05, + "loss": 1.1344, + "step": 4179 + }, + { + "epoch": 0.2620853972035864, + "grad_norm": 2.677891969680786, + "learning_rate": 1.7306069145472104e-05, + "loss": 1.183, + "step": 4180 + }, + { + "epoch": 0.26214809705937675, + "grad_norm": 2.7417335510253906, + "learning_rate": 1.730468239226768e-05, + "loss": 1.276, + "step": 4181 + }, + { + "epoch": 0.2622107969151671, + "grad_norm": 2.929577112197876, + "learning_rate": 
1.7303295337817834e-05, + "loss": 0.8672, + "step": 4182 + }, + { + "epoch": 0.2622734967709574, + "grad_norm": 2.739769458770752, + "learning_rate": 1.730190798217977e-05, + "loss": 1.177, + "step": 4183 + }, + { + "epoch": 0.26233619662674773, + "grad_norm": 2.623758316040039, + "learning_rate": 1.73005203254107e-05, + "loss": 0.8631, + "step": 4184 + }, + { + "epoch": 0.2623988964825381, + "grad_norm": 3.3413267135620117, + "learning_rate": 1.7299132367567856e-05, + "loss": 1.1427, + "step": 4185 + }, + { + "epoch": 0.26246159633832844, + "grad_norm": 2.9303479194641113, + "learning_rate": 1.7297744108708475e-05, + "loss": 1.079, + "step": 4186 + }, + { + "epoch": 0.26252429619411877, + "grad_norm": 2.789386510848999, + "learning_rate": 1.7296355548889807e-05, + "loss": 1.2254, + "step": 4187 + }, + { + "epoch": 0.2625869960499091, + "grad_norm": 2.8827593326568604, + "learning_rate": 1.729496668816912e-05, + "loss": 1.2179, + "step": 4188 + }, + { + "epoch": 0.2626496959056994, + "grad_norm": 2.8402867317199707, + "learning_rate": 1.7293577526603684e-05, + "loss": 1.205, + "step": 4189 + }, + { + "epoch": 0.26271239576148975, + "grad_norm": 3.125487804412842, + "learning_rate": 1.7292188064250794e-05, + "loss": 1.1302, + "step": 4190 + }, + { + "epoch": 0.2627750956172801, + "grad_norm": 3.1988563537597656, + "learning_rate": 1.7290798301167747e-05, + "loss": 0.9434, + "step": 4191 + }, + { + "epoch": 0.2628377954730704, + "grad_norm": 2.7284083366394043, + "learning_rate": 1.7289408237411862e-05, + "loss": 1.2053, + "step": 4192 + }, + { + "epoch": 0.26290049532886073, + "grad_norm": 2.949479103088379, + "learning_rate": 1.7288017873040465e-05, + "loss": 1.3049, + "step": 4193 + }, + { + "epoch": 0.26296319518465106, + "grad_norm": 3.0888490676879883, + "learning_rate": 1.728662720811089e-05, + "loss": 1.0516, + "step": 4194 + }, + { + "epoch": 0.2630258950404414, + "grad_norm": 3.1908862590789795, + "learning_rate": 1.7285236242680493e-05, + "loss": 1.0361, + "step": 4195 + }, + { + "epoch": 0.26308859489623176, + "grad_norm": 2.7631852626800537, + "learning_rate": 1.7283844976806634e-05, + "loss": 1.1516, + "step": 4196 + }, + { + "epoch": 0.2631512947520221, + "grad_norm": 3.440462827682495, + "learning_rate": 1.728245341054669e-05, + "loss": 1.2361, + "step": 4197 + }, + { + "epoch": 0.2632139946078124, + "grad_norm": 2.8826496601104736, + "learning_rate": 1.728106154395805e-05, + "loss": 1.2496, + "step": 4198 + }, + { + "epoch": 0.26327669446360275, + "grad_norm": 2.586733818054199, + "learning_rate": 1.727966937709811e-05, + "loss": 1.2047, + "step": 4199 + }, + { + "epoch": 0.26333939431939307, + "grad_norm": 3.0785250663757324, + "learning_rate": 1.7278276910024292e-05, + "loss": 1.2405, + "step": 4200 + }, + { + "epoch": 0.2634020941751834, + "grad_norm": 2.8086180686950684, + "learning_rate": 1.7276884142794014e-05, + "loss": 1.0402, + "step": 4201 + }, + { + "epoch": 0.2634647940309737, + "grad_norm": 2.777320146560669, + "learning_rate": 1.7275491075464716e-05, + "loss": 1.1448, + "step": 4202 + }, + { + "epoch": 0.26352749388676405, + "grad_norm": 2.849393606185913, + "learning_rate": 1.727409770809385e-05, + "loss": 1.1236, + "step": 4203 + }, + { + "epoch": 0.2635901937425544, + "grad_norm": 2.8348007202148438, + "learning_rate": 1.7272704040738875e-05, + "loss": 1.0569, + "step": 4204 + }, + { + "epoch": 0.2636528935983447, + "grad_norm": 2.7976932525634766, + "learning_rate": 1.727131007345727e-05, + "loss": 1.2616, + "step": 4205 + }, + { + "epoch": 
0.26371559345413503, + "grad_norm": 2.8687102794647217, + "learning_rate": 1.7269915806306522e-05, + "loss": 1.0646, + "step": 4206 + }, + { + "epoch": 0.26377829330992536, + "grad_norm": 2.91021990776062, + "learning_rate": 1.726852123934412e-05, + "loss": 1.1573, + "step": 4207 + }, + { + "epoch": 0.26384099316571574, + "grad_norm": 2.6696157455444336, + "learning_rate": 1.7267126372627592e-05, + "loss": 1.164, + "step": 4208 + }, + { + "epoch": 0.26390369302150607, + "grad_norm": 2.878793954849243, + "learning_rate": 1.7265731206214455e-05, + "loss": 1.204, + "step": 4209 + }, + { + "epoch": 0.2639663928772964, + "grad_norm": 2.698880672454834, + "learning_rate": 1.7264335740162244e-05, + "loss": 1.1413, + "step": 4210 + }, + { + "epoch": 0.2640290927330867, + "grad_norm": 3.146850824356079, + "learning_rate": 1.7262939974528506e-05, + "loss": 1.2474, + "step": 4211 + }, + { + "epoch": 0.26409179258887705, + "grad_norm": 3.3068766593933105, + "learning_rate": 1.7261543909370813e-05, + "loss": 1.0447, + "step": 4212 + }, + { + "epoch": 0.2641544924446674, + "grad_norm": 3.10029673576355, + "learning_rate": 1.726014754474673e-05, + "loss": 1.1155, + "step": 4213 + }, + { + "epoch": 0.2642171923004577, + "grad_norm": 2.8417460918426514, + "learning_rate": 1.7258750880713845e-05, + "loss": 1.1128, + "step": 4214 + }, + { + "epoch": 0.26427989215624803, + "grad_norm": 2.7818708419799805, + "learning_rate": 1.7257353917329753e-05, + "loss": 1.1992, + "step": 4215 + }, + { + "epoch": 0.26434259201203836, + "grad_norm": 3.2382888793945312, + "learning_rate": 1.725595665465207e-05, + "loss": 1.0182, + "step": 4216 + }, + { + "epoch": 0.2644052918678287, + "grad_norm": 3.0145673751831055, + "learning_rate": 1.7254559092738422e-05, + "loss": 1.2415, + "step": 4217 + }, + { + "epoch": 0.264467991723619, + "grad_norm": 2.9235026836395264, + "learning_rate": 1.7253161231646438e-05, + "loss": 1.1111, + "step": 4218 + }, + { + "epoch": 0.2645306915794094, + "grad_norm": 3.1521780490875244, + "learning_rate": 1.7251763071433767e-05, + "loss": 1.0196, + "step": 4219 + }, + { + "epoch": 0.2645933914351997, + "grad_norm": 2.9001007080078125, + "learning_rate": 1.725036461215807e-05, + "loss": 1.1527, + "step": 4220 + }, + { + "epoch": 0.26465609129099005, + "grad_norm": 2.9979522228240967, + "learning_rate": 1.7248965853877022e-05, + "loss": 1.2296, + "step": 4221 + }, + { + "epoch": 0.2647187911467804, + "grad_norm": 3.0984745025634766, + "learning_rate": 1.72475667966483e-05, + "loss": 1.162, + "step": 4222 + }, + { + "epoch": 0.2647814910025707, + "grad_norm": 3.062624454498291, + "learning_rate": 1.7246167440529612e-05, + "loss": 1.188, + "step": 4223 + }, + { + "epoch": 0.26484419085836103, + "grad_norm": 2.7213733196258545, + "learning_rate": 1.724476778557866e-05, + "loss": 1.071, + "step": 4224 + }, + { + "epoch": 0.26490689071415136, + "grad_norm": 3.370295524597168, + "learning_rate": 1.7243367831853173e-05, + "loss": 1.1306, + "step": 4225 + }, + { + "epoch": 0.2649695905699417, + "grad_norm": 2.953148603439331, + "learning_rate": 1.7241967579410873e-05, + "loss": 1.1504, + "step": 4226 + }, + { + "epoch": 0.265032290425732, + "grad_norm": 2.877384901046753, + "learning_rate": 1.724056702830952e-05, + "loss": 1.0875, + "step": 4227 + }, + { + "epoch": 0.26509499028152234, + "grad_norm": 3.1119678020477295, + "learning_rate": 1.7239166178606864e-05, + "loss": 1.1318, + "step": 4228 + }, + { + "epoch": 0.26515769013731266, + "grad_norm": 3.1817946434020996, + "learning_rate": 
1.723776503036068e-05, + "loss": 1.0677, + "step": 4229 + }, + { + "epoch": 0.265220389993103, + "grad_norm": 2.850396156311035, + "learning_rate": 1.723636358362875e-05, + "loss": 1.0731, + "step": 4230 + }, + { + "epoch": 0.26528308984889337, + "grad_norm": 2.9732773303985596, + "learning_rate": 1.7234961838468865e-05, + "loss": 1.0402, + "step": 4231 + }, + { + "epoch": 0.2653457897046837, + "grad_norm": 3.160414934158325, + "learning_rate": 1.723355979493884e-05, + "loss": 1.3246, + "step": 4232 + }, + { + "epoch": 0.265408489560474, + "grad_norm": 3.150684118270874, + "learning_rate": 1.7232157453096497e-05, + "loss": 1.1345, + "step": 4233 + }, + { + "epoch": 0.26547118941626435, + "grad_norm": 2.7937333583831787, + "learning_rate": 1.723075481299966e-05, + "loss": 1.2414, + "step": 4234 + }, + { + "epoch": 0.2655338892720547, + "grad_norm": 2.654735803604126, + "learning_rate": 1.7229351874706185e-05, + "loss": 1.266, + "step": 4235 + }, + { + "epoch": 0.265596589127845, + "grad_norm": 2.647247791290283, + "learning_rate": 1.7227948638273918e-05, + "loss": 1.0798, + "step": 4236 + }, + { + "epoch": 0.26565928898363533, + "grad_norm": 2.646059274673462, + "learning_rate": 1.7226545103760735e-05, + "loss": 1.224, + "step": 4237 + }, + { + "epoch": 0.26572198883942566, + "grad_norm": 3.2017531394958496, + "learning_rate": 1.7225141271224514e-05, + "loss": 1.2691, + "step": 4238 + }, + { + "epoch": 0.265784688695216, + "grad_norm": 2.9488396644592285, + "learning_rate": 1.7223737140723154e-05, + "loss": 1.1308, + "step": 4239 + }, + { + "epoch": 0.2658473885510063, + "grad_norm": 2.9202325344085693, + "learning_rate": 1.7222332712314556e-05, + "loss": 1.151, + "step": 4240 + }, + { + "epoch": 0.26591008840679664, + "grad_norm": 2.9860825538635254, + "learning_rate": 1.7220927986056645e-05, + "loss": 1.1735, + "step": 4241 + }, + { + "epoch": 0.265972788262587, + "grad_norm": 2.780564069747925, + "learning_rate": 1.7219522962007346e-05, + "loss": 1.272, + "step": 4242 + }, + { + "epoch": 0.26603548811837735, + "grad_norm": 3.1401820182800293, + "learning_rate": 1.721811764022461e-05, + "loss": 1.0303, + "step": 4243 + }, + { + "epoch": 0.2660981879741677, + "grad_norm": 3.1140387058258057, + "learning_rate": 1.7216712020766378e-05, + "loss": 1.1015, + "step": 4244 + }, + { + "epoch": 0.266160887829958, + "grad_norm": 2.6280362606048584, + "learning_rate": 1.7215306103690633e-05, + "loss": 1.2264, + "step": 4245 + }, + { + "epoch": 0.26622358768574833, + "grad_norm": 3.0097858905792236, + "learning_rate": 1.721389988905535e-05, + "loss": 1.074, + "step": 4246 + }, + { + "epoch": 0.26628628754153866, + "grad_norm": 3.1395976543426514, + "learning_rate": 1.721249337691852e-05, + "loss": 1.0657, + "step": 4247 + }, + { + "epoch": 0.266348987397329, + "grad_norm": 2.7133917808532715, + "learning_rate": 1.7211086567338145e-05, + "loss": 1.0179, + "step": 4248 + }, + { + "epoch": 0.2664116872531193, + "grad_norm": 2.898770570755005, + "learning_rate": 1.720967946037225e-05, + "loss": 1.0246, + "step": 4249 + }, + { + "epoch": 0.26647438710890964, + "grad_norm": 2.985043525695801, + "learning_rate": 1.7208272056078856e-05, + "loss": 1.1382, + "step": 4250 + }, + { + "epoch": 0.26653708696469997, + "grad_norm": 2.841766119003296, + "learning_rate": 1.720686435451601e-05, + "loss": 1.1172, + "step": 4251 + }, + { + "epoch": 0.2665997868204903, + "grad_norm": 3.105656147003174, + "learning_rate": 1.7205456355741762e-05, + "loss": 1.3611, + "step": 4252 + }, + { + "epoch": 0.2666624866762806, + 
"grad_norm": 3.2663419246673584, + "learning_rate": 1.7204048059814175e-05, + "loss": 1.2059, + "step": 4253 + }, + { + "epoch": 0.266725186532071, + "grad_norm": 3.0185329914093018, + "learning_rate": 1.7202639466791336e-05, + "loss": 1.1125, + "step": 4254 + }, + { + "epoch": 0.26678788638786133, + "grad_norm": 2.8698208332061768, + "learning_rate": 1.7201230576731333e-05, + "loss": 1.2047, + "step": 4255 + }, + { + "epoch": 0.26685058624365166, + "grad_norm": 3.578943967819214, + "learning_rate": 1.719982138969226e-05, + "loss": 1.1224, + "step": 4256 + }, + { + "epoch": 0.266913286099442, + "grad_norm": 3.1991708278656006, + "learning_rate": 1.7198411905732244e-05, + "loss": 1.2076, + "step": 4257 + }, + { + "epoch": 0.2669759859552323, + "grad_norm": 3.160168409347534, + "learning_rate": 1.7197002124909405e-05, + "loss": 1.3211, + "step": 4258 + }, + { + "epoch": 0.26703868581102264, + "grad_norm": 2.865222930908203, + "learning_rate": 1.719559204728188e-05, + "loss": 1.0881, + "step": 4259 + }, + { + "epoch": 0.26710138566681296, + "grad_norm": 3.2439639568328857, + "learning_rate": 1.7194181672907832e-05, + "loss": 1.397, + "step": 4260 + }, + { + "epoch": 0.2671640855226033, + "grad_norm": 2.6828267574310303, + "learning_rate": 1.7192771001845414e-05, + "loss": 1.2417, + "step": 4261 + }, + { + "epoch": 0.2672267853783936, + "grad_norm": 2.7789855003356934, + "learning_rate": 1.7191360034152806e-05, + "loss": 1.1663, + "step": 4262 + }, + { + "epoch": 0.26728948523418394, + "grad_norm": 2.887814998626709, + "learning_rate": 1.7189948769888193e-05, + "loss": 1.1427, + "step": 4263 + }, + { + "epoch": 0.26735218508997427, + "grad_norm": 2.783698558807373, + "learning_rate": 1.7188537209109783e-05, + "loss": 1.2262, + "step": 4264 + }, + { + "epoch": 0.2674148849457646, + "grad_norm": 2.7950503826141357, + "learning_rate": 1.7187125351875784e-05, + "loss": 1.2402, + "step": 4265 + }, + { + "epoch": 0.267477584801555, + "grad_norm": 2.7933502197265625, + "learning_rate": 1.7185713198244422e-05, + "loss": 1.1482, + "step": 4266 + }, + { + "epoch": 0.2675402846573453, + "grad_norm": 2.8899130821228027, + "learning_rate": 1.718430074827393e-05, + "loss": 1.1028, + "step": 4267 + }, + { + "epoch": 0.26760298451313563, + "grad_norm": 3.245253324508667, + "learning_rate": 1.7182888002022567e-05, + "loss": 1.2302, + "step": 4268 + }, + { + "epoch": 0.26766568436892596, + "grad_norm": 2.9188525676727295, + "learning_rate": 1.7181474959548586e-05, + "loss": 1.3072, + "step": 4269 + }, + { + "epoch": 0.2677283842247163, + "grad_norm": 3.091296434402466, + "learning_rate": 1.7180061620910263e-05, + "loss": 0.9451, + "step": 4270 + }, + { + "epoch": 0.2677910840805066, + "grad_norm": 2.854032516479492, + "learning_rate": 1.717864798616589e-05, + "loss": 1.1325, + "step": 4271 + }, + { + "epoch": 0.26785378393629694, + "grad_norm": 2.5003180503845215, + "learning_rate": 1.7177234055373758e-05, + "loss": 1.0306, + "step": 4272 + }, + { + "epoch": 0.26791648379208727, + "grad_norm": 3.010894775390625, + "learning_rate": 1.7175819828592177e-05, + "loss": 1.1966, + "step": 4273 + }, + { + "epoch": 0.2679791836478776, + "grad_norm": 2.8870787620544434, + "learning_rate": 1.7174405305879474e-05, + "loss": 1.0699, + "step": 4274 + }, + { + "epoch": 0.2680418835036679, + "grad_norm": 3.142378091812134, + "learning_rate": 1.7172990487293984e-05, + "loss": 1.2568, + "step": 4275 + }, + { + "epoch": 0.26810458335945825, + "grad_norm": 2.9211766719818115, + "learning_rate": 1.7171575372894053e-05, + "loss": 
1.1109, + "step": 4276 + }, + { + "epoch": 0.26816728321524863, + "grad_norm": 2.9989993572235107, + "learning_rate": 1.717015996273804e-05, + "loss": 1.1162, + "step": 4277 + }, + { + "epoch": 0.26822998307103896, + "grad_norm": 2.962181568145752, + "learning_rate": 1.716874425688432e-05, + "loss": 1.0662, + "step": 4278 + }, + { + "epoch": 0.2682926829268293, + "grad_norm": 3.1079587936401367, + "learning_rate": 1.716732825539127e-05, + "loss": 1.1357, + "step": 4279 + }, + { + "epoch": 0.2683553827826196, + "grad_norm": 2.775360345840454, + "learning_rate": 1.7165911958317292e-05, + "loss": 1.2807, + "step": 4280 + }, + { + "epoch": 0.26841808263840994, + "grad_norm": 3.017981767654419, + "learning_rate": 1.716449536572079e-05, + "loss": 1.1193, + "step": 4281 + }, + { + "epoch": 0.26848078249420027, + "grad_norm": 2.876990795135498, + "learning_rate": 1.716307847766019e-05, + "loss": 1.0298, + "step": 4282 + }, + { + "epoch": 0.2685434823499906, + "grad_norm": 2.9442412853240967, + "learning_rate": 1.7161661294193918e-05, + "loss": 1.2274, + "step": 4283 + }, + { + "epoch": 0.2686061822057809, + "grad_norm": 2.7351372241973877, + "learning_rate": 1.716024381538042e-05, + "loss": 1.1744, + "step": 4284 + }, + { + "epoch": 0.26866888206157125, + "grad_norm": 3.2167954444885254, + "learning_rate": 1.7158826041278155e-05, + "loss": 1.0912, + "step": 4285 + }, + { + "epoch": 0.2687315819173616, + "grad_norm": 2.9894015789031982, + "learning_rate": 1.7157407971945592e-05, + "loss": 1.1924, + "step": 4286 + }, + { + "epoch": 0.2687942817731519, + "grad_norm": 2.977327585220337, + "learning_rate": 1.715598960744121e-05, + "loss": 1.0765, + "step": 4287 + }, + { + "epoch": 0.2688569816289422, + "grad_norm": 3.116556167602539, + "learning_rate": 1.7154570947823506e-05, + "loss": 1.1078, + "step": 4288 + }, + { + "epoch": 0.2689196814847326, + "grad_norm": 2.648308038711548, + "learning_rate": 1.7153151993150985e-05, + "loss": 1.2211, + "step": 4289 + }, + { + "epoch": 0.26898238134052294, + "grad_norm": 2.782334804534912, + "learning_rate": 1.7151732743482165e-05, + "loss": 1.3144, + "step": 4290 + }, + { + "epoch": 0.26904508119631326, + "grad_norm": 2.7468466758728027, + "learning_rate": 1.715031319887557e-05, + "loss": 0.9999, + "step": 4291 + }, + { + "epoch": 0.2691077810521036, + "grad_norm": 2.7350332736968994, + "learning_rate": 1.714889335938975e-05, + "loss": 1.1777, + "step": 4292 + }, + { + "epoch": 0.2691704809078939, + "grad_norm": 3.0216336250305176, + "learning_rate": 1.7147473225083252e-05, + "loss": 1.203, + "step": 4293 + }, + { + "epoch": 0.26923318076368424, + "grad_norm": 2.752256393432617, + "learning_rate": 1.7146052796014646e-05, + "loss": 1.2282, + "step": 4294 + }, + { + "epoch": 0.26929588061947457, + "grad_norm": 3.5122268199920654, + "learning_rate": 1.7144632072242514e-05, + "loss": 1.0454, + "step": 4295 + }, + { + "epoch": 0.2693585804752649, + "grad_norm": 3.3605058193206787, + "learning_rate": 1.7143211053825443e-05, + "loss": 1.139, + "step": 4296 + }, + { + "epoch": 0.2694212803310552, + "grad_norm": 3.242938756942749, + "learning_rate": 1.714178974082203e-05, + "loss": 1.0916, + "step": 4297 + }, + { + "epoch": 0.26948398018684555, + "grad_norm": 3.1718828678131104, + "learning_rate": 1.7140368133290902e-05, + "loss": 1.1295, + "step": 4298 + }, + { + "epoch": 0.2695466800426359, + "grad_norm": 2.733051061630249, + "learning_rate": 1.713894623129068e-05, + "loss": 1.1985, + "step": 4299 + }, + { + "epoch": 0.26960937989842626, + "grad_norm": 
3.2965168952941895, + "learning_rate": 1.7137524034880006e-05, + "loss": 1.2136, + "step": 4300 + }, + { + "epoch": 0.2696720797542166, + "grad_norm": 2.8795783519744873, + "learning_rate": 1.7136101544117526e-05, + "loss": 1.1531, + "step": 4301 + }, + { + "epoch": 0.2697347796100069, + "grad_norm": 3.1675772666931152, + "learning_rate": 1.7134678759061905e-05, + "loss": 1.1551, + "step": 4302 + }, + { + "epoch": 0.26979747946579724, + "grad_norm": 2.6131887435913086, + "learning_rate": 1.7133255679771822e-05, + "loss": 1.2239, + "step": 4303 + }, + { + "epoch": 0.26986017932158757, + "grad_norm": 2.794743776321411, + "learning_rate": 1.7131832306305964e-05, + "loss": 1.0509, + "step": 4304 + }, + { + "epoch": 0.2699228791773779, + "grad_norm": 2.984708547592163, + "learning_rate": 1.7130408638723032e-05, + "loss": 1.0397, + "step": 4305 + }, + { + "epoch": 0.2699855790331682, + "grad_norm": 2.986625909805298, + "learning_rate": 1.7128984677081734e-05, + "loss": 1.0151, + "step": 4306 + }, + { + "epoch": 0.27004827888895855, + "grad_norm": 2.7341556549072266, + "learning_rate": 1.7127560421440795e-05, + "loss": 1.0564, + "step": 4307 + }, + { + "epoch": 0.2701109787447489, + "grad_norm": 3.078719139099121, + "learning_rate": 1.7126135871858952e-05, + "loss": 1.2132, + "step": 4308 + }, + { + "epoch": 0.2701736786005392, + "grad_norm": 2.687843084335327, + "learning_rate": 1.712471102839496e-05, + "loss": 1.1825, + "step": 4309 + }, + { + "epoch": 0.27023637845632953, + "grad_norm": 2.926459550857544, + "learning_rate": 1.712328589110757e-05, + "loss": 1.095, + "step": 4310 + }, + { + "epoch": 0.27029907831211986, + "grad_norm": 3.0058114528656006, + "learning_rate": 1.712186046005556e-05, + "loss": 1.0475, + "step": 4311 + }, + { + "epoch": 0.27036177816791024, + "grad_norm": 3.0337252616882324, + "learning_rate": 1.7120434735297717e-05, + "loss": 1.1477, + "step": 4312 + }, + { + "epoch": 0.27042447802370057, + "grad_norm": 3.076108694076538, + "learning_rate": 1.711900871689283e-05, + "loss": 1.2283, + "step": 4313 + }, + { + "epoch": 0.2704871778794909, + "grad_norm": 2.9686543941497803, + "learning_rate": 1.7117582404899714e-05, + "loss": 0.9783, + "step": 4314 + }, + { + "epoch": 0.2705498777352812, + "grad_norm": 2.8212227821350098, + "learning_rate": 1.7116155799377184e-05, + "loss": 1.1249, + "step": 4315 + }, + { + "epoch": 0.27061257759107155, + "grad_norm": 3.0624101161956787, + "learning_rate": 1.7114728900384087e-05, + "loss": 1.2475, + "step": 4316 + }, + { + "epoch": 0.2706752774468619, + "grad_norm": 2.8287971019744873, + "learning_rate": 1.7113301707979252e-05, + "loss": 1.06, + "step": 4317 + }, + { + "epoch": 0.2707379773026522, + "grad_norm": 2.8712379932403564, + "learning_rate": 1.7111874222221547e-05, + "loss": 1.1952, + "step": 4318 + }, + { + "epoch": 0.2708006771584425, + "grad_norm": 3.0771985054016113, + "learning_rate": 1.711044644316984e-05, + "loss": 1.3234, + "step": 4319 + }, + { + "epoch": 0.27086337701423285, + "grad_norm": 3.019258737564087, + "learning_rate": 1.710901837088301e-05, + "loss": 0.9572, + "step": 4320 + }, + { + "epoch": 0.2709260768700232, + "grad_norm": 3.0404584407806396, + "learning_rate": 1.710759000541995e-05, + "loss": 1.159, + "step": 4321 + }, + { + "epoch": 0.2709887767258135, + "grad_norm": 3.304725408554077, + "learning_rate": 1.710616134683957e-05, + "loss": 1.0673, + "step": 4322 + }, + { + "epoch": 0.2710514765816039, + "grad_norm": 2.9946553707122803, + "learning_rate": 1.7104732395200786e-05, + "loss": 1.1857, + "step": 
4323 + }, + { + "epoch": 0.2711141764373942, + "grad_norm": 2.8928754329681396, + "learning_rate": 1.7103303150562526e-05, + "loss": 1.2239, + "step": 4324 + }, + { + "epoch": 0.27117687629318454, + "grad_norm": 2.8430557250976562, + "learning_rate": 1.7101873612983733e-05, + "loss": 1.2033, + "step": 4325 + }, + { + "epoch": 0.27123957614897487, + "grad_norm": 2.8337128162384033, + "learning_rate": 1.7100443782523366e-05, + "loss": 1.0093, + "step": 4326 + }, + { + "epoch": 0.2713022760047652, + "grad_norm": 3.0550379753112793, + "learning_rate": 1.7099013659240385e-05, + "loss": 1.1557, + "step": 4327 + }, + { + "epoch": 0.2713649758605555, + "grad_norm": 3.1849279403686523, + "learning_rate": 1.709758324319377e-05, + "loss": 1.1333, + "step": 4328 + }, + { + "epoch": 0.27142767571634585, + "grad_norm": 2.9206268787384033, + "learning_rate": 1.7096152534442515e-05, + "loss": 1.03, + "step": 4329 + }, + { + "epoch": 0.2714903755721362, + "grad_norm": 2.8905043601989746, + "learning_rate": 1.7094721533045614e-05, + "loss": 1.21, + "step": 4330 + }, + { + "epoch": 0.2715530754279265, + "grad_norm": 3.122591018676758, + "learning_rate": 1.7093290239062094e-05, + "loss": 1.1018, + "step": 4331 + }, + { + "epoch": 0.27161577528371683, + "grad_norm": 3.0996623039245605, + "learning_rate": 1.709185865255097e-05, + "loss": 1.0647, + "step": 4332 + }, + { + "epoch": 0.27167847513950716, + "grad_norm": 2.718989133834839, + "learning_rate": 1.7090426773571287e-05, + "loss": 1.2036, + "step": 4333 + }, + { + "epoch": 0.2717411749952975, + "grad_norm": 3.1691372394561768, + "learning_rate": 1.7088994602182094e-05, + "loss": 1.1476, + "step": 4334 + }, + { + "epoch": 0.27180387485108787, + "grad_norm": 3.1215808391571045, + "learning_rate": 1.708756213844245e-05, + "loss": 1.0566, + "step": 4335 + }, + { + "epoch": 0.2718665747068782, + "grad_norm": 2.737459659576416, + "learning_rate": 1.7086129382411444e-05, + "loss": 1.1931, + "step": 4336 + }, + { + "epoch": 0.2719292745626685, + "grad_norm": 3.1557626724243164, + "learning_rate": 1.7084696334148147e-05, + "loss": 1.027, + "step": 4337 + }, + { + "epoch": 0.27199197441845885, + "grad_norm": 2.711893081665039, + "learning_rate": 1.7083262993711663e-05, + "loss": 1.2197, + "step": 4338 + }, + { + "epoch": 0.2720546742742492, + "grad_norm": 2.924787998199463, + "learning_rate": 1.7081829361161105e-05, + "loss": 1.0285, + "step": 4339 + }, + { + "epoch": 0.2721173741300395, + "grad_norm": 2.6772854328155518, + "learning_rate": 1.7080395436555593e-05, + "loss": 1.1835, + "step": 4340 + }, + { + "epoch": 0.27218007398582983, + "grad_norm": 2.891530752182007, + "learning_rate": 1.7078961219954263e-05, + "loss": 1.2211, + "step": 4341 + }, + { + "epoch": 0.27224277384162016, + "grad_norm": 3.1633431911468506, + "learning_rate": 1.7077526711416265e-05, + "loss": 1.2096, + "step": 4342 + }, + { + "epoch": 0.2723054736974105, + "grad_norm": 3.205242872238159, + "learning_rate": 1.707609191100076e-05, + "loss": 1.0024, + "step": 4343 + }, + { + "epoch": 0.2723681735532008, + "grad_norm": 2.9300472736358643, + "learning_rate": 1.7074656818766913e-05, + "loss": 1.1149, + "step": 4344 + }, + { + "epoch": 0.27243087340899114, + "grad_norm": 3.158977746963501, + "learning_rate": 1.707322143477391e-05, + "loss": 1.2701, + "step": 4345 + }, + { + "epoch": 0.27249357326478146, + "grad_norm": 2.892127752304077, + "learning_rate": 1.7071785759080943e-05, + "loss": 1.0199, + "step": 4346 + }, + { + "epoch": 0.27255627312057185, + "grad_norm": 3.138380765914917, + 
"learning_rate": 1.7070349791747224e-05, + "loss": 1.2197, + "step": 4347 + }, + { + "epoch": 0.2726189729763622, + "grad_norm": 2.8029143810272217, + "learning_rate": 1.706891353283197e-05, + "loss": 1.0133, + "step": 4348 + }, + { + "epoch": 0.2726816728321525, + "grad_norm": 2.981855630874634, + "learning_rate": 1.7067476982394416e-05, + "loss": 1.1379, + "step": 4349 + }, + { + "epoch": 0.2727443726879428, + "grad_norm": 2.7800846099853516, + "learning_rate": 1.7066040140493803e-05, + "loss": 1.206, + "step": 4350 + }, + { + "epoch": 0.27280707254373315, + "grad_norm": 3.096372365951538, + "learning_rate": 1.7064603007189383e-05, + "loss": 1.3079, + "step": 4351 + }, + { + "epoch": 0.2728697723995235, + "grad_norm": 2.8271005153656006, + "learning_rate": 1.706316558254043e-05, + "loss": 1.1238, + "step": 4352 + }, + { + "epoch": 0.2729324722553138, + "grad_norm": 3.0200443267822266, + "learning_rate": 1.7061727866606215e-05, + "loss": 1.1544, + "step": 4353 + }, + { + "epoch": 0.27299517211110413, + "grad_norm": 2.886338472366333, + "learning_rate": 1.706028985944604e-05, + "loss": 1.0763, + "step": 4354 + }, + { + "epoch": 0.27305787196689446, + "grad_norm": 2.9624197483062744, + "learning_rate": 1.7058851561119198e-05, + "loss": 1.1332, + "step": 4355 + }, + { + "epoch": 0.2731205718226848, + "grad_norm": 3.2070870399475098, + "learning_rate": 1.7057412971685013e-05, + "loss": 1.1434, + "step": 4356 + }, + { + "epoch": 0.2731832716784751, + "grad_norm": 2.9478554725646973, + "learning_rate": 1.705597409120281e-05, + "loss": 1.0156, + "step": 4357 + }, + { + "epoch": 0.2732459715342655, + "grad_norm": 2.901623010635376, + "learning_rate": 1.7054534919731922e-05, + "loss": 1.0852, + "step": 4358 + }, + { + "epoch": 0.2733086713900558, + "grad_norm": 2.7262096405029297, + "learning_rate": 1.705309545733171e-05, + "loss": 1.231, + "step": 4359 + }, + { + "epoch": 0.27337137124584615, + "grad_norm": 2.671295166015625, + "learning_rate": 1.7051655704061533e-05, + "loss": 1.142, + "step": 4360 + }, + { + "epoch": 0.2734340711016365, + "grad_norm": 3.1362125873565674, + "learning_rate": 1.7050215659980763e-05, + "loss": 1.0805, + "step": 4361 + }, + { + "epoch": 0.2734967709574268, + "grad_norm": 2.9943346977233887, + "learning_rate": 1.7048775325148793e-05, + "loss": 1.1584, + "step": 4362 + }, + { + "epoch": 0.27355947081321713, + "grad_norm": 3.1194992065429688, + "learning_rate": 1.7047334699625025e-05, + "loss": 1.2016, + "step": 4363 + }, + { + "epoch": 0.27362217066900746, + "grad_norm": 3.2792651653289795, + "learning_rate": 1.704589378346886e-05, + "loss": 1.1988, + "step": 4364 + }, + { + "epoch": 0.2736848705247978, + "grad_norm": 2.676126718521118, + "learning_rate": 1.704445257673973e-05, + "loss": 1.0589, + "step": 4365 + }, + { + "epoch": 0.2737475703805881, + "grad_norm": 3.2565717697143555, + "learning_rate": 1.704301107949707e-05, + "loss": 1.1409, + "step": 4366 + }, + { + "epoch": 0.27381027023637844, + "grad_norm": 3.2308104038238525, + "learning_rate": 1.7041569291800324e-05, + "loss": 1.189, + "step": 4367 + }, + { + "epoch": 0.27387297009216877, + "grad_norm": 2.905022382736206, + "learning_rate": 1.7040127213708954e-05, + "loss": 1.1119, + "step": 4368 + }, + { + "epoch": 0.2739356699479591, + "grad_norm": 3.200944185256958, + "learning_rate": 1.7038684845282428e-05, + "loss": 1.0365, + "step": 4369 + }, + { + "epoch": 0.2739983698037495, + "grad_norm": 3.023688793182373, + "learning_rate": 1.7037242186580235e-05, + "loss": 1.3364, + "step": 4370 + }, + { + "epoch": 
0.2740610696595398, + "grad_norm": 3.035581350326538, + "learning_rate": 1.7035799237661864e-05, + "loss": 1.1331, + "step": 4371 + }, + { + "epoch": 0.27412376951533013, + "grad_norm": 2.894770622253418, + "learning_rate": 1.7034355998586828e-05, + "loss": 1.2785, + "step": 4372 + }, + { + "epoch": 0.27418646937112046, + "grad_norm": 2.680304765701294, + "learning_rate": 1.7032912469414638e-05, + "loss": 1.1085, + "step": 4373 + }, + { + "epoch": 0.2742491692269108, + "grad_norm": 2.92120623588562, + "learning_rate": 1.7031468650204835e-05, + "loss": 1.2264, + "step": 4374 + }, + { + "epoch": 0.2743118690827011, + "grad_norm": 2.6918623447418213, + "learning_rate": 1.703002454101696e-05, + "loss": 1.1466, + "step": 4375 + }, + { + "epoch": 0.27437456893849144, + "grad_norm": 2.5292091369628906, + "learning_rate": 1.7028580141910562e-05, + "loss": 1.3227, + "step": 4376 + }, + { + "epoch": 0.27443726879428176, + "grad_norm": 2.536440134048462, + "learning_rate": 1.7027135452945215e-05, + "loss": 1.1372, + "step": 4377 + }, + { + "epoch": 0.2744999686500721, + "grad_norm": 2.983839988708496, + "learning_rate": 1.702569047418049e-05, + "loss": 1.1717, + "step": 4378 + }, + { + "epoch": 0.2745626685058624, + "grad_norm": 3.221409797668457, + "learning_rate": 1.7024245205675986e-05, + "loss": 1.0895, + "step": 4379 + }, + { + "epoch": 0.27462536836165274, + "grad_norm": 2.8046910762786865, + "learning_rate": 1.7022799647491304e-05, + "loss": 1.0078, + "step": 4380 + }, + { + "epoch": 0.2746880682174431, + "grad_norm": 2.852248430252075, + "learning_rate": 1.702135379968606e-05, + "loss": 1.1509, + "step": 4381 + }, + { + "epoch": 0.27475076807323345, + "grad_norm": 2.889815330505371, + "learning_rate": 1.701990766231987e-05, + "loss": 1.1373, + "step": 4382 + }, + { + "epoch": 0.2748134679290238, + "grad_norm": 3.066136598587036, + "learning_rate": 1.7018461235452387e-05, + "loss": 1.0486, + "step": 4383 + }, + { + "epoch": 0.2748761677848141, + "grad_norm": 3.0367789268493652, + "learning_rate": 1.701701451914326e-05, + "loss": 1.2702, + "step": 4384 + }, + { + "epoch": 0.27493886764060443, + "grad_norm": 3.0585033893585205, + "learning_rate": 1.701556751345214e-05, + "loss": 1.2562, + "step": 4385 + }, + { + "epoch": 0.27500156749639476, + "grad_norm": 3.1511406898498535, + "learning_rate": 1.7014120218438716e-05, + "loss": 1.0525, + "step": 4386 + }, + { + "epoch": 0.2750642673521851, + "grad_norm": 3.336883068084717, + "learning_rate": 1.7012672634162662e-05, + "loss": 1.2908, + "step": 4387 + }, + { + "epoch": 0.2751269672079754, + "grad_norm": 3.2604618072509766, + "learning_rate": 1.7011224760683688e-05, + "loss": 1.2477, + "step": 4388 + }, + { + "epoch": 0.27518966706376574, + "grad_norm": 3.032499074935913, + "learning_rate": 1.7009776598061496e-05, + "loss": 1.0172, + "step": 4389 + }, + { + "epoch": 0.27525236691955607, + "grad_norm": 2.974562406539917, + "learning_rate": 1.7008328146355812e-05, + "loss": 1.2811, + "step": 4390 + }, + { + "epoch": 0.2753150667753464, + "grad_norm": 3.2020578384399414, + "learning_rate": 1.7006879405626366e-05, + "loss": 1.2288, + "step": 4391 + }, + { + "epoch": 0.2753777666311367, + "grad_norm": 3.0780441761016846, + "learning_rate": 1.700543037593291e-05, + "loss": 1.2653, + "step": 4392 + }, + { + "epoch": 0.2754404664869271, + "grad_norm": 3.058246612548828, + "learning_rate": 1.7003981057335203e-05, + "loss": 1.1142, + "step": 4393 + }, + { + "epoch": 0.27550316634271743, + "grad_norm": 3.1451244354248047, + "learning_rate": 
1.7002531449893005e-05, + "loss": 1.0073, + "step": 4394 + }, + { + "epoch": 0.27556586619850776, + "grad_norm": 2.8379383087158203, + "learning_rate": 1.7001081553666105e-05, + "loss": 1.194, + "step": 4395 + }, + { + "epoch": 0.2756285660542981, + "grad_norm": 3.1820998191833496, + "learning_rate": 1.6999631368714296e-05, + "loss": 1.2855, + "step": 4396 + }, + { + "epoch": 0.2756912659100884, + "grad_norm": 3.0860953330993652, + "learning_rate": 1.699818089509739e-05, + "loss": 1.2362, + "step": 4397 + }, + { + "epoch": 0.27575396576587874, + "grad_norm": 3.3867626190185547, + "learning_rate": 1.699673013287519e-05, + "loss": 1.0052, + "step": 4398 + }, + { + "epoch": 0.27581666562166907, + "grad_norm": 3.404193162918091, + "learning_rate": 1.6995279082107537e-05, + "loss": 1.241, + "step": 4399 + }, + { + "epoch": 0.2758793654774594, + "grad_norm": 2.57403826713562, + "learning_rate": 1.6993827742854268e-05, + "loss": 1.227, + "step": 4400 + }, + { + "epoch": 0.2759420653332497, + "grad_norm": 2.9391446113586426, + "learning_rate": 1.6992376115175242e-05, + "loss": 1.1468, + "step": 4401 + }, + { + "epoch": 0.27600476518904005, + "grad_norm": 2.884242534637451, + "learning_rate": 1.6990924199130316e-05, + "loss": 1.1488, + "step": 4402 + }, + { + "epoch": 0.2760674650448304, + "grad_norm": 2.8090336322784424, + "learning_rate": 1.698947199477937e-05, + "loss": 1.0511, + "step": 4403 + }, + { + "epoch": 0.27613016490062076, + "grad_norm": 2.8849971294403076, + "learning_rate": 1.6988019502182296e-05, + "loss": 1.2892, + "step": 4404 + }, + { + "epoch": 0.2761928647564111, + "grad_norm": 3.0294666290283203, + "learning_rate": 1.6986566721398992e-05, + "loss": 1.1451, + "step": 4405 + }, + { + "epoch": 0.2762555646122014, + "grad_norm": 3.2821121215820312, + "learning_rate": 1.6985113652489374e-05, + "loss": 1.2776, + "step": 4406 + }, + { + "epoch": 0.27631826446799174, + "grad_norm": 3.213494300842285, + "learning_rate": 1.6983660295513358e-05, + "loss": 1.2964, + "step": 4407 + }, + { + "epoch": 0.27638096432378206, + "grad_norm": 3.2257351875305176, + "learning_rate": 1.6982206650530886e-05, + "loss": 1.2383, + "step": 4408 + }, + { + "epoch": 0.2764436641795724, + "grad_norm": 2.8468074798583984, + "learning_rate": 1.6980752717601912e-05, + "loss": 1.1334, + "step": 4409 + }, + { + "epoch": 0.2765063640353627, + "grad_norm": 2.740835189819336, + "learning_rate": 1.697929849678639e-05, + "loss": 1.0571, + "step": 4410 + }, + { + "epoch": 0.27656906389115304, + "grad_norm": 3.000371217727661, + "learning_rate": 1.6977843988144292e-05, + "loss": 1.166, + "step": 4411 + }, + { + "epoch": 0.27663176374694337, + "grad_norm": 3.2667148113250732, + "learning_rate": 1.6976389191735603e-05, + "loss": 1.3344, + "step": 4412 + }, + { + "epoch": 0.2766944636027337, + "grad_norm": 3.0326077938079834, + "learning_rate": 1.6974934107620322e-05, + "loss": 1.3745, + "step": 4413 + }, + { + "epoch": 0.276757163458524, + "grad_norm": 2.9280014038085938, + "learning_rate": 1.697347873585845e-05, + "loss": 1.2135, + "step": 4414 + }, + { + "epoch": 0.27681986331431435, + "grad_norm": 2.879906177520752, + "learning_rate": 1.6972023076510015e-05, + "loss": 1.1466, + "step": 4415 + }, + { + "epoch": 0.27688256317010473, + "grad_norm": 3.0486044883728027, + "learning_rate": 1.6970567129635038e-05, + "loss": 0.9814, + "step": 4416 + }, + { + "epoch": 0.27694526302589506, + "grad_norm": 2.905524730682373, + "learning_rate": 1.696911089529357e-05, + "loss": 1.149, + "step": 4417 + }, + { + "epoch": 
0.2770079628816854, + "grad_norm": 3.0502371788024902, + "learning_rate": 1.6967654373545664e-05, + "loss": 1.1278, + "step": 4418 + }, + { + "epoch": 0.2770706627374757, + "grad_norm": 2.934122085571289, + "learning_rate": 1.696619756445139e-05, + "loss": 1.0977, + "step": 4419 + }, + { + "epoch": 0.27713336259326604, + "grad_norm": 3.2001123428344727, + "learning_rate": 1.6964740468070823e-05, + "loss": 1.071, + "step": 4420 + }, + { + "epoch": 0.27719606244905637, + "grad_norm": 2.9861598014831543, + "learning_rate": 1.6963283084464056e-05, + "loss": 1.1706, + "step": 4421 + }, + { + "epoch": 0.2772587623048467, + "grad_norm": 3.245037794113159, + "learning_rate": 1.696182541369119e-05, + "loss": 1.0078, + "step": 4422 + }, + { + "epoch": 0.277321462160637, + "grad_norm": 3.2515010833740234, + "learning_rate": 1.6960367455812336e-05, + "loss": 1.0321, + "step": 4423 + }, + { + "epoch": 0.27738416201642735, + "grad_norm": 2.923870801925659, + "learning_rate": 1.6958909210887632e-05, + "loss": 1.264, + "step": 4424 + }, + { + "epoch": 0.2774468618722177, + "grad_norm": 2.9205567836761475, + "learning_rate": 1.69574506789772e-05, + "loss": 1.2076, + "step": 4425 + }, + { + "epoch": 0.277509561728008, + "grad_norm": 2.982541561126709, + "learning_rate": 1.6955991860141203e-05, + "loss": 1.1347, + "step": 4426 + }, + { + "epoch": 0.27757226158379833, + "grad_norm": 3.23974347114563, + "learning_rate": 1.6954532754439797e-05, + "loss": 1.0718, + "step": 4427 + }, + { + "epoch": 0.2776349614395887, + "grad_norm": 3.1522974967956543, + "learning_rate": 1.6953073361933158e-05, + "loss": 1.1552, + "step": 4428 + }, + { + "epoch": 0.27769766129537904, + "grad_norm": 3.170963764190674, + "learning_rate": 1.6951613682681465e-05, + "loss": 1.1438, + "step": 4429 + }, + { + "epoch": 0.27776036115116937, + "grad_norm": 3.02669620513916, + "learning_rate": 1.6950153716744925e-05, + "loss": 1.2162, + "step": 4430 + }, + { + "epoch": 0.2778230610069597, + "grad_norm": 2.842099666595459, + "learning_rate": 1.694869346418374e-05, + "loss": 1.1198, + "step": 4431 + }, + { + "epoch": 0.27788576086275, + "grad_norm": 2.8098032474517822, + "learning_rate": 1.6947232925058133e-05, + "loss": 1.1793, + "step": 4432 + }, + { + "epoch": 0.27794846071854035, + "grad_norm": 2.790818214416504, + "learning_rate": 1.6945772099428335e-05, + "loss": 1.278, + "step": 4433 + }, + { + "epoch": 0.2780111605743307, + "grad_norm": 2.782705545425415, + "learning_rate": 1.6944310987354597e-05, + "loss": 1.1899, + "step": 4434 + }, + { + "epoch": 0.278073860430121, + "grad_norm": 3.140209674835205, + "learning_rate": 1.6942849588897167e-05, + "loss": 1.2275, + "step": 4435 + }, + { + "epoch": 0.2781365602859113, + "grad_norm": 2.729440212249756, + "learning_rate": 1.6941387904116318e-05, + "loss": 1.1545, + "step": 4436 + }, + { + "epoch": 0.27819926014170165, + "grad_norm": 2.9205400943756104, + "learning_rate": 1.693992593307233e-05, + "loss": 1.1958, + "step": 4437 + }, + { + "epoch": 0.278261959997492, + "grad_norm": 2.9372873306274414, + "learning_rate": 1.693846367582549e-05, + "loss": 1.2507, + "step": 4438 + }, + { + "epoch": 0.27832465985328236, + "grad_norm": 3.0629565715789795, + "learning_rate": 1.693700113243611e-05, + "loss": 1.1064, + "step": 4439 + }, + { + "epoch": 0.2783873597090727, + "grad_norm": 2.945110321044922, + "learning_rate": 1.6935538302964496e-05, + "loss": 1.117, + "step": 4440 + }, + { + "epoch": 0.278450059564863, + "grad_norm": 2.695836305618286, + "learning_rate": 1.693407518747098e-05, + "loss": 
1.3363, + "step": 4441 + }, + { + "epoch": 0.27851275942065334, + "grad_norm": 3.23099422454834, + "learning_rate": 1.6932611786015904e-05, + "loss": 1.3186, + "step": 4442 + }, + { + "epoch": 0.27857545927644367, + "grad_norm": 2.780022621154785, + "learning_rate": 1.6931148098659612e-05, + "loss": 1.1237, + "step": 4443 + }, + { + "epoch": 0.278638159132234, + "grad_norm": 2.666083574295044, + "learning_rate": 1.692968412546247e-05, + "loss": 1.161, + "step": 4444 + }, + { + "epoch": 0.2787008589880243, + "grad_norm": 2.919342517852783, + "learning_rate": 1.6928219866484853e-05, + "loss": 0.9632, + "step": 4445 + }, + { + "epoch": 0.27876355884381465, + "grad_norm": 2.8833110332489014, + "learning_rate": 1.6926755321787145e-05, + "loss": 1.1009, + "step": 4446 + }, + { + "epoch": 0.278826258699605, + "grad_norm": 2.993332862854004, + "learning_rate": 1.6925290491429747e-05, + "loss": 1.2455, + "step": 4447 + }, + { + "epoch": 0.2788889585553953, + "grad_norm": 2.8831074237823486, + "learning_rate": 1.6923825375473064e-05, + "loss": 1.1182, + "step": 4448 + }, + { + "epoch": 0.27895165841118563, + "grad_norm": 3.019561290740967, + "learning_rate": 1.692235997397752e-05, + "loss": 1.1887, + "step": 4449 + }, + { + "epoch": 0.27901435826697596, + "grad_norm": 2.847487688064575, + "learning_rate": 1.692089428700355e-05, + "loss": 1.2386, + "step": 4450 + }, + { + "epoch": 0.27907705812276634, + "grad_norm": 2.8887927532196045, + "learning_rate": 1.6919428314611597e-05, + "loss": 1.1156, + "step": 4451 + }, + { + "epoch": 0.27913975797855667, + "grad_norm": 3.254026412963867, + "learning_rate": 1.6917962056862117e-05, + "loss": 1.1385, + "step": 4452 + }, + { + "epoch": 0.279202457834347, + "grad_norm": 3.121262311935425, + "learning_rate": 1.6916495513815582e-05, + "loss": 1.0927, + "step": 4453 + }, + { + "epoch": 0.2792651576901373, + "grad_norm": 2.971618890762329, + "learning_rate": 1.691502868553247e-05, + "loss": 1.2077, + "step": 4454 + }, + { + "epoch": 0.27932785754592765, + "grad_norm": 3.232581853866577, + "learning_rate": 1.6913561572073273e-05, + "loss": 1.0317, + "step": 4455 + }, + { + "epoch": 0.279390557401718, + "grad_norm": 3.3173725605010986, + "learning_rate": 1.691209417349849e-05, + "loss": 1.1835, + "step": 4456 + }, + { + "epoch": 0.2794532572575083, + "grad_norm": 3.1067962646484375, + "learning_rate": 1.691062648986865e-05, + "loss": 1.1428, + "step": 4457 + }, + { + "epoch": 0.27951595711329863, + "grad_norm": 3.03047251701355, + "learning_rate": 1.6909158521244268e-05, + "loss": 1.2266, + "step": 4458 + }, + { + "epoch": 0.27957865696908896, + "grad_norm": 2.790215492248535, + "learning_rate": 1.6907690267685884e-05, + "loss": 1.06, + "step": 4459 + }, + { + "epoch": 0.2796413568248793, + "grad_norm": 2.773446798324585, + "learning_rate": 1.6906221729254054e-05, + "loss": 1.0633, + "step": 4460 + }, + { + "epoch": 0.2797040566806696, + "grad_norm": 3.2135026454925537, + "learning_rate": 1.6904752906009337e-05, + "loss": 1.1445, + "step": 4461 + }, + { + "epoch": 0.27976675653646, + "grad_norm": 2.9662511348724365, + "learning_rate": 1.690328379801231e-05, + "loss": 1.0507, + "step": 4462 + }, + { + "epoch": 0.2798294563922503, + "grad_norm": 2.805638551712036, + "learning_rate": 1.6901814405323554e-05, + "loss": 1.0613, + "step": 4463 + }, + { + "epoch": 0.27989215624804065, + "grad_norm": 2.9208476543426514, + "learning_rate": 1.6900344728003678e-05, + "loss": 1.0132, + "step": 4464 + }, + { + "epoch": 0.279954856103831, + "grad_norm": 2.9862842559814453, + 
"learning_rate": 1.689887476611328e-05, + "loss": 1.2956, + "step": 4465 + }, + { + "epoch": 0.2800175559596213, + "grad_norm": 2.674433946609497, + "learning_rate": 1.6897404519712982e-05, + "loss": 1.1527, + "step": 4466 + }, + { + "epoch": 0.2800802558154116, + "grad_norm": 2.9807560443878174, + "learning_rate": 1.6895933988863426e-05, + "loss": 1.2625, + "step": 4467 + }, + { + "epoch": 0.28014295567120195, + "grad_norm": 2.8350279331207275, + "learning_rate": 1.6894463173625245e-05, + "loss": 1.108, + "step": 4468 + }, + { + "epoch": 0.2802056555269923, + "grad_norm": 2.9713635444641113, + "learning_rate": 1.689299207405911e-05, + "loss": 1.3065, + "step": 4469 + }, + { + "epoch": 0.2802683553827826, + "grad_norm": 3.0380043983459473, + "learning_rate": 1.6891520690225672e-05, + "loss": 1.2174, + "step": 4470 + }, + { + "epoch": 0.28033105523857293, + "grad_norm": 2.988345146179199, + "learning_rate": 1.6890049022185626e-05, + "loss": 1.1168, + "step": 4471 + }, + { + "epoch": 0.28039375509436326, + "grad_norm": 2.977421283721924, + "learning_rate": 1.6888577069999653e-05, + "loss": 1.1422, + "step": 4472 + }, + { + "epoch": 0.2804564549501536, + "grad_norm": 2.8486826419830322, + "learning_rate": 1.6887104833728466e-05, + "loss": 1.1934, + "step": 4473 + }, + { + "epoch": 0.28051915480594397, + "grad_norm": 2.8344433307647705, + "learning_rate": 1.6885632313432772e-05, + "loss": 1.18, + "step": 4474 + }, + { + "epoch": 0.2805818546617343, + "grad_norm": 2.881894111633301, + "learning_rate": 1.68841595091733e-05, + "loss": 1.1997, + "step": 4475 + }, + { + "epoch": 0.2806445545175246, + "grad_norm": 3.286658525466919, + "learning_rate": 1.688268642101079e-05, + "loss": 1.2177, + "step": 4476 + }, + { + "epoch": 0.28070725437331495, + "grad_norm": 2.837334156036377, + "learning_rate": 1.6881213049005995e-05, + "loss": 1.233, + "step": 4477 + }, + { + "epoch": 0.2807699542291053, + "grad_norm": 2.776597499847412, + "learning_rate": 1.6879739393219667e-05, + "loss": 1.1289, + "step": 4478 + }, + { + "epoch": 0.2808326540848956, + "grad_norm": 3.239694833755493, + "learning_rate": 1.6878265453712587e-05, + "loss": 1.2128, + "step": 4479 + }, + { + "epoch": 0.28089535394068593, + "grad_norm": 2.9242022037506104, + "learning_rate": 1.6876791230545542e-05, + "loss": 1.1764, + "step": 4480 + }, + { + "epoch": 0.28095805379647626, + "grad_norm": 2.8256568908691406, + "learning_rate": 1.6875316723779328e-05, + "loss": 1.1736, + "step": 4481 + }, + { + "epoch": 0.2810207536522666, + "grad_norm": 3.224274158477783, + "learning_rate": 1.687384193347475e-05, + "loss": 1.0306, + "step": 4482 + }, + { + "epoch": 0.2810834535080569, + "grad_norm": 2.662553071975708, + "learning_rate": 1.687236685969263e-05, + "loss": 1.1398, + "step": 4483 + }, + { + "epoch": 0.28114615336384724, + "grad_norm": 2.8317644596099854, + "learning_rate": 1.68708915024938e-05, + "loss": 1.1607, + "step": 4484 + }, + { + "epoch": 0.2812088532196376, + "grad_norm": 2.9601125717163086, + "learning_rate": 1.6869415861939103e-05, + "loss": 1.1213, + "step": 4485 + }, + { + "epoch": 0.28127155307542795, + "grad_norm": 3.065079927444458, + "learning_rate": 1.68679399380894e-05, + "loss": 1.1933, + "step": 4486 + }, + { + "epoch": 0.2813342529312183, + "grad_norm": 2.587083578109741, + "learning_rate": 1.686646373100555e-05, + "loss": 1.1954, + "step": 4487 + }, + { + "epoch": 0.2813969527870086, + "grad_norm": 2.7354736328125, + "learning_rate": 1.6864987240748437e-05, + "loss": 1.2626, + "step": 4488 + }, + { + "epoch": 
0.28145965264279893, + "grad_norm": 3.198300838470459, + "learning_rate": 1.6863510467378953e-05, + "loss": 1.2069, + "step": 4489 + }, + { + "epoch": 0.28152235249858926, + "grad_norm": 2.876214027404785, + "learning_rate": 1.6862033410957996e-05, + "loss": 1.345, + "step": 4490 + }, + { + "epoch": 0.2815850523543796, + "grad_norm": 2.903899908065796, + "learning_rate": 1.686055607154648e-05, + "loss": 1.0839, + "step": 4491 + }, + { + "epoch": 0.2816477522101699, + "grad_norm": 3.005488157272339, + "learning_rate": 1.6859078449205335e-05, + "loss": 1.2028, + "step": 4492 + }, + { + "epoch": 0.28171045206596024, + "grad_norm": 3.0465266704559326, + "learning_rate": 1.6857600543995492e-05, + "loss": 1.1501, + "step": 4493 + }, + { + "epoch": 0.28177315192175056, + "grad_norm": 2.9591758251190186, + "learning_rate": 1.6856122355977905e-05, + "loss": 1.128, + "step": 4494 + }, + { + "epoch": 0.2818358517775409, + "grad_norm": 2.9454426765441895, + "learning_rate": 1.685464388521353e-05, + "loss": 1.2361, + "step": 4495 + }, + { + "epoch": 0.2818985516333312, + "grad_norm": 2.5392301082611084, + "learning_rate": 1.685316513176335e-05, + "loss": 1.1404, + "step": 4496 + }, + { + "epoch": 0.2819612514891216, + "grad_norm": 3.156041145324707, + "learning_rate": 1.685168609568833e-05, + "loss": 1.1796, + "step": 4497 + }, + { + "epoch": 0.2820239513449119, + "grad_norm": 2.6976358890533447, + "learning_rate": 1.685020677704948e-05, + "loss": 1.2687, + "step": 4498 + }, + { + "epoch": 0.28208665120070225, + "grad_norm": 3.0083742141723633, + "learning_rate": 1.6848727175907805e-05, + "loss": 1.1483, + "step": 4499 + }, + { + "epoch": 0.2821493510564926, + "grad_norm": 3.0284605026245117, + "learning_rate": 1.6847247292324323e-05, + "loss": 1.108, + "step": 4500 + }, + { + "epoch": 0.2822120509122829, + "grad_norm": 2.959456443786621, + "learning_rate": 1.684576712636006e-05, + "loss": 0.9842, + "step": 4501 + }, + { + "epoch": 0.28227475076807323, + "grad_norm": 3.171400308609009, + "learning_rate": 1.6844286678076067e-05, + "loss": 1.181, + "step": 4502 + }, + { + "epoch": 0.28233745062386356, + "grad_norm": 3.076241970062256, + "learning_rate": 1.684280594753339e-05, + "loss": 1.1881, + "step": 4503 + }, + { + "epoch": 0.2824001504796539, + "grad_norm": 2.709653615951538, + "learning_rate": 1.6841324934793096e-05, + "loss": 1.1242, + "step": 4504 + }, + { + "epoch": 0.2824628503354442, + "grad_norm": 3.4272067546844482, + "learning_rate": 1.683984363991626e-05, + "loss": 0.9353, + "step": 4505 + }, + { + "epoch": 0.28252555019123454, + "grad_norm": 3.000345230102539, + "learning_rate": 1.6838362062963978e-05, + "loss": 1.0662, + "step": 4506 + }, + { + "epoch": 0.28258825004702487, + "grad_norm": 3.035567283630371, + "learning_rate": 1.6836880203997345e-05, + "loss": 1.2613, + "step": 4507 + }, + { + "epoch": 0.2826509499028152, + "grad_norm": 2.961183786392212, + "learning_rate": 1.6835398063077476e-05, + "loss": 1.1061, + "step": 4508 + }, + { + "epoch": 0.2827136497586056, + "grad_norm": 3.1843338012695312, + "learning_rate": 1.6833915640265485e-05, + "loss": 1.231, + "step": 4509 + }, + { + "epoch": 0.2827763496143959, + "grad_norm": 3.2502143383026123, + "learning_rate": 1.683243293562252e-05, + "loss": 1.1865, + "step": 4510 + }, + { + "epoch": 0.28283904947018623, + "grad_norm": 2.802988052368164, + "learning_rate": 1.6830949949209724e-05, + "loss": 1.1949, + "step": 4511 + }, + { + "epoch": 0.28290174932597656, + "grad_norm": 3.083589792251587, + "learning_rate": 1.682946668108825e-05, + 
"loss": 1.1427, + "step": 4512 + }, + { + "epoch": 0.2829644491817669, + "grad_norm": 3.001082181930542, + "learning_rate": 1.6827983131319278e-05, + "loss": 1.0375, + "step": 4513 + }, + { + "epoch": 0.2830271490375572, + "grad_norm": 3.181408405303955, + "learning_rate": 1.6826499299963975e-05, + "loss": 1.1645, + "step": 4514 + }, + { + "epoch": 0.28308984889334754, + "grad_norm": 2.9928627014160156, + "learning_rate": 1.6825015187083547e-05, + "loss": 1.1441, + "step": 4515 + }, + { + "epoch": 0.28315254874913787, + "grad_norm": 3.1813406944274902, + "learning_rate": 1.6823530792739197e-05, + "loss": 1.0804, + "step": 4516 + }, + { + "epoch": 0.2832152486049282, + "grad_norm": 3.1195900440216064, + "learning_rate": 1.682204611699213e-05, + "loss": 1.0635, + "step": 4517 + }, + { + "epoch": 0.2832779484607185, + "grad_norm": 2.915759325027466, + "learning_rate": 1.6820561159903594e-05, + "loss": 1.1325, + "step": 4518 + }, + { + "epoch": 0.28334064831650885, + "grad_norm": 3.0412278175354004, + "learning_rate": 1.6819075921534812e-05, + "loss": 1.1595, + "step": 4519 + }, + { + "epoch": 0.28340334817229923, + "grad_norm": 3.1268157958984375, + "learning_rate": 1.6817590401947043e-05, + "loss": 1.1604, + "step": 4520 + }, + { + "epoch": 0.28346604802808956, + "grad_norm": 2.845003366470337, + "learning_rate": 1.6816104601201547e-05, + "loss": 1.1125, + "step": 4521 + }, + { + "epoch": 0.2835287478838799, + "grad_norm": 3.1417598724365234, + "learning_rate": 1.6814618519359603e-05, + "loss": 1.1738, + "step": 4522 + }, + { + "epoch": 0.2835914477396702, + "grad_norm": 3.2165656089782715, + "learning_rate": 1.681313215648249e-05, + "loss": 1.1915, + "step": 4523 + }, + { + "epoch": 0.28365414759546054, + "grad_norm": 2.857635736465454, + "learning_rate": 1.6811645512631512e-05, + "loss": 1.1492, + "step": 4524 + }, + { + "epoch": 0.28371684745125086, + "grad_norm": 2.792860507965088, + "learning_rate": 1.6810158587867973e-05, + "loss": 1.3245, + "step": 4525 + }, + { + "epoch": 0.2837795473070412, + "grad_norm": 3.0166072845458984, + "learning_rate": 1.6808671382253197e-05, + "loss": 1.0094, + "step": 4526 + }, + { + "epoch": 0.2838422471628315, + "grad_norm": 2.9877076148986816, + "learning_rate": 1.6807183895848517e-05, + "loss": 1.2728, + "step": 4527 + }, + { + "epoch": 0.28390494701862184, + "grad_norm": 3.0435080528259277, + "learning_rate": 1.6805696128715276e-05, + "loss": 1.0963, + "step": 4528 + }, + { + "epoch": 0.28396764687441217, + "grad_norm": 3.0872864723205566, + "learning_rate": 1.6804208080914824e-05, + "loss": 0.8994, + "step": 4529 + }, + { + "epoch": 0.2840303467302025, + "grad_norm": 3.0516180992126465, + "learning_rate": 1.6802719752508542e-05, + "loss": 1.3227, + "step": 4530 + }, + { + "epoch": 0.2840930465859928, + "grad_norm": 3.022932529449463, + "learning_rate": 1.6801231143557796e-05, + "loss": 1.1441, + "step": 4531 + }, + { + "epoch": 0.2841557464417832, + "grad_norm": 2.646162986755371, + "learning_rate": 1.679974225412398e-05, + "loss": 1.1977, + "step": 4532 + }, + { + "epoch": 0.28421844629757353, + "grad_norm": 2.4684059619903564, + "learning_rate": 1.67982530842685e-05, + "loss": 1.1994, + "step": 4533 + }, + { + "epoch": 0.28428114615336386, + "grad_norm": 3.0111794471740723, + "learning_rate": 1.6796763634052764e-05, + "loss": 1.2874, + "step": 4534 + }, + { + "epoch": 0.2843438460091542, + "grad_norm": 2.8143110275268555, + "learning_rate": 1.6795273903538198e-05, + "loss": 1.231, + "step": 4535 + }, + { + "epoch": 0.2844065458649445, + 
"grad_norm": 2.680699348449707, + "learning_rate": 1.679378389278624e-05, + "loss": 1.0944, + "step": 4536 + }, + { + "epoch": 0.28446924572073484, + "grad_norm": 2.8674428462982178, + "learning_rate": 1.6792293601858335e-05, + "loss": 1.3086, + "step": 4537 + }, + { + "epoch": 0.28453194557652517, + "grad_norm": 3.0217268466949463, + "learning_rate": 1.679080303081595e-05, + "loss": 1.0056, + "step": 4538 + }, + { + "epoch": 0.2845946454323155, + "grad_norm": 2.920543670654297, + "learning_rate": 1.678931217972055e-05, + "loss": 1.2385, + "step": 4539 + }, + { + "epoch": 0.2846573452881058, + "grad_norm": 3.214421272277832, + "learning_rate": 1.6787821048633616e-05, + "loss": 1.1838, + "step": 4540 + }, + { + "epoch": 0.28472004514389615, + "grad_norm": 3.0145063400268555, + "learning_rate": 1.678632963761665e-05, + "loss": 1.0896, + "step": 4541 + }, + { + "epoch": 0.2847827449996865, + "grad_norm": 3.161940574645996, + "learning_rate": 1.6784837946731148e-05, + "loss": 1.2336, + "step": 4542 + }, + { + "epoch": 0.28484544485547686, + "grad_norm": 2.923471689224243, + "learning_rate": 1.678334597603864e-05, + "loss": 1.1325, + "step": 4543 + }, + { + "epoch": 0.2849081447112672, + "grad_norm": 3.1122822761535645, + "learning_rate": 1.6781853725600645e-05, + "loss": 1.2352, + "step": 4544 + }, + { + "epoch": 0.2849708445670575, + "grad_norm": 2.917957067489624, + "learning_rate": 1.6780361195478703e-05, + "loss": 1.0932, + "step": 4545 + }, + { + "epoch": 0.28503354442284784, + "grad_norm": 2.9039528369903564, + "learning_rate": 1.677886838573437e-05, + "loss": 1.1558, + "step": 4546 + }, + { + "epoch": 0.28509624427863817, + "grad_norm": 2.8528692722320557, + "learning_rate": 1.6777375296429213e-05, + "loss": 1.233, + "step": 4547 + }, + { + "epoch": 0.2851589441344285, + "grad_norm": 2.925050735473633, + "learning_rate": 1.6775881927624796e-05, + "loss": 1.1368, + "step": 4548 + }, + { + "epoch": 0.2852216439902188, + "grad_norm": 2.802098274230957, + "learning_rate": 1.677438827938272e-05, + "loss": 1.1378, + "step": 4549 + }, + { + "epoch": 0.28528434384600915, + "grad_norm": 3.330643892288208, + "learning_rate": 1.677289435176457e-05, + "loss": 1.0557, + "step": 4550 + }, + { + "epoch": 0.2853470437017995, + "grad_norm": 2.74886155128479, + "learning_rate": 1.677140014483196e-05, + "loss": 1.094, + "step": 4551 + }, + { + "epoch": 0.2854097435575898, + "grad_norm": 3.0702035427093506, + "learning_rate": 1.676990565864652e-05, + "loss": 1.223, + "step": 4552 + }, + { + "epoch": 0.28547244341338013, + "grad_norm": 2.6440718173980713, + "learning_rate": 1.6768410893269868e-05, + "loss": 1.1158, + "step": 4553 + }, + { + "epoch": 0.28553514326917045, + "grad_norm": 3.0265848636627197, + "learning_rate": 1.6766915848763657e-05, + "loss": 1.0251, + "step": 4554 + }, + { + "epoch": 0.28559784312496084, + "grad_norm": 3.208881378173828, + "learning_rate": 1.6765420525189543e-05, + "loss": 1.0083, + "step": 4555 + }, + { + "epoch": 0.28566054298075116, + "grad_norm": 3.040510892868042, + "learning_rate": 1.6763924922609187e-05, + "loss": 1.1344, + "step": 4556 + }, + { + "epoch": 0.2857232428365415, + "grad_norm": 3.227999210357666, + "learning_rate": 1.6762429041084276e-05, + "loss": 1.1639, + "step": 4557 + }, + { + "epoch": 0.2857859426923318, + "grad_norm": 3.2542998790740967, + "learning_rate": 1.676093288067649e-05, + "loss": 1.095, + "step": 4558 + }, + { + "epoch": 0.28584864254812214, + "grad_norm": 3.030372142791748, + "learning_rate": 1.6759436441447544e-05, + "loss": 0.9865, + 
"step": 4559 + }, + { + "epoch": 0.28591134240391247, + "grad_norm": 3.12097430229187, + "learning_rate": 1.6757939723459142e-05, + "loss": 1.0975, + "step": 4560 + }, + { + "epoch": 0.2859740422597028, + "grad_norm": 3.0575883388519287, + "learning_rate": 1.675644272677301e-05, + "loss": 1.1154, + "step": 4561 + }, + { + "epoch": 0.2860367421154931, + "grad_norm": 2.939202070236206, + "learning_rate": 1.6754945451450883e-05, + "loss": 1.1989, + "step": 4562 + }, + { + "epoch": 0.28609944197128345, + "grad_norm": 3.114699363708496, + "learning_rate": 1.6753447897554512e-05, + "loss": 1.1529, + "step": 4563 + }, + { + "epoch": 0.2861621418270738, + "grad_norm": 2.975552558898926, + "learning_rate": 1.6751950065145658e-05, + "loss": 1.0571, + "step": 4564 + }, + { + "epoch": 0.2862248416828641, + "grad_norm": 3.1264233589172363, + "learning_rate": 1.6750451954286083e-05, + "loss": 1.1336, + "step": 4565 + }, + { + "epoch": 0.2862875415386545, + "grad_norm": 2.984203815460205, + "learning_rate": 1.674895356503758e-05, + "loss": 1.1396, + "step": 4566 + }, + { + "epoch": 0.2863502413944448, + "grad_norm": 2.8210628032684326, + "learning_rate": 1.674745489746193e-05, + "loss": 1.2109, + "step": 4567 + }, + { + "epoch": 0.28641294125023514, + "grad_norm": 3.122283935546875, + "learning_rate": 1.6745955951620952e-05, + "loss": 1.176, + "step": 4568 + }, + { + "epoch": 0.28647564110602547, + "grad_norm": 2.963357925415039, + "learning_rate": 1.674445672757645e-05, + "loss": 1.237, + "step": 4569 + }, + { + "epoch": 0.2865383409618158, + "grad_norm": 3.2389042377471924, + "learning_rate": 1.6742957225390264e-05, + "loss": 1.1071, + "step": 4570 + }, + { + "epoch": 0.2866010408176061, + "grad_norm": 3.311884641647339, + "learning_rate": 1.6741457445124224e-05, + "loss": 1.0814, + "step": 4571 + }, + { + "epoch": 0.28666374067339645, + "grad_norm": 3.075576066970825, + "learning_rate": 1.6739957386840186e-05, + "loss": 1.1916, + "step": 4572 + }, + { + "epoch": 0.2867264405291868, + "grad_norm": 3.0980184078216553, + "learning_rate": 1.673845705060001e-05, + "loss": 1.0676, + "step": 4573 + }, + { + "epoch": 0.2867891403849771, + "grad_norm": 3.3326022624969482, + "learning_rate": 1.6736956436465573e-05, + "loss": 1.127, + "step": 4574 + }, + { + "epoch": 0.28685184024076743, + "grad_norm": 3.376507520675659, + "learning_rate": 1.6735455544498758e-05, + "loss": 1.1414, + "step": 4575 + }, + { + "epoch": 0.28691454009655776, + "grad_norm": 2.786715030670166, + "learning_rate": 1.673395437476146e-05, + "loss": 1.1681, + "step": 4576 + }, + { + "epoch": 0.2869772399523481, + "grad_norm": 3.0584375858306885, + "learning_rate": 1.6732452927315592e-05, + "loss": 1.1208, + "step": 4577 + }, + { + "epoch": 0.28703993980813847, + "grad_norm": 2.991140842437744, + "learning_rate": 1.673095120222307e-05, + "loss": 1.0443, + "step": 4578 + }, + { + "epoch": 0.2871026396639288, + "grad_norm": 3.0195510387420654, + "learning_rate": 1.6729449199545828e-05, + "loss": 1.1242, + "step": 4579 + }, + { + "epoch": 0.2871653395197191, + "grad_norm": 2.948024034500122, + "learning_rate": 1.6727946919345802e-05, + "loss": 1.0386, + "step": 4580 + }, + { + "epoch": 0.28722803937550945, + "grad_norm": 2.426767587661743, + "learning_rate": 1.6726444361684956e-05, + "loss": 1.2497, + "step": 4581 + }, + { + "epoch": 0.2872907392312998, + "grad_norm": 3.057924509048462, + "learning_rate": 1.672494152662525e-05, + "loss": 1.2657, + "step": 4582 + }, + { + "epoch": 0.2873534390870901, + "grad_norm": 3.0006661415100098, + 
"learning_rate": 1.6723438414228663e-05, + "loss": 1.1534, + "step": 4583 + }, + { + "epoch": 0.28741613894288043, + "grad_norm": 2.8985848426818848, + "learning_rate": 1.672193502455718e-05, + "loss": 0.9885, + "step": 4584 + }, + { + "epoch": 0.28747883879867075, + "grad_norm": 2.950086832046509, + "learning_rate": 1.6720431357672803e-05, + "loss": 1.1608, + "step": 4585 + }, + { + "epoch": 0.2875415386544611, + "grad_norm": 2.8196170330047607, + "learning_rate": 1.6718927413637546e-05, + "loss": 1.0572, + "step": 4586 + }, + { + "epoch": 0.2876042385102514, + "grad_norm": 2.952190637588501, + "learning_rate": 1.6717423192513424e-05, + "loss": 1.0521, + "step": 4587 + }, + { + "epoch": 0.28766693836604174, + "grad_norm": 2.7561211585998535, + "learning_rate": 1.6715918694362478e-05, + "loss": 1.2432, + "step": 4588 + }, + { + "epoch": 0.28772963822183206, + "grad_norm": 3.0129177570343018, + "learning_rate": 1.6714413919246754e-05, + "loss": 1.2799, + "step": 4589 + }, + { + "epoch": 0.28779233807762244, + "grad_norm": 2.8305132389068604, + "learning_rate": 1.671290886722831e-05, + "loss": 1.2082, + "step": 4590 + }, + { + "epoch": 0.28785503793341277, + "grad_norm": 2.945919990539551, + "learning_rate": 1.6711403538369204e-05, + "loss": 1.0717, + "step": 4591 + }, + { + "epoch": 0.2879177377892031, + "grad_norm": 3.0653419494628906, + "learning_rate": 1.670989793273153e-05, + "loss": 1.2447, + "step": 4592 + }, + { + "epoch": 0.2879804376449934, + "grad_norm": 3.053446054458618, + "learning_rate": 1.6708392050377365e-05, + "loss": 1.3128, + "step": 4593 + }, + { + "epoch": 0.28804313750078375, + "grad_norm": 3.1841557025909424, + "learning_rate": 1.6706885891368823e-05, + "loss": 1.1787, + "step": 4594 + }, + { + "epoch": 0.2881058373565741, + "grad_norm": 3.12251615524292, + "learning_rate": 1.6705379455768012e-05, + "loss": 1.3803, + "step": 4595 + }, + { + "epoch": 0.2881685372123644, + "grad_norm": 2.904279947280884, + "learning_rate": 1.670387274363706e-05, + "loss": 1.1657, + "step": 4596 + }, + { + "epoch": 0.28823123706815473, + "grad_norm": 2.745378017425537, + "learning_rate": 1.67023657550381e-05, + "loss": 1.1387, + "step": 4597 + }, + { + "epoch": 0.28829393692394506, + "grad_norm": 2.9074137210845947, + "learning_rate": 1.670085849003329e-05, + "loss": 1.1012, + "step": 4598 + }, + { + "epoch": 0.2883566367797354, + "grad_norm": 2.7153289318084717, + "learning_rate": 1.669935094868478e-05, + "loss": 1.1719, + "step": 4599 + }, + { + "epoch": 0.2884193366355257, + "grad_norm": 2.953498363494873, + "learning_rate": 1.6697843131054746e-05, + "loss": 1.1652, + "step": 4600 + }, + { + "epoch": 0.2884820364913161, + "grad_norm": 2.7448768615722656, + "learning_rate": 1.6696335037205367e-05, + "loss": 1.0835, + "step": 4601 + }, + { + "epoch": 0.2885447363471064, + "grad_norm": 2.9224853515625, + "learning_rate": 1.669482666719884e-05, + "loss": 1.0596, + "step": 4602 + }, + { + "epoch": 0.28860743620289675, + "grad_norm": 3.0791995525360107, + "learning_rate": 1.6693318021097366e-05, + "loss": 1.3043, + "step": 4603 + }, + { + "epoch": 0.2886701360586871, + "grad_norm": 2.907956123352051, + "learning_rate": 1.669180909896317e-05, + "loss": 1.0181, + "step": 4604 + }, + { + "epoch": 0.2887328359144774, + "grad_norm": 2.9405641555786133, + "learning_rate": 1.6690299900858468e-05, + "loss": 1.0874, + "step": 4605 + }, + { + "epoch": 0.28879553577026773, + "grad_norm": 2.805778980255127, + "learning_rate": 1.668879042684551e-05, + "loss": 1.0734, + "step": 4606 + }, + { + "epoch": 
0.28885823562605806, + "grad_norm": 3.3697617053985596, + "learning_rate": 1.668728067698654e-05, + "loss": 1.3208, + "step": 4607 + }, + { + "epoch": 0.2889209354818484, + "grad_norm": 2.9941482543945312, + "learning_rate": 1.6685770651343823e-05, + "loss": 0.9621, + "step": 4608 + }, + { + "epoch": 0.2889836353376387, + "grad_norm": 2.785133123397827, + "learning_rate": 1.6684260349979637e-05, + "loss": 0.9006, + "step": 4609 + }, + { + "epoch": 0.28904633519342904, + "grad_norm": 3.0860044956207275, + "learning_rate": 1.668274977295626e-05, + "loss": 1.2523, + "step": 4610 + }, + { + "epoch": 0.28910903504921937, + "grad_norm": 3.103822708129883, + "learning_rate": 1.6681238920335988e-05, + "loss": 1.1638, + "step": 4611 + }, + { + "epoch": 0.2891717349050097, + "grad_norm": 2.8873813152313232, + "learning_rate": 1.6679727792181136e-05, + "loss": 1.0964, + "step": 4612 + }, + { + "epoch": 0.2892344347608001, + "grad_norm": 3.196582078933716, + "learning_rate": 1.6678216388554012e-05, + "loss": 1.0, + "step": 4613 + }, + { + "epoch": 0.2892971346165904, + "grad_norm": 3.0496997833251953, + "learning_rate": 1.667670470951696e-05, + "loss": 1.087, + "step": 4614 + }, + { + "epoch": 0.28935983447238073, + "grad_norm": 3.1050870418548584, + "learning_rate": 1.6675192755132306e-05, + "loss": 1.0293, + "step": 4615 + }, + { + "epoch": 0.28942253432817105, + "grad_norm": 3.7115883827209473, + "learning_rate": 1.6673680525462416e-05, + "loss": 1.1312, + "step": 4616 + }, + { + "epoch": 0.2894852341839614, + "grad_norm": 2.9330079555511475, + "learning_rate": 1.6672168020569648e-05, + "loss": 1.2437, + "step": 4617 + }, + { + "epoch": 0.2895479340397517, + "grad_norm": 2.9557993412017822, + "learning_rate": 1.667065524051638e-05, + "loss": 1.2701, + "step": 4618 + }, + { + "epoch": 0.28961063389554204, + "grad_norm": 2.8200325965881348, + "learning_rate": 1.6669142185365e-05, + "loss": 1.3305, + "step": 4619 + }, + { + "epoch": 0.28967333375133236, + "grad_norm": 3.373678207397461, + "learning_rate": 1.6667628855177904e-05, + "loss": 1.1699, + "step": 4620 + }, + { + "epoch": 0.2897360336071227, + "grad_norm": 2.933971405029297, + "learning_rate": 1.6666115250017503e-05, + "loss": 1.1982, + "step": 4621 + }, + { + "epoch": 0.289798733462913, + "grad_norm": 3.073349952697754, + "learning_rate": 1.6664601369946216e-05, + "loss": 1.2567, + "step": 4622 + }, + { + "epoch": 0.28986143331870334, + "grad_norm": 3.135640859603882, + "learning_rate": 1.666308721502648e-05, + "loss": 1.0714, + "step": 4623 + }, + { + "epoch": 0.2899241331744937, + "grad_norm": 3.31315279006958, + "learning_rate": 1.6661572785320736e-05, + "loss": 0.967, + "step": 4624 + }, + { + "epoch": 0.28998683303028405, + "grad_norm": 3.1121981143951416, + "learning_rate": 1.6660058080891438e-05, + "loss": 1.1426, + "step": 4625 + }, + { + "epoch": 0.2900495328860744, + "grad_norm": 2.6732406616210938, + "learning_rate": 1.6658543101801055e-05, + "loss": 1.1479, + "step": 4626 + }, + { + "epoch": 0.2901122327418647, + "grad_norm": 2.7797634601593018, + "learning_rate": 1.6657027848112064e-05, + "loss": 1.1919, + "step": 4627 + }, + { + "epoch": 0.29017493259765503, + "grad_norm": 2.9234254360198975, + "learning_rate": 1.6655512319886952e-05, + "loss": 0.942, + "step": 4628 + }, + { + "epoch": 0.29023763245344536, + "grad_norm": 2.799129009246826, + "learning_rate": 1.6653996517188224e-05, + "loss": 1.0634, + "step": 4629 + }, + { + "epoch": 0.2903003323092357, + "grad_norm": 2.8246707916259766, + "learning_rate": 
1.6652480440078386e-05, + "loss": 1.0863, + "step": 4630 + }, + { + "epoch": 0.290363032165026, + "grad_norm": 3.0070960521698, + "learning_rate": 1.6650964088619967e-05, + "loss": 0.9962, + "step": 4631 + }, + { + "epoch": 0.29042573202081634, + "grad_norm": 2.644524574279785, + "learning_rate": 1.6649447462875495e-05, + "loss": 1.2075, + "step": 4632 + }, + { + "epoch": 0.29048843187660667, + "grad_norm": 3.0827667713165283, + "learning_rate": 1.664793056290752e-05, + "loss": 1.183, + "step": 4633 + }, + { + "epoch": 0.290551131732397, + "grad_norm": 2.6823830604553223, + "learning_rate": 1.66464133887786e-05, + "loss": 1.1467, + "step": 4634 + }, + { + "epoch": 0.2906138315881873, + "grad_norm": 2.7071382999420166, + "learning_rate": 1.6644895940551304e-05, + "loss": 1.0353, + "step": 4635 + }, + { + "epoch": 0.2906765314439777, + "grad_norm": 3.4001126289367676, + "learning_rate": 1.6643378218288204e-05, + "loss": 1.2761, + "step": 4636 + }, + { + "epoch": 0.29073923129976803, + "grad_norm": 2.752060651779175, + "learning_rate": 1.66418602220519e-05, + "loss": 1.0697, + "step": 4637 + }, + { + "epoch": 0.29080193115555836, + "grad_norm": 3.1681149005889893, + "learning_rate": 1.664034195190499e-05, + "loss": 1.2635, + "step": 4638 + }, + { + "epoch": 0.2908646310113487, + "grad_norm": 2.7078888416290283, + "learning_rate": 1.6638823407910085e-05, + "loss": 1.1435, + "step": 4639 + }, + { + "epoch": 0.290927330867139, + "grad_norm": 2.7269742488861084, + "learning_rate": 1.6637304590129818e-05, + "loss": 1.1155, + "step": 4640 + }, + { + "epoch": 0.29099003072292934, + "grad_norm": 3.037458896636963, + "learning_rate": 1.6635785498626815e-05, + "loss": 1.235, + "step": 4641 + }, + { + "epoch": 0.29105273057871967, + "grad_norm": 2.958343505859375, + "learning_rate": 1.6634266133463727e-05, + "loss": 1.2701, + "step": 4642 + }, + { + "epoch": 0.29111543043451, + "grad_norm": 3.0291192531585693, + "learning_rate": 1.663274649470322e-05, + "loss": 1.0691, + "step": 4643 + }, + { + "epoch": 0.2911781302903003, + "grad_norm": 2.9885783195495605, + "learning_rate": 1.6631226582407954e-05, + "loss": 1.1862, + "step": 4644 + }, + { + "epoch": 0.29124083014609065, + "grad_norm": 3.061694622039795, + "learning_rate": 1.6629706396640614e-05, + "loss": 1.0999, + "step": 4645 + }, + { + "epoch": 0.291303530001881, + "grad_norm": 2.996260643005371, + "learning_rate": 1.6628185937463896e-05, + "loss": 1.054, + "step": 4646 + }, + { + "epoch": 0.29136622985767135, + "grad_norm": 2.906700849533081, + "learning_rate": 1.6626665204940495e-05, + "loss": 1.0757, + "step": 4647 + }, + { + "epoch": 0.2914289297134617, + "grad_norm": 3.172981023788452, + "learning_rate": 1.6625144199133138e-05, + "loss": 1.1134, + "step": 4648 + }, + { + "epoch": 0.291491629569252, + "grad_norm": 3.1230123043060303, + "learning_rate": 1.662362292010454e-05, + "loss": 1.0632, + "step": 4649 + }, + { + "epoch": 0.29155432942504234, + "grad_norm": 2.7884130477905273, + "learning_rate": 1.6622101367917442e-05, + "loss": 1.248, + "step": 4650 + }, + { + "epoch": 0.29161702928083266, + "grad_norm": 2.805401563644409, + "learning_rate": 1.66205795426346e-05, + "loss": 1.2562, + "step": 4651 + }, + { + "epoch": 0.291679729136623, + "grad_norm": 3.2469046115875244, + "learning_rate": 1.6619057444318766e-05, + "loss": 1.094, + "step": 4652 + }, + { + "epoch": 0.2917424289924133, + "grad_norm": 3.482905864715576, + "learning_rate": 1.6617535073032713e-05, + "loss": 1.0478, + "step": 4653 + }, + { + "epoch": 0.29180512884820364, + 
"grad_norm": 2.8938798904418945, + "learning_rate": 1.6616012428839226e-05, + "loss": 1.2811, + "step": 4654 + }, + { + "epoch": 0.29186782870399397, + "grad_norm": 2.679309129714966, + "learning_rate": 1.6614489511801096e-05, + "loss": 1.1178, + "step": 4655 + }, + { + "epoch": 0.2919305285597843, + "grad_norm": 2.9056148529052734, + "learning_rate": 1.6612966321981132e-05, + "loss": 1.1649, + "step": 4656 + }, + { + "epoch": 0.2919932284155746, + "grad_norm": 3.2016968727111816, + "learning_rate": 1.661144285944215e-05, + "loss": 1.0954, + "step": 4657 + }, + { + "epoch": 0.29205592827136495, + "grad_norm": 2.883207082748413, + "learning_rate": 1.660991912424697e-05, + "loss": 1.0715, + "step": 4658 + }, + { + "epoch": 0.29211862812715533, + "grad_norm": 3.239159345626831, + "learning_rate": 1.660839511645844e-05, + "loss": 1.002, + "step": 4659 + }, + { + "epoch": 0.29218132798294566, + "grad_norm": 2.81304669380188, + "learning_rate": 1.6606870836139405e-05, + "loss": 1.1165, + "step": 4660 + }, + { + "epoch": 0.292244027838736, + "grad_norm": 3.0447187423706055, + "learning_rate": 1.660534628335273e-05, + "loss": 1.051, + "step": 4661 + }, + { + "epoch": 0.2923067276945263, + "grad_norm": 3.0661041736602783, + "learning_rate": 1.6603821458161286e-05, + "loss": 1.4345, + "step": 4662 + }, + { + "epoch": 0.29236942755031664, + "grad_norm": 3.000147581100464, + "learning_rate": 1.660229636062795e-05, + "loss": 1.1584, + "step": 4663 + }, + { + "epoch": 0.29243212740610697, + "grad_norm": 2.63140869140625, + "learning_rate": 1.6600770990815635e-05, + "loss": 0.9771, + "step": 4664 + }, + { + "epoch": 0.2924948272618973, + "grad_norm": 2.8627688884735107, + "learning_rate": 1.659924534878723e-05, + "loss": 1.1635, + "step": 4665 + }, + { + "epoch": 0.2925575271176876, + "grad_norm": 3.052990436553955, + "learning_rate": 1.6597719434605657e-05, + "loss": 1.1796, + "step": 4666 + }, + { + "epoch": 0.29262022697347795, + "grad_norm": 3.0007195472717285, + "learning_rate": 1.659619324833385e-05, + "loss": 1.2177, + "step": 4667 + }, + { + "epoch": 0.2926829268292683, + "grad_norm": 2.989992618560791, + "learning_rate": 1.6594666790034744e-05, + "loss": 1.1694, + "step": 4668 + }, + { + "epoch": 0.2927456266850586, + "grad_norm": 3.14615797996521, + "learning_rate": 1.6593140059771288e-05, + "loss": 1.1719, + "step": 4669 + }, + { + "epoch": 0.29280832654084893, + "grad_norm": 3.126523017883301, + "learning_rate": 1.6591613057606454e-05, + "loss": 1.1378, + "step": 4670 + }, + { + "epoch": 0.2928710263966393, + "grad_norm": 2.8678090572357178, + "learning_rate": 1.6590085783603208e-05, + "loss": 1.2209, + "step": 4671 + }, + { + "epoch": 0.29293372625242964, + "grad_norm": 2.803609848022461, + "learning_rate": 1.6588558237824535e-05, + "loss": 1.1449, + "step": 4672 + }, + { + "epoch": 0.29299642610821997, + "grad_norm": 3.0434932708740234, + "learning_rate": 1.6587030420333433e-05, + "loss": 1.0647, + "step": 4673 + }, + { + "epoch": 0.2930591259640103, + "grad_norm": 3.0731401443481445, + "learning_rate": 1.658550233119291e-05, + "loss": 1.2013, + "step": 4674 + }, + { + "epoch": 0.2931218258198006, + "grad_norm": 3.0461959838867188, + "learning_rate": 1.6583973970465982e-05, + "loss": 1.0004, + "step": 4675 + }, + { + "epoch": 0.29318452567559095, + "grad_norm": 3.308781147003174, + "learning_rate": 1.6582445338215677e-05, + "loss": 1.0695, + "step": 4676 + }, + { + "epoch": 0.2932472255313813, + "grad_norm": 2.773132085800171, + "learning_rate": 1.6580916434505042e-05, + "loss": 1.2884, + 
"step": 4677 + }, + { + "epoch": 0.2933099253871716, + "grad_norm": 2.918968439102173, + "learning_rate": 1.657938725939713e-05, + "loss": 1.1375, + "step": 4678 + }, + { + "epoch": 0.2933726252429619, + "grad_norm": 2.631891965866089, + "learning_rate": 1.6577857812954994e-05, + "loss": 1.1562, + "step": 4679 + }, + { + "epoch": 0.29343532509875225, + "grad_norm": 2.658294439315796, + "learning_rate": 1.6576328095241715e-05, + "loss": 1.1286, + "step": 4680 + }, + { + "epoch": 0.2934980249545426, + "grad_norm": 2.7018158435821533, + "learning_rate": 1.657479810632038e-05, + "loss": 1.2013, + "step": 4681 + }, + { + "epoch": 0.29356072481033296, + "grad_norm": 2.8861138820648193, + "learning_rate": 1.6573267846254086e-05, + "loss": 1.1794, + "step": 4682 + }, + { + "epoch": 0.2936234246661233, + "grad_norm": 2.664355516433716, + "learning_rate": 1.6571737315105937e-05, + "loss": 1.2133, + "step": 4683 + }, + { + "epoch": 0.2936861245219136, + "grad_norm": 3.0436718463897705, + "learning_rate": 1.6570206512939052e-05, + "loss": 1.1035, + "step": 4684 + }, + { + "epoch": 0.29374882437770394, + "grad_norm": 2.9520013332366943, + "learning_rate": 1.6568675439816567e-05, + "loss": 1.166, + "step": 4685 + }, + { + "epoch": 0.29381152423349427, + "grad_norm": 3.0604119300842285, + "learning_rate": 1.6567144095801618e-05, + "loss": 1.091, + "step": 4686 + }, + { + "epoch": 0.2938742240892846, + "grad_norm": 2.9369375705718994, + "learning_rate": 1.656561248095736e-05, + "loss": 1.2822, + "step": 4687 + }, + { + "epoch": 0.2939369239450749, + "grad_norm": 2.7701892852783203, + "learning_rate": 1.6564080595346954e-05, + "loss": 1.1949, + "step": 4688 + }, + { + "epoch": 0.29399962380086525, + "grad_norm": 3.0062761306762695, + "learning_rate": 1.6562548439033583e-05, + "loss": 1.1628, + "step": 4689 + }, + { + "epoch": 0.2940623236566556, + "grad_norm": 3.3057870864868164, + "learning_rate": 1.656101601208042e-05, + "loss": 1.0104, + "step": 4690 + }, + { + "epoch": 0.2941250235124459, + "grad_norm": 2.9677529335021973, + "learning_rate": 1.655948331455068e-05, + "loss": 1.1194, + "step": 4691 + }, + { + "epoch": 0.29418772336823623, + "grad_norm": 3.2042043209075928, + "learning_rate": 1.6557950346507558e-05, + "loss": 1.275, + "step": 4692 + }, + { + "epoch": 0.29425042322402656, + "grad_norm": 3.1633293628692627, + "learning_rate": 1.6556417108014274e-05, + "loss": 1.1693, + "step": 4693 + }, + { + "epoch": 0.29431312307981694, + "grad_norm": 3.0338478088378906, + "learning_rate": 1.6554883599134064e-05, + "loss": 1.1713, + "step": 4694 + }, + { + "epoch": 0.29437582293560727, + "grad_norm": 2.9951224327087402, + "learning_rate": 1.6553349819930167e-05, + "loss": 1.1756, + "step": 4695 + }, + { + "epoch": 0.2944385227913976, + "grad_norm": 3.090406894683838, + "learning_rate": 1.655181577046584e-05, + "loss": 1.1008, + "step": 4696 + }, + { + "epoch": 0.2945012226471879, + "grad_norm": 3.058044195175171, + "learning_rate": 1.6550281450804342e-05, + "loss": 0.9303, + "step": 4697 + }, + { + "epoch": 0.29456392250297825, + "grad_norm": 2.9880294799804688, + "learning_rate": 1.6548746861008952e-05, + "loss": 0.9582, + "step": 4698 + }, + { + "epoch": 0.2946266223587686, + "grad_norm": 2.7511603832244873, + "learning_rate": 1.6547212001142955e-05, + "loss": 1.2069, + "step": 4699 + }, + { + "epoch": 0.2946893222145589, + "grad_norm": 3.0693562030792236, + "learning_rate": 1.654567687126965e-05, + "loss": 1.1325, + "step": 4700 + }, + { + "epoch": 0.29475202207034923, + "grad_norm": 
2.9953484535217285, + "learning_rate": 1.6544141471452343e-05, + "loss": 1.1087, + "step": 4701 + }, + { + "epoch": 0.29481472192613956, + "grad_norm": 2.9884369373321533, + "learning_rate": 1.6542605801754357e-05, + "loss": 1.1935, + "step": 4702 + }, + { + "epoch": 0.2948774217819299, + "grad_norm": 2.9719197750091553, + "learning_rate": 1.6541069862239023e-05, + "loss": 0.9655, + "step": 4703 + }, + { + "epoch": 0.2949401216377202, + "grad_norm": 2.7158679962158203, + "learning_rate": 1.6539533652969683e-05, + "loss": 1.2412, + "step": 4704 + }, + { + "epoch": 0.2950028214935106, + "grad_norm": 3.008246898651123, + "learning_rate": 1.6537997174009687e-05, + "loss": 1.0568, + "step": 4705 + }, + { + "epoch": 0.2950655213493009, + "grad_norm": 3.224463939666748, + "learning_rate": 1.6536460425422403e-05, + "loss": 1.2117, + "step": 4706 + }, + { + "epoch": 0.29512822120509125, + "grad_norm": 2.7870876789093018, + "learning_rate": 1.6534923407271208e-05, + "loss": 1.3019, + "step": 4707 + }, + { + "epoch": 0.2951909210608816, + "grad_norm": 2.7593507766723633, + "learning_rate": 1.6533386119619485e-05, + "loss": 1.1062, + "step": 4708 + }, + { + "epoch": 0.2952536209166719, + "grad_norm": 3.095632553100586, + "learning_rate": 1.653184856253063e-05, + "loss": 1.0322, + "step": 4709 + }, + { + "epoch": 0.2953163207724622, + "grad_norm": 2.940577745437622, + "learning_rate": 1.6530310736068057e-05, + "loss": 1.1822, + "step": 4710 + }, + { + "epoch": 0.29537902062825255, + "grad_norm": 2.6574771404266357, + "learning_rate": 1.6528772640295187e-05, + "loss": 1.2097, + "step": 4711 + }, + { + "epoch": 0.2954417204840429, + "grad_norm": 2.862713575363159, + "learning_rate": 1.6527234275275445e-05, + "loss": 1.274, + "step": 4712 + }, + { + "epoch": 0.2955044203398332, + "grad_norm": 3.115065336227417, + "learning_rate": 1.6525695641072275e-05, + "loss": 1.086, + "step": 4713 + }, + { + "epoch": 0.29556712019562353, + "grad_norm": 2.943509817123413, + "learning_rate": 1.6524156737749132e-05, + "loss": 1.0185, + "step": 4714 + }, + { + "epoch": 0.29562982005141386, + "grad_norm": 2.9077208042144775, + "learning_rate": 1.6522617565369484e-05, + "loss": 0.9565, + "step": 4715 + }, + { + "epoch": 0.2956925199072042, + "grad_norm": 2.8731932640075684, + "learning_rate": 1.65210781239968e-05, + "loss": 1.1302, + "step": 4716 + }, + { + "epoch": 0.29575521976299457, + "grad_norm": 2.9321506023406982, + "learning_rate": 1.651953841369457e-05, + "loss": 1.0506, + "step": 4717 + }, + { + "epoch": 0.2958179196187849, + "grad_norm": 2.9246826171875, + "learning_rate": 1.6517998434526294e-05, + "loss": 1.1083, + "step": 4718 + }, + { + "epoch": 0.2958806194745752, + "grad_norm": 2.985750436782837, + "learning_rate": 1.651645818655547e-05, + "loss": 1.1449, + "step": 4719 + }, + { + "epoch": 0.29594331933036555, + "grad_norm": 3.0384347438812256, + "learning_rate": 1.6514917669845635e-05, + "loss": 1.1194, + "step": 4720 + }, + { + "epoch": 0.2960060191861559, + "grad_norm": 3.027693510055542, + "learning_rate": 1.651337688446031e-05, + "loss": 1.2684, + "step": 4721 + }, + { + "epoch": 0.2960687190419462, + "grad_norm": 2.9449925422668457, + "learning_rate": 1.6511835830463034e-05, + "loss": 1.1355, + "step": 4722 + }, + { + "epoch": 0.29613141889773653, + "grad_norm": 3.4747910499572754, + "learning_rate": 1.6510294507917364e-05, + "loss": 1.0857, + "step": 4723 + }, + { + "epoch": 0.29619411875352686, + "grad_norm": 2.891045570373535, + "learning_rate": 1.6508752916886862e-05, + "loss": 1.3569, + "step": 
4724 + }, + { + "epoch": 0.2962568186093172, + "grad_norm": 3.228752851486206, + "learning_rate": 1.650721105743511e-05, + "loss": 1.0766, + "step": 4725 + }, + { + "epoch": 0.2963195184651075, + "grad_norm": 2.8095247745513916, + "learning_rate": 1.650566892962569e-05, + "loss": 1.217, + "step": 4726 + }, + { + "epoch": 0.29638221832089784, + "grad_norm": 2.7012546062469482, + "learning_rate": 1.6504126533522195e-05, + "loss": 1.1435, + "step": 4727 + }, + { + "epoch": 0.2964449181766882, + "grad_norm": 2.817426919937134, + "learning_rate": 1.6502583869188243e-05, + "loss": 1.0648, + "step": 4728 + }, + { + "epoch": 0.29650761803247855, + "grad_norm": 3.0549070835113525, + "learning_rate": 1.6501040936687444e-05, + "loss": 1.1682, + "step": 4729 + }, + { + "epoch": 0.2965703178882689, + "grad_norm": 2.6708006858825684, + "learning_rate": 1.6499497736083434e-05, + "loss": 1.3422, + "step": 4730 + }, + { + "epoch": 0.2966330177440592, + "grad_norm": 2.754185199737549, + "learning_rate": 1.6497954267439853e-05, + "loss": 1.2213, + "step": 4731 + }, + { + "epoch": 0.29669571759984953, + "grad_norm": 2.948115825653076, + "learning_rate": 1.6496410530820357e-05, + "loss": 1.1924, + "step": 4732 + }, + { + "epoch": 0.29675841745563986, + "grad_norm": 2.9484403133392334, + "learning_rate": 1.6494866526288605e-05, + "loss": 1.0913, + "step": 4733 + }, + { + "epoch": 0.2968211173114302, + "grad_norm": 3.1025192737579346, + "learning_rate": 1.6493322253908274e-05, + "loss": 1.1129, + "step": 4734 + }, + { + "epoch": 0.2968838171672205, + "grad_norm": 3.185666799545288, + "learning_rate": 1.649177771374305e-05, + "loss": 1.1027, + "step": 4735 + }, + { + "epoch": 0.29694651702301084, + "grad_norm": 2.9378464221954346, + "learning_rate": 1.649023290585663e-05, + "loss": 1.1659, + "step": 4736 + }, + { + "epoch": 0.29700921687880116, + "grad_norm": 2.741720199584961, + "learning_rate": 1.6488687830312718e-05, + "loss": 1.1759, + "step": 4737 + }, + { + "epoch": 0.2970719167345915, + "grad_norm": 2.7542126178741455, + "learning_rate": 1.648714248717504e-05, + "loss": 1.124, + "step": 4738 + }, + { + "epoch": 0.2971346165903818, + "grad_norm": 3.039116144180298, + "learning_rate": 1.648559687650732e-05, + "loss": 1.1098, + "step": 4739 + }, + { + "epoch": 0.2971973164461722, + "grad_norm": 2.771771192550659, + "learning_rate": 1.6484050998373303e-05, + "loss": 1.1263, + "step": 4740 + }, + { + "epoch": 0.2972600163019625, + "grad_norm": 3.1238412857055664, + "learning_rate": 1.6482504852836744e-05, + "loss": 1.1048, + "step": 4741 + }, + { + "epoch": 0.29732271615775285, + "grad_norm": 3.2210824489593506, + "learning_rate": 1.6480958439961393e-05, + "loss": 0.9899, + "step": 4742 + }, + { + "epoch": 0.2973854160135432, + "grad_norm": 3.0712740421295166, + "learning_rate": 1.6479411759811038e-05, + "loss": 1.1683, + "step": 4743 + }, + { + "epoch": 0.2974481158693335, + "grad_norm": 2.901282787322998, + "learning_rate": 1.647786481244946e-05, + "loss": 1.0433, + "step": 4744 + }, + { + "epoch": 0.29751081572512383, + "grad_norm": 2.812190055847168, + "learning_rate": 1.6476317597940453e-05, + "loss": 1.1771, + "step": 4745 + }, + { + "epoch": 0.29757351558091416, + "grad_norm": 3.2271695137023926, + "learning_rate": 1.6474770116347824e-05, + "loss": 1.1082, + "step": 4746 + }, + { + "epoch": 0.2976362154367045, + "grad_norm": 2.7078444957733154, + "learning_rate": 1.6473222367735394e-05, + "loss": 1.1724, + "step": 4747 + }, + { + "epoch": 0.2976989152924948, + "grad_norm": 3.2500219345092773, + 
"learning_rate": 1.6471674352166995e-05, + "loss": 1.1604, + "step": 4748 + }, + { + "epoch": 0.29776161514828514, + "grad_norm": 3.098841428756714, + "learning_rate": 1.6470126069706456e-05, + "loss": 0.9774, + "step": 4749 + }, + { + "epoch": 0.29782431500407547, + "grad_norm": 2.911318778991699, + "learning_rate": 1.646857752041764e-05, + "loss": 1.063, + "step": 4750 + }, + { + "epoch": 0.2978870148598658, + "grad_norm": 3.1443705558776855, + "learning_rate": 1.6467028704364403e-05, + "loss": 1.1489, + "step": 4751 + }, + { + "epoch": 0.2979497147156562, + "grad_norm": 3.2767539024353027, + "learning_rate": 1.646547962161062e-05, + "loss": 0.9899, + "step": 4752 + }, + { + "epoch": 0.2980124145714465, + "grad_norm": 3.1567723751068115, + "learning_rate": 1.6463930272220176e-05, + "loss": 1.1296, + "step": 4753 + }, + { + "epoch": 0.29807511442723683, + "grad_norm": 3.108245849609375, + "learning_rate": 1.6462380656256962e-05, + "loss": 1.1558, + "step": 4754 + }, + { + "epoch": 0.29813781428302716, + "grad_norm": 2.9893808364868164, + "learning_rate": 1.646083077378489e-05, + "loss": 1.1704, + "step": 4755 + }, + { + "epoch": 0.2982005141388175, + "grad_norm": 3.091254711151123, + "learning_rate": 1.6459280624867876e-05, + "loss": 1.0446, + "step": 4756 + }, + { + "epoch": 0.2982632139946078, + "grad_norm": 3.50152850151062, + "learning_rate": 1.6457730209569845e-05, + "loss": 1.1599, + "step": 4757 + }, + { + "epoch": 0.29832591385039814, + "grad_norm": 2.7924530506134033, + "learning_rate": 1.6456179527954737e-05, + "loss": 1.1613, + "step": 4758 + }, + { + "epoch": 0.29838861370618847, + "grad_norm": 2.9884345531463623, + "learning_rate": 1.6454628580086506e-05, + "loss": 1.0552, + "step": 4759 + }, + { + "epoch": 0.2984513135619788, + "grad_norm": 3.2257425785064697, + "learning_rate": 1.645307736602911e-05, + "loss": 1.183, + "step": 4760 + }, + { + "epoch": 0.2985140134177691, + "grad_norm": 3.099396228790283, + "learning_rate": 1.645152588584652e-05, + "loss": 1.1425, + "step": 4761 + }, + { + "epoch": 0.29857671327355945, + "grad_norm": 3.144315004348755, + "learning_rate": 1.644997413960272e-05, + "loss": 0.9798, + "step": 4762 + }, + { + "epoch": 0.29863941312934983, + "grad_norm": 3.443079710006714, + "learning_rate": 1.6448422127361707e-05, + "loss": 1.114, + "step": 4763 + }, + { + "epoch": 0.29870211298514016, + "grad_norm": 3.001220703125, + "learning_rate": 1.6446869849187486e-05, + "loss": 1.1954, + "step": 4764 + }, + { + "epoch": 0.2987648128409305, + "grad_norm": 2.9820191860198975, + "learning_rate": 1.6445317305144066e-05, + "loss": 1.1694, + "step": 4765 + }, + { + "epoch": 0.2988275126967208, + "grad_norm": 3.188891887664795, + "learning_rate": 1.6443764495295484e-05, + "loss": 1.0795, + "step": 4766 + }, + { + "epoch": 0.29889021255251114, + "grad_norm": 3.037381887435913, + "learning_rate": 1.6442211419705767e-05, + "loss": 1.1412, + "step": 4767 + }, + { + "epoch": 0.29895291240830146, + "grad_norm": 3.1166136264801025, + "learning_rate": 1.6440658078438973e-05, + "loss": 1.1034, + "step": 4768 + }, + { + "epoch": 0.2990156122640918, + "grad_norm": 2.866335868835449, + "learning_rate": 1.6439104471559157e-05, + "loss": 1.2567, + "step": 4769 + }, + { + "epoch": 0.2990783121198821, + "grad_norm": 3.0641021728515625, + "learning_rate": 1.643755059913039e-05, + "loss": 0.9608, + "step": 4770 + }, + { + "epoch": 0.29914101197567244, + "grad_norm": 2.823380708694458, + "learning_rate": 1.643599646121676e-05, + "loss": 1.2011, + "step": 4771 + }, + { + "epoch": 
0.29920371183146277, + "grad_norm": 3.1314661502838135, + "learning_rate": 1.6434442057882354e-05, + "loss": 1.1794, + "step": 4772 + }, + { + "epoch": 0.2992664116872531, + "grad_norm": 2.920621633529663, + "learning_rate": 1.6432887389191274e-05, + "loss": 1.1542, + "step": 4773 + }, + { + "epoch": 0.2993291115430434, + "grad_norm": 3.238917827606201, + "learning_rate": 1.643133245520764e-05, + "loss": 1.028, + "step": 4774 + }, + { + "epoch": 0.2993918113988338, + "grad_norm": 2.5844178199768066, + "learning_rate": 1.6429777255995573e-05, + "loss": 0.9787, + "step": 4775 + }, + { + "epoch": 0.29945451125462413, + "grad_norm": 2.6471855640411377, + "learning_rate": 1.642822179161921e-05, + "loss": 1.051, + "step": 4776 + }, + { + "epoch": 0.29951721111041446, + "grad_norm": 2.8286478519439697, + "learning_rate": 1.64266660621427e-05, + "loss": 1.1922, + "step": 4777 + }, + { + "epoch": 0.2995799109662048, + "grad_norm": 2.990877628326416, + "learning_rate": 1.6425110067630204e-05, + "loss": 1.1962, + "step": 4778 + }, + { + "epoch": 0.2996426108219951, + "grad_norm": 3.339681386947632, + "learning_rate": 1.6423553808145886e-05, + "loss": 1.0183, + "step": 4779 + }, + { + "epoch": 0.29970531067778544, + "grad_norm": 3.146855115890503, + "learning_rate": 1.6421997283753928e-05, + "loss": 1.2356, + "step": 4780 + }, + { + "epoch": 0.29976801053357577, + "grad_norm": 3.059840202331543, + "learning_rate": 1.642044049451852e-05, + "loss": 1.3369, + "step": 4781 + }, + { + "epoch": 0.2998307103893661, + "grad_norm": 2.9156970977783203, + "learning_rate": 1.6418883440503868e-05, + "loss": 1.2497, + "step": 4782 + }, + { + "epoch": 0.2998934102451564, + "grad_norm": 3.0387110710144043, + "learning_rate": 1.6417326121774184e-05, + "loss": 0.9625, + "step": 4783 + }, + { + "epoch": 0.29995611010094675, + "grad_norm": 2.817518711090088, + "learning_rate": 1.641576853839369e-05, + "loss": 1.2422, + "step": 4784 + }, + { + "epoch": 0.3000188099567371, + "grad_norm": 2.8773019313812256, + "learning_rate": 1.6414210690426618e-05, + "loss": 1.2216, + "step": 4785 + }, + { + "epoch": 0.30008150981252746, + "grad_norm": 2.9476733207702637, + "learning_rate": 1.641265257793722e-05, + "loss": 1.0645, + "step": 4786 + }, + { + "epoch": 0.3001442096683178, + "grad_norm": 3.118212938308716, + "learning_rate": 1.6411094200989744e-05, + "loss": 1.0355, + "step": 4787 + }, + { + "epoch": 0.3002069095241081, + "grad_norm": 3.124499797821045, + "learning_rate": 1.640953555964847e-05, + "loss": 1.1608, + "step": 4788 + }, + { + "epoch": 0.30026960937989844, + "grad_norm": 3.128147840499878, + "learning_rate": 1.640797665397767e-05, + "loss": 1.0818, + "step": 4789 + }, + { + "epoch": 0.30033230923568877, + "grad_norm": 2.9837677478790283, + "learning_rate": 1.640641748404163e-05, + "loss": 1.0931, + "step": 4790 + }, + { + "epoch": 0.3003950090914791, + "grad_norm": 2.907759666442871, + "learning_rate": 1.640485804990465e-05, + "loss": 1.2756, + "step": 4791 + }, + { + "epoch": 0.3004577089472694, + "grad_norm": 2.957749128341675, + "learning_rate": 1.640329835163105e-05, + "loss": 1.2126, + "step": 4792 + }, + { + "epoch": 0.30052040880305975, + "grad_norm": 3.1249966621398926, + "learning_rate": 1.6401738389285145e-05, + "loss": 0.9622, + "step": 4793 + }, + { + "epoch": 0.3005831086588501, + "grad_norm": 3.278261661529541, + "learning_rate": 1.640017816293127e-05, + "loss": 1.1069, + "step": 4794 + }, + { + "epoch": 0.3006458085146404, + "grad_norm": 2.9189929962158203, + "learning_rate": 1.6398617672633765e-05, + 
"loss": 1.1421, + "step": 4795 + }, + { + "epoch": 0.3007085083704307, + "grad_norm": 2.887561082839966, + "learning_rate": 1.6397056918456987e-05, + "loss": 1.1398, + "step": 4796 + }, + { + "epoch": 0.30077120822622105, + "grad_norm": 2.807559013366699, + "learning_rate": 1.6395495900465306e-05, + "loss": 1.2957, + "step": 4797 + }, + { + "epoch": 0.30083390808201144, + "grad_norm": 2.6936306953430176, + "learning_rate": 1.6393934618723096e-05, + "loss": 1.1723, + "step": 4798 + }, + { + "epoch": 0.30089660793780176, + "grad_norm": 2.9889063835144043, + "learning_rate": 1.639237307329474e-05, + "loss": 1.04, + "step": 4799 + }, + { + "epoch": 0.3009593077935921, + "grad_norm": 2.880941867828369, + "learning_rate": 1.639081126424464e-05, + "loss": 1.132, + "step": 4800 + }, + { + "epoch": 0.3010220076493824, + "grad_norm": 3.3041157722473145, + "learning_rate": 1.6389249191637203e-05, + "loss": 1.3245, + "step": 4801 + }, + { + "epoch": 0.30108470750517274, + "grad_norm": 3.1532599925994873, + "learning_rate": 1.638768685553685e-05, + "loss": 1.1774, + "step": 4802 + }, + { + "epoch": 0.30114740736096307, + "grad_norm": 2.667027473449707, + "learning_rate": 1.6386124256008015e-05, + "loss": 1.1188, + "step": 4803 + }, + { + "epoch": 0.3012101072167534, + "grad_norm": 2.9612228870391846, + "learning_rate": 1.6384561393115135e-05, + "loss": 1.1654, + "step": 4804 + }, + { + "epoch": 0.3012728070725437, + "grad_norm": 3.2230587005615234, + "learning_rate": 1.6382998266922664e-05, + "loss": 1.0983, + "step": 4805 + }, + { + "epoch": 0.30133550692833405, + "grad_norm": 2.8891708850860596, + "learning_rate": 1.638143487749507e-05, + "loss": 1.2389, + "step": 4806 + }, + { + "epoch": 0.3013982067841244, + "grad_norm": 2.675114154815674, + "learning_rate": 1.6379871224896815e-05, + "loss": 1.153, + "step": 4807 + }, + { + "epoch": 0.3014609066399147, + "grad_norm": 2.7618606090545654, + "learning_rate": 1.6378307309192397e-05, + "loss": 1.1419, + "step": 4808 + }, + { + "epoch": 0.30152360649570503, + "grad_norm": 2.7372241020202637, + "learning_rate": 1.637674313044631e-05, + "loss": 1.1692, + "step": 4809 + }, + { + "epoch": 0.3015863063514954, + "grad_norm": 2.81864070892334, + "learning_rate": 1.6375178688723054e-05, + "loss": 1.2554, + "step": 4810 + }, + { + "epoch": 0.30164900620728574, + "grad_norm": 3.0159294605255127, + "learning_rate": 1.6373613984087148e-05, + "loss": 1.0217, + "step": 4811 + }, + { + "epoch": 0.30171170606307607, + "grad_norm": 2.920686960220337, + "learning_rate": 1.637204901660313e-05, + "loss": 1.2115, + "step": 4812 + }, + { + "epoch": 0.3017744059188664, + "grad_norm": 2.8094019889831543, + "learning_rate": 1.6370483786335524e-05, + "loss": 1.0779, + "step": 4813 + }, + { + "epoch": 0.3018371057746567, + "grad_norm": 3.5910096168518066, + "learning_rate": 1.6368918293348893e-05, + "loss": 1.2923, + "step": 4814 + }, + { + "epoch": 0.30189980563044705, + "grad_norm": 3.0891757011413574, + "learning_rate": 1.6367352537707795e-05, + "loss": 1.1296, + "step": 4815 + }, + { + "epoch": 0.3019625054862374, + "grad_norm": 2.872934103012085, + "learning_rate": 1.6365786519476796e-05, + "loss": 1.0661, + "step": 4816 + }, + { + "epoch": 0.3020252053420277, + "grad_norm": 2.9149351119995117, + "learning_rate": 1.6364220238720484e-05, + "loss": 1.159, + "step": 4817 + }, + { + "epoch": 0.30208790519781803, + "grad_norm": 2.911684513092041, + "learning_rate": 1.6362653695503456e-05, + "loss": 1.0272, + "step": 4818 + }, + { + "epoch": 0.30215060505360836, + "grad_norm": 
3.0301408767700195, + "learning_rate": 1.6361086889890307e-05, + "loss": 1.1188, + "step": 4819 + }, + { + "epoch": 0.3022133049093987, + "grad_norm": 3.0770411491394043, + "learning_rate": 1.6359519821945658e-05, + "loss": 1.1501, + "step": 4820 + }, + { + "epoch": 0.30227600476518907, + "grad_norm": 3.174708366394043, + "learning_rate": 1.6357952491734136e-05, + "loss": 1.1912, + "step": 4821 + }, + { + "epoch": 0.3023387046209794, + "grad_norm": 2.8434324264526367, + "learning_rate": 1.6356384899320373e-05, + "loss": 1.3007, + "step": 4822 + }, + { + "epoch": 0.3024014044767697, + "grad_norm": 2.8805816173553467, + "learning_rate": 1.635481704476902e-05, + "loss": 1.1573, + "step": 4823 + }, + { + "epoch": 0.30246410433256005, + "grad_norm": 3.050292730331421, + "learning_rate": 1.6353248928144733e-05, + "loss": 1.1953, + "step": 4824 + }, + { + "epoch": 0.3025268041883504, + "grad_norm": 3.106548547744751, + "learning_rate": 1.6351680549512184e-05, + "loss": 1.1803, + "step": 4825 + }, + { + "epoch": 0.3025895040441407, + "grad_norm": 3.087491512298584, + "learning_rate": 1.6350111908936054e-05, + "loss": 1.1145, + "step": 4826 + }, + { + "epoch": 0.302652203899931, + "grad_norm": 2.6984307765960693, + "learning_rate": 1.6348543006481027e-05, + "loss": 1.3456, + "step": 4827 + }, + { + "epoch": 0.30271490375572135, + "grad_norm": 2.8334057331085205, + "learning_rate": 1.6346973842211813e-05, + "loss": 1.1634, + "step": 4828 + }, + { + "epoch": 0.3027776036115117, + "grad_norm": 2.8475382328033447, + "learning_rate": 1.6345404416193117e-05, + "loss": 1.1196, + "step": 4829 + }, + { + "epoch": 0.302840303467302, + "grad_norm": 2.895124673843384, + "learning_rate": 1.634383472848967e-05, + "loss": 1.274, + "step": 4830 + }, + { + "epoch": 0.30290300332309233, + "grad_norm": 2.759932041168213, + "learning_rate": 1.63422647791662e-05, + "loss": 1.0094, + "step": 4831 + }, + { + "epoch": 0.30296570317888266, + "grad_norm": 2.8092215061187744, + "learning_rate": 1.6340694568287453e-05, + "loss": 1.1013, + "step": 4832 + }, + { + "epoch": 0.30302840303467304, + "grad_norm": 2.7832040786743164, + "learning_rate": 1.6339124095918187e-05, + "loss": 1.0429, + "step": 4833 + }, + { + "epoch": 0.30309110289046337, + "grad_norm": 2.8339805603027344, + "learning_rate": 1.6337553362123165e-05, + "loss": 1.4178, + "step": 4834 + }, + { + "epoch": 0.3031538027462537, + "grad_norm": 2.8872108459472656, + "learning_rate": 1.6335982366967163e-05, + "loss": 1.098, + "step": 4835 + }, + { + "epoch": 0.303216502602044, + "grad_norm": 3.112769842147827, + "learning_rate": 1.6334411110514975e-05, + "loss": 1.1658, + "step": 4836 + }, + { + "epoch": 0.30327920245783435, + "grad_norm": 2.753420114517212, + "learning_rate": 1.6332839592831394e-05, + "loss": 1.2489, + "step": 4837 + }, + { + "epoch": 0.3033419023136247, + "grad_norm": 2.9305124282836914, + "learning_rate": 1.6331267813981234e-05, + "loss": 1.2093, + "step": 4838 + }, + { + "epoch": 0.303404602169415, + "grad_norm": 3.12213397026062, + "learning_rate": 1.632969577402931e-05, + "loss": 1.0292, + "step": 4839 + }, + { + "epoch": 0.30346730202520533, + "grad_norm": 2.996034860610962, + "learning_rate": 1.632812347304046e-05, + "loss": 1.3117, + "step": 4840 + }, + { + "epoch": 0.30353000188099566, + "grad_norm": 3.0015006065368652, + "learning_rate": 1.6326550911079518e-05, + "loss": 1.1853, + "step": 4841 + }, + { + "epoch": 0.303592701736786, + "grad_norm": 2.736250162124634, + "learning_rate": 1.632497808821134e-05, + "loss": 1.1199, + "step": 4842 + 
}, + { + "epoch": 0.3036554015925763, + "grad_norm": 2.945268392562866, + "learning_rate": 1.632340500450079e-05, + "loss": 1.0093, + "step": 4843 + }, + { + "epoch": 0.3037181014483667, + "grad_norm": 2.877615451812744, + "learning_rate": 1.6321831660012746e-05, + "loss": 1.1851, + "step": 4844 + }, + { + "epoch": 0.303780801304157, + "grad_norm": 3.108316421508789, + "learning_rate": 1.632025805481208e-05, + "loss": 1.3658, + "step": 4845 + }, + { + "epoch": 0.30384350115994735, + "grad_norm": 2.832277774810791, + "learning_rate": 1.6318684188963706e-05, + "loss": 1.3498, + "step": 4846 + }, + { + "epoch": 0.3039062010157377, + "grad_norm": 2.8849098682403564, + "learning_rate": 1.631711006253251e-05, + "loss": 1.1565, + "step": 4847 + }, + { + "epoch": 0.303968900871528, + "grad_norm": 2.6783287525177, + "learning_rate": 1.6315535675583425e-05, + "loss": 1.2488, + "step": 4848 + }, + { + "epoch": 0.30403160072731833, + "grad_norm": 2.952735424041748, + "learning_rate": 1.6313961028181372e-05, + "loss": 1.1356, + "step": 4849 + }, + { + "epoch": 0.30409430058310866, + "grad_norm": 2.981649398803711, + "learning_rate": 1.631238612039129e-05, + "loss": 1.1675, + "step": 4850 + }, + { + "epoch": 0.304157000438899, + "grad_norm": 2.8865432739257812, + "learning_rate": 1.631081095227813e-05, + "loss": 1.2032, + "step": 4851 + }, + { + "epoch": 0.3042197002946893, + "grad_norm": 3.1037001609802246, + "learning_rate": 1.6309235523906844e-05, + "loss": 1.058, + "step": 4852 + }, + { + "epoch": 0.30428240015047964, + "grad_norm": 2.9016871452331543, + "learning_rate": 1.6307659835342416e-05, + "loss": 1.0753, + "step": 4853 + }, + { + "epoch": 0.30434510000626996, + "grad_norm": 2.855026960372925, + "learning_rate": 1.6306083886649823e-05, + "loss": 1.0677, + "step": 4854 + }, + { + "epoch": 0.3044077998620603, + "grad_norm": 2.9147582054138184, + "learning_rate": 1.630450767789405e-05, + "loss": 1.1827, + "step": 4855 + }, + { + "epoch": 0.3044704997178507, + "grad_norm": 2.8744752407073975, + "learning_rate": 1.630293120914011e-05, + "loss": 1.0896, + "step": 4856 + }, + { + "epoch": 0.304533199573641, + "grad_norm": 3.342801332473755, + "learning_rate": 1.6301354480453005e-05, + "loss": 1.1236, + "step": 4857 + }, + { + "epoch": 0.3045958994294313, + "grad_norm": 3.1979997158050537, + "learning_rate": 1.6299777491897772e-05, + "loss": 1.007, + "step": 4858 + }, + { + "epoch": 0.30465859928522165, + "grad_norm": 2.8436293601989746, + "learning_rate": 1.6298200243539434e-05, + "loss": 1.165, + "step": 4859 + }, + { + "epoch": 0.304721299141012, + "grad_norm": 3.041818380355835, + "learning_rate": 1.6296622735443047e-05, + "loss": 1.1366, + "step": 4860 + }, + { + "epoch": 0.3047839989968023, + "grad_norm": 3.0087366104125977, + "learning_rate": 1.6295044967673664e-05, + "loss": 1.1271, + "step": 4861 + }, + { + "epoch": 0.30484669885259263, + "grad_norm": 3.149611711502075, + "learning_rate": 1.6293466940296353e-05, + "loss": 1.1117, + "step": 4862 + }, + { + "epoch": 0.30490939870838296, + "grad_norm": 3.2971980571746826, + "learning_rate": 1.6291888653376187e-05, + "loss": 1.1829, + "step": 4863 + }, + { + "epoch": 0.3049720985641733, + "grad_norm": 2.9726357460021973, + "learning_rate": 1.629031010697826e-05, + "loss": 1.162, + "step": 4864 + }, + { + "epoch": 0.3050347984199636, + "grad_norm": 2.853191375732422, + "learning_rate": 1.6288731301167667e-05, + "loss": 1.1691, + "step": 4865 + }, + { + "epoch": 0.30509749827575394, + "grad_norm": 2.9629452228546143, + "learning_rate": 
1.6287152236009526e-05, + "loss": 1.1979, + "step": 4866 + }, + { + "epoch": 0.3051601981315443, + "grad_norm": 3.093019962310791, + "learning_rate": 1.628557291156895e-05, + "loss": 1.1122, + "step": 4867 + }, + { + "epoch": 0.30522289798733465, + "grad_norm": 2.8002967834472656, + "learning_rate": 1.6283993327911068e-05, + "loss": 1.1523, + "step": 4868 + }, + { + "epoch": 0.305285597843125, + "grad_norm": 2.855384111404419, + "learning_rate": 1.628241348510103e-05, + "loss": 1.0952, + "step": 4869 + }, + { + "epoch": 0.3053482976989153, + "grad_norm": 2.8964409828186035, + "learning_rate": 1.6280833383203986e-05, + "loss": 1.0524, + "step": 4870 + }, + { + "epoch": 0.30541099755470563, + "grad_norm": 2.9907474517822266, + "learning_rate": 1.6279253022285098e-05, + "loss": 1.1203, + "step": 4871 + }, + { + "epoch": 0.30547369741049596, + "grad_norm": 3.0813019275665283, + "learning_rate": 1.6277672402409543e-05, + "loss": 1.1993, + "step": 4872 + }, + { + "epoch": 0.3055363972662863, + "grad_norm": 2.9283430576324463, + "learning_rate": 1.6276091523642504e-05, + "loss": 1.2597, + "step": 4873 + }, + { + "epoch": 0.3055990971220766, + "grad_norm": 2.7759246826171875, + "learning_rate": 1.6274510386049173e-05, + "loss": 1.0485, + "step": 4874 + }, + { + "epoch": 0.30566179697786694, + "grad_norm": 3.140223741531372, + "learning_rate": 1.6272928989694764e-05, + "loss": 1.2408, + "step": 4875 + }, + { + "epoch": 0.30572449683365727, + "grad_norm": 2.656252861022949, + "learning_rate": 1.6271347334644486e-05, + "loss": 1.1846, + "step": 4876 + }, + { + "epoch": 0.3057871966894476, + "grad_norm": 3.389265537261963, + "learning_rate": 1.6269765420963573e-05, + "loss": 1.2389, + "step": 4877 + }, + { + "epoch": 0.3058498965452379, + "grad_norm": 2.8147199153900146, + "learning_rate": 1.6268183248717257e-05, + "loss": 1.1591, + "step": 4878 + }, + { + "epoch": 0.3059125964010283, + "grad_norm": 3.2950785160064697, + "learning_rate": 1.6266600817970794e-05, + "loss": 1.1574, + "step": 4879 + }, + { + "epoch": 0.30597529625681863, + "grad_norm": 2.807218551635742, + "learning_rate": 1.6265018128789435e-05, + "loss": 0.9713, + "step": 4880 + }, + { + "epoch": 0.30603799611260896, + "grad_norm": 2.8054614067077637, + "learning_rate": 1.6263435181238457e-05, + "loss": 1.1417, + "step": 4881 + }, + { + "epoch": 0.3061006959683993, + "grad_norm": 3.1702017784118652, + "learning_rate": 1.626185197538314e-05, + "loss": 1.1821, + "step": 4882 + }, + { + "epoch": 0.3061633958241896, + "grad_norm": 3.3319950103759766, + "learning_rate": 1.6260268511288772e-05, + "loss": 1.0377, + "step": 4883 + }, + { + "epoch": 0.30622609567997994, + "grad_norm": 3.0928573608398438, + "learning_rate": 1.6258684789020657e-05, + "loss": 0.9643, + "step": 4884 + }, + { + "epoch": 0.30628879553577026, + "grad_norm": 2.9040307998657227, + "learning_rate": 1.625710080864411e-05, + "loss": 1.1875, + "step": 4885 + }, + { + "epoch": 0.3063514953915606, + "grad_norm": 3.121001720428467, + "learning_rate": 1.625551657022445e-05, + "loss": 1.1054, + "step": 4886 + }, + { + "epoch": 0.3064141952473509, + "grad_norm": 3.4327635765075684, + "learning_rate": 1.6253932073827017e-05, + "loss": 0.9173, + "step": 4887 + }, + { + "epoch": 0.30647689510314124, + "grad_norm": 2.7859160900115967, + "learning_rate": 1.6252347319517147e-05, + "loss": 1.1384, + "step": 4888 + }, + { + "epoch": 0.30653959495893157, + "grad_norm": 3.153935194015503, + "learning_rate": 1.6250762307360206e-05, + "loss": 1.3214, + "step": 4889 + }, + { + "epoch": 
0.3066022948147219, + "grad_norm": 3.3996222019195557, + "learning_rate": 1.624917703742155e-05, + "loss": 1.145, + "step": 4890 + }, + { + "epoch": 0.3066649946705123, + "grad_norm": 2.969505548477173, + "learning_rate": 1.624759150976656e-05, + "loss": 1.0395, + "step": 4891 + }, + { + "epoch": 0.3067276945263026, + "grad_norm": 2.8815479278564453, + "learning_rate": 1.6246005724460626e-05, + "loss": 1.1069, + "step": 4892 + }, + { + "epoch": 0.30679039438209293, + "grad_norm": 2.7891976833343506, + "learning_rate": 1.6244419681569146e-05, + "loss": 1.176, + "step": 4893 + }, + { + "epoch": 0.30685309423788326, + "grad_norm": 3.0722718238830566, + "learning_rate": 1.624283338115752e-05, + "loss": 1.0103, + "step": 4894 + }, + { + "epoch": 0.3069157940936736, + "grad_norm": 3.351494550704956, + "learning_rate": 1.624124682329118e-05, + "loss": 1.0233, + "step": 4895 + }, + { + "epoch": 0.3069784939494639, + "grad_norm": 3.3040902614593506, + "learning_rate": 1.623966000803554e-05, + "loss": 1.1513, + "step": 4896 + }, + { + "epoch": 0.30704119380525424, + "grad_norm": 3.0759284496307373, + "learning_rate": 1.6238072935456055e-05, + "loss": 1.0473, + "step": 4897 + }, + { + "epoch": 0.30710389366104457, + "grad_norm": 3.0299811363220215, + "learning_rate": 1.6236485605618164e-05, + "loss": 1.2446, + "step": 4898 + }, + { + "epoch": 0.3071665935168349, + "grad_norm": 2.8624072074890137, + "learning_rate": 1.6234898018587336e-05, + "loss": 1.1851, + "step": 4899 + }, + { + "epoch": 0.3072292933726252, + "grad_norm": 3.002704381942749, + "learning_rate": 1.6233310174429044e-05, + "loss": 1.0703, + "step": 4900 + }, + { + "epoch": 0.30729199322841555, + "grad_norm": 3.046074628829956, + "learning_rate": 1.6231722073208766e-05, + "loss": 1.1088, + "step": 4901 + }, + { + "epoch": 0.30735469308420593, + "grad_norm": 3.061746597290039, + "learning_rate": 1.6230133714992e-05, + "loss": 1.291, + "step": 4902 + }, + { + "epoch": 0.30741739293999626, + "grad_norm": 3.26965069770813, + "learning_rate": 1.6228545099844244e-05, + "loss": 1.1131, + "step": 4903 + }, + { + "epoch": 0.3074800927957866, + "grad_norm": 3.0791053771972656, + "learning_rate": 1.6226956227831018e-05, + "loss": 1.1194, + "step": 4904 + }, + { + "epoch": 0.3075427926515769, + "grad_norm": 3.1531808376312256, + "learning_rate": 1.6225367099017845e-05, + "loss": 1.2378, + "step": 4905 + }, + { + "epoch": 0.30760549250736724, + "grad_norm": 2.807182788848877, + "learning_rate": 1.622377771347026e-05, + "loss": 1.2828, + "step": 4906 + }, + { + "epoch": 0.30766819236315757, + "grad_norm": 3.0098211765289307, + "learning_rate": 1.6222188071253813e-05, + "loss": 1.1398, + "step": 4907 + }, + { + "epoch": 0.3077308922189479, + "grad_norm": 2.9467689990997314, + "learning_rate": 1.6220598172434058e-05, + "loss": 1.1688, + "step": 4908 + }, + { + "epoch": 0.3077935920747382, + "grad_norm": 3.170933485031128, + "learning_rate": 1.621900801707656e-05, + "loss": 1.1864, + "step": 4909 + }, + { + "epoch": 0.30785629193052855, + "grad_norm": 3.03954815864563, + "learning_rate": 1.6217417605246903e-05, + "loss": 1.1813, + "step": 4910 + }, + { + "epoch": 0.3079189917863189, + "grad_norm": 2.9502837657928467, + "learning_rate": 1.621582693701067e-05, + "loss": 1.2318, + "step": 4911 + }, + { + "epoch": 0.3079816916421092, + "grad_norm": 3.0553553104400635, + "learning_rate": 1.6214236012433464e-05, + "loss": 1.0112, + "step": 4912 + }, + { + "epoch": 0.30804439149789953, + "grad_norm": 2.9115123748779297, + "learning_rate": 
1.6212644831580893e-05, + "loss": 1.0919, + "step": 4913 + }, + { + "epoch": 0.3081070913536899, + "grad_norm": 2.896085500717163, + "learning_rate": 1.6211053394518578e-05, + "loss": 1.2455, + "step": 4914 + }, + { + "epoch": 0.30816979120948024, + "grad_norm": 2.748431444168091, + "learning_rate": 1.6209461701312148e-05, + "loss": 1.189, + "step": 4915 + }, + { + "epoch": 0.30823249106527056, + "grad_norm": 2.892507553100586, + "learning_rate": 1.6207869752027248e-05, + "loss": 1.3313, + "step": 4916 + }, + { + "epoch": 0.3082951909210609, + "grad_norm": 2.9766178131103516, + "learning_rate": 1.6206277546729526e-05, + "loss": 1.2318, + "step": 4917 + }, + { + "epoch": 0.3083578907768512, + "grad_norm": 3.2093098163604736, + "learning_rate": 1.6204685085484652e-05, + "loss": 1.2407, + "step": 4918 + }, + { + "epoch": 0.30842059063264154, + "grad_norm": 2.7790958881378174, + "learning_rate": 1.620309236835829e-05, + "loss": 1.1373, + "step": 4919 + }, + { + "epoch": 0.30848329048843187, + "grad_norm": 2.9302072525024414, + "learning_rate": 1.6201499395416126e-05, + "loss": 1.1189, + "step": 4920 + }, + { + "epoch": 0.3085459903442222, + "grad_norm": 3.078683614730835, + "learning_rate": 1.6199906166723864e-05, + "loss": 1.1586, + "step": 4921 + }, + { + "epoch": 0.3086086902000125, + "grad_norm": 2.9317619800567627, + "learning_rate": 1.6198312682347194e-05, + "loss": 1.1944, + "step": 4922 + }, + { + "epoch": 0.30867139005580285, + "grad_norm": 2.5821332931518555, + "learning_rate": 1.6196718942351838e-05, + "loss": 1.2395, + "step": 4923 + }, + { + "epoch": 0.3087340899115932, + "grad_norm": 3.0418732166290283, + "learning_rate": 1.6195124946803527e-05, + "loss": 1.2289, + "step": 4924 + }, + { + "epoch": 0.30879678976738356, + "grad_norm": 3.111846923828125, + "learning_rate": 1.619353069576799e-05, + "loss": 1.1743, + "step": 4925 + }, + { + "epoch": 0.3088594896231739, + "grad_norm": 3.0934267044067383, + "learning_rate": 1.6191936189310978e-05, + "loss": 1.1861, + "step": 4926 + }, + { + "epoch": 0.3089221894789642, + "grad_norm": 3.0724246501922607, + "learning_rate": 1.6190341427498245e-05, + "loss": 1.0659, + "step": 4927 + }, + { + "epoch": 0.30898488933475454, + "grad_norm": 3.3185932636260986, + "learning_rate": 1.6188746410395563e-05, + "loss": 1.0622, + "step": 4928 + }, + { + "epoch": 0.30904758919054487, + "grad_norm": 3.087401866912842, + "learning_rate": 1.6187151138068707e-05, + "loss": 1.1207, + "step": 4929 + }, + { + "epoch": 0.3091102890463352, + "grad_norm": 2.81744122505188, + "learning_rate": 1.6185555610583466e-05, + "loss": 1.1682, + "step": 4930 + }, + { + "epoch": 0.3091729889021255, + "grad_norm": 2.907259702682495, + "learning_rate": 1.6183959828005647e-05, + "loss": 1.1651, + "step": 4931 + }, + { + "epoch": 0.30923568875791585, + "grad_norm": 2.8483870029449463, + "learning_rate": 1.618236379040105e-05, + "loss": 1.1971, + "step": 4932 + }, + { + "epoch": 0.3092983886137062, + "grad_norm": 3.261059284210205, + "learning_rate": 1.6180767497835503e-05, + "loss": 0.9477, + "step": 4933 + }, + { + "epoch": 0.3093610884694965, + "grad_norm": 2.738413095474243, + "learning_rate": 1.6179170950374833e-05, + "loss": 1.1877, + "step": 4934 + }, + { + "epoch": 0.30942378832528683, + "grad_norm": 2.89949893951416, + "learning_rate": 1.617757414808488e-05, + "loss": 1.1033, + "step": 4935 + }, + { + "epoch": 0.30948648818107716, + "grad_norm": 2.8925693035125732, + "learning_rate": 1.6175977091031508e-05, + "loss": 1.2078, + "step": 4936 + }, + { + "epoch": 
0.30954918803686754, + "grad_norm": 2.8217928409576416, + "learning_rate": 1.6174379779280565e-05, + "loss": 1.1979, + "step": 4937 + }, + { + "epoch": 0.30961188789265787, + "grad_norm": 2.9889495372772217, + "learning_rate": 1.617278221289793e-05, + "loss": 1.23, + "step": 4938 + }, + { + "epoch": 0.3096745877484482, + "grad_norm": 2.874648332595825, + "learning_rate": 1.6171184391949492e-05, + "loss": 1.0568, + "step": 4939 + }, + { + "epoch": 0.3097372876042385, + "grad_norm": 2.867734909057617, + "learning_rate": 1.6169586316501135e-05, + "loss": 1.1415, + "step": 4940 + }, + { + "epoch": 0.30979998746002885, + "grad_norm": 2.850741386413574, + "learning_rate": 1.616798798661877e-05, + "loss": 1.2829, + "step": 4941 + }, + { + "epoch": 0.3098626873158192, + "grad_norm": 3.0809149742126465, + "learning_rate": 1.6166389402368312e-05, + "loss": 1.2278, + "step": 4942 + }, + { + "epoch": 0.3099253871716095, + "grad_norm": 3.181795835494995, + "learning_rate": 1.6164790563815685e-05, + "loss": 1.0492, + "step": 4943 + }, + { + "epoch": 0.30998808702739983, + "grad_norm": 3.1695144176483154, + "learning_rate": 1.6163191471026826e-05, + "loss": 1.0838, + "step": 4944 + }, + { + "epoch": 0.31005078688319015, + "grad_norm": 2.661731243133545, + "learning_rate": 1.6161592124067683e-05, + "loss": 1.0704, + "step": 4945 + }, + { + "epoch": 0.3101134867389805, + "grad_norm": 3.1007637977600098, + "learning_rate": 1.615999252300421e-05, + "loss": 1.1678, + "step": 4946 + }, + { + "epoch": 0.3101761865947708, + "grad_norm": 3.2349886894226074, + "learning_rate": 1.6158392667902378e-05, + "loss": 1.3499, + "step": 4947 + }, + { + "epoch": 0.3102388864505612, + "grad_norm": 2.878270149230957, + "learning_rate": 1.615679255882816e-05, + "loss": 1.1217, + "step": 4948 + }, + { + "epoch": 0.3103015863063515, + "grad_norm": 2.7776663303375244, + "learning_rate": 1.6155192195847552e-05, + "loss": 1.1681, + "step": 4949 + }, + { + "epoch": 0.31036428616214184, + "grad_norm": 3.1212029457092285, + "learning_rate": 1.6153591579026545e-05, + "loss": 1.1171, + "step": 4950 + }, + { + "epoch": 0.31042698601793217, + "grad_norm": 2.851928234100342, + "learning_rate": 1.6151990708431157e-05, + "loss": 1.2736, + "step": 4951 + }, + { + "epoch": 0.3104896858737225, + "grad_norm": 2.9722540378570557, + "learning_rate": 1.6150389584127402e-05, + "loss": 1.0698, + "step": 4952 + }, + { + "epoch": 0.3105523857295128, + "grad_norm": 3.2928242683410645, + "learning_rate": 1.614878820618131e-05, + "loss": 1.2307, + "step": 4953 + }, + { + "epoch": 0.31061508558530315, + "grad_norm": 2.8707759380340576, + "learning_rate": 1.6147186574658924e-05, + "loss": 1.0893, + "step": 4954 + }, + { + "epoch": 0.3106777854410935, + "grad_norm": 3.0735156536102295, + "learning_rate": 1.6145584689626297e-05, + "loss": 1.2016, + "step": 4955 + }, + { + "epoch": 0.3107404852968838, + "grad_norm": 3.37278413772583, + "learning_rate": 1.614398255114949e-05, + "loss": 1.2199, + "step": 4956 + }, + { + "epoch": 0.31080318515267413, + "grad_norm": 2.842963218688965, + "learning_rate": 1.6142380159294573e-05, + "loss": 1.0765, + "step": 4957 + }, + { + "epoch": 0.31086588500846446, + "grad_norm": 2.8192484378814697, + "learning_rate": 1.6140777514127627e-05, + "loss": 1.1762, + "step": 4958 + }, + { + "epoch": 0.3109285848642548, + "grad_norm": 2.9486501216888428, + "learning_rate": 1.6139174615714753e-05, + "loss": 1.1793, + "step": 4959 + }, + { + "epoch": 0.31099128472004517, + "grad_norm": 2.8240346908569336, + "learning_rate": 
1.6137571464122047e-05, + "loss": 1.0572, + "step": 4960 + }, + { + "epoch": 0.3110539845758355, + "grad_norm": 2.712554931640625, + "learning_rate": 1.6135968059415626e-05, + "loss": 1.1209, + "step": 4961 + }, + { + "epoch": 0.3111166844316258, + "grad_norm": 3.3011395931243896, + "learning_rate": 1.6134364401661615e-05, + "loss": 1.0791, + "step": 4962 + }, + { + "epoch": 0.31117938428741615, + "grad_norm": 2.9248104095458984, + "learning_rate": 1.613276049092615e-05, + "loss": 1.1281, + "step": 4963 + }, + { + "epoch": 0.3112420841432065, + "grad_norm": 3.384871244430542, + "learning_rate": 1.6131156327275372e-05, + "loss": 1.1923, + "step": 4964 + }, + { + "epoch": 0.3113047839989968, + "grad_norm": 3.4363977909088135, + "learning_rate": 1.6129551910775437e-05, + "loss": 0.8858, + "step": 4965 + }, + { + "epoch": 0.31136748385478713, + "grad_norm": 2.8586087226867676, + "learning_rate": 1.612794724149252e-05, + "loss": 1.111, + "step": 4966 + }, + { + "epoch": 0.31143018371057746, + "grad_norm": 2.97851824760437, + "learning_rate": 1.6126342319492784e-05, + "loss": 1.2179, + "step": 4967 + }, + { + "epoch": 0.3114928835663678, + "grad_norm": 3.080761432647705, + "learning_rate": 1.6124737144842427e-05, + "loss": 1.146, + "step": 4968 + }, + { + "epoch": 0.3115555834221581, + "grad_norm": 2.9080801010131836, + "learning_rate": 1.6123131717607644e-05, + "loss": 1.1721, + "step": 4969 + }, + { + "epoch": 0.31161828327794844, + "grad_norm": 3.043828010559082, + "learning_rate": 1.612152603785464e-05, + "loss": 1.1436, + "step": 4970 + }, + { + "epoch": 0.31168098313373876, + "grad_norm": 3.3948814868927, + "learning_rate": 1.6119920105649636e-05, + "loss": 1.0121, + "step": 4971 + }, + { + "epoch": 0.31174368298952915, + "grad_norm": 2.975037097930908, + "learning_rate": 1.6118313921058856e-05, + "loss": 1.0716, + "step": 4972 + }, + { + "epoch": 0.3118063828453195, + "grad_norm": 2.9316627979278564, + "learning_rate": 1.611670748414855e-05, + "loss": 1.2736, + "step": 4973 + }, + { + "epoch": 0.3118690827011098, + "grad_norm": 2.9211714267730713, + "learning_rate": 1.6115100794984952e-05, + "loss": 1.0143, + "step": 4974 + }, + { + "epoch": 0.31193178255690013, + "grad_norm": 3.2120399475097656, + "learning_rate": 1.6113493853634334e-05, + "loss": 0.9841, + "step": 4975 + }, + { + "epoch": 0.31199448241269045, + "grad_norm": 3.091036081314087, + "learning_rate": 1.611188666016296e-05, + "loss": 1.1538, + "step": 4976 + }, + { + "epoch": 0.3120571822684808, + "grad_norm": 2.821974992752075, + "learning_rate": 1.6110279214637115e-05, + "loss": 1.0037, + "step": 4977 + }, + { + "epoch": 0.3121198821242711, + "grad_norm": 3.225315570831299, + "learning_rate": 1.610867151712309e-05, + "loss": 1.0974, + "step": 4978 + }, + { + "epoch": 0.31218258198006144, + "grad_norm": 2.829653263092041, + "learning_rate": 1.6107063567687183e-05, + "loss": 1.1596, + "step": 4979 + }, + { + "epoch": 0.31224528183585176, + "grad_norm": 3.359950065612793, + "learning_rate": 1.610545536639571e-05, + "loss": 1.0937, + "step": 4980 + }, + { + "epoch": 0.3123079816916421, + "grad_norm": 2.956684112548828, + "learning_rate": 1.610384691331499e-05, + "loss": 1.1923, + "step": 4981 + }, + { + "epoch": 0.3123706815474324, + "grad_norm": 2.9347245693206787, + "learning_rate": 1.6102238208511357e-05, + "loss": 1.0026, + "step": 4982 + }, + { + "epoch": 0.3124333814032228, + "grad_norm": 3.421922206878662, + "learning_rate": 1.610062925205115e-05, + "loss": 1.2472, + "step": 4983 + }, + { + "epoch": 0.3124960812590131, + 
"grad_norm": 2.584181308746338, + "learning_rate": 1.609902004400073e-05, + "loss": 1.2076, + "step": 4984 + }, + { + "epoch": 0.31255878111480345, + "grad_norm": 2.859154224395752, + "learning_rate": 1.6097410584426455e-05, + "loss": 1.1229, + "step": 4985 + }, + { + "epoch": 0.3126214809705938, + "grad_norm": 3.109792470932007, + "learning_rate": 1.6095800873394702e-05, + "loss": 1.2233, + "step": 4986 + }, + { + "epoch": 0.3126841808263841, + "grad_norm": 2.652130126953125, + "learning_rate": 1.6094190910971855e-05, + "loss": 1.2, + "step": 4987 + }, + { + "epoch": 0.31274688068217443, + "grad_norm": 3.1081461906433105, + "learning_rate": 1.609258069722431e-05, + "loss": 0.9686, + "step": 4988 + }, + { + "epoch": 0.31280958053796476, + "grad_norm": 2.919717311859131, + "learning_rate": 1.609097023221847e-05, + "loss": 1.1062, + "step": 4989 + }, + { + "epoch": 0.3128722803937551, + "grad_norm": 3.117673873901367, + "learning_rate": 1.6089359516020752e-05, + "loss": 1.1477, + "step": 4990 + }, + { + "epoch": 0.3129349802495454, + "grad_norm": 3.0663552284240723, + "learning_rate": 1.6087748548697582e-05, + "loss": 1.2685, + "step": 4991 + }, + { + "epoch": 0.31299768010533574, + "grad_norm": 3.2102372646331787, + "learning_rate": 1.608613733031539e-05, + "loss": 1.2888, + "step": 4992 + }, + { + "epoch": 0.31306037996112607, + "grad_norm": 2.960695266723633, + "learning_rate": 1.608452586094064e-05, + "loss": 1.0005, + "step": 4993 + }, + { + "epoch": 0.3131230798169164, + "grad_norm": 3.0284006595611572, + "learning_rate": 1.6082914140639768e-05, + "loss": 1.1743, + "step": 4994 + }, + { + "epoch": 0.3131857796727068, + "grad_norm": 3.2042999267578125, + "learning_rate": 1.6081302169479255e-05, + "loss": 1.2447, + "step": 4995 + }, + { + "epoch": 0.3132484795284971, + "grad_norm": 3.2135818004608154, + "learning_rate": 1.6079689947525574e-05, + "loss": 1.1103, + "step": 4996 + }, + { + "epoch": 0.31331117938428743, + "grad_norm": 3.146771192550659, + "learning_rate": 1.6078077474845215e-05, + "loss": 1.1906, + "step": 4997 + }, + { + "epoch": 0.31337387924007776, + "grad_norm": 2.831664562225342, + "learning_rate": 1.6076464751504676e-05, + "loss": 1.1815, + "step": 4998 + }, + { + "epoch": 0.3134365790958681, + "grad_norm": 2.9566378593444824, + "learning_rate": 1.6074851777570462e-05, + "loss": 1.4534, + "step": 4999 + }, + { + "epoch": 0.3134992789516584, + "grad_norm": 2.6972241401672363, + "learning_rate": 1.60732385531091e-05, + "loss": 1.1131, + "step": 5000 + }, + { + "epoch": 0.3134992789516584, + "eval_loss": 1.1579830646514893, + "eval_runtime": 144.11, + "eval_samples_per_second": 4.372, + "eval_steps_per_second": 1.096, + "step": 5000 + }, + { + "epoch": 0.31356197880744874, + "grad_norm": 3.158693790435791, + "learning_rate": 1.6071625078187113e-05, + "loss": 1.0801, + "step": 5001 + }, + { + "epoch": 0.31362467866323906, + "grad_norm": 3.1079938411712646, + "learning_rate": 1.6070011352871043e-05, + "loss": 1.0434, + "step": 5002 + }, + { + "epoch": 0.3136873785190294, + "grad_norm": 3.1619811058044434, + "learning_rate": 1.606839737722744e-05, + "loss": 1.1785, + "step": 5003 + }, + { + "epoch": 0.3137500783748197, + "grad_norm": 3.2835824489593506, + "learning_rate": 1.6066783151322863e-05, + "loss": 1.1385, + "step": 5004 + }, + { + "epoch": 0.31381277823061005, + "grad_norm": 2.889157772064209, + "learning_rate": 1.6065168675223883e-05, + "loss": 1.2221, + "step": 5005 + }, + { + "epoch": 0.31387547808640043, + "grad_norm": 2.7740321159362793, + "learning_rate": 
1.6063553948997084e-05, + "loss": 1.166, + "step": 5006 + }, + { + "epoch": 0.31393817794219075, + "grad_norm": 2.991572380065918, + "learning_rate": 1.6061938972709055e-05, + "loss": 1.12, + "step": 5007 + }, + { + "epoch": 0.3140008777979811, + "grad_norm": 3.022333860397339, + "learning_rate": 1.6060323746426398e-05, + "loss": 1.0059, + "step": 5008 + }, + { + "epoch": 0.3140635776537714, + "grad_norm": 2.9256808757781982, + "learning_rate": 1.6058708270215722e-05, + "loss": 1.2115, + "step": 5009 + }, + { + "epoch": 0.31412627750956174, + "grad_norm": 2.989197254180908, + "learning_rate": 1.6057092544143656e-05, + "loss": 1.1122, + "step": 5010 + }, + { + "epoch": 0.31418897736535206, + "grad_norm": 2.691293716430664, + "learning_rate": 1.605547656827683e-05, + "loss": 1.3609, + "step": 5011 + }, + { + "epoch": 0.3142516772211424, + "grad_norm": 2.940919876098633, + "learning_rate": 1.6053860342681883e-05, + "loss": 1.071, + "step": 5012 + }, + { + "epoch": 0.3143143770769327, + "grad_norm": 3.143937826156616, + "learning_rate": 1.605224386742547e-05, + "loss": 1.1091, + "step": 5013 + }, + { + "epoch": 0.31437707693272304, + "grad_norm": 3.079259157180786, + "learning_rate": 1.605062714257426e-05, + "loss": 1.1864, + "step": 5014 + }, + { + "epoch": 0.31443977678851337, + "grad_norm": 3.085209369659424, + "learning_rate": 1.604901016819492e-05, + "loss": 1.1279, + "step": 5015 + }, + { + "epoch": 0.3145024766443037, + "grad_norm": 3.4791316986083984, + "learning_rate": 1.6047392944354136e-05, + "loss": 1.145, + "step": 5016 + }, + { + "epoch": 0.314565176500094, + "grad_norm": 3.39395809173584, + "learning_rate": 1.6045775471118603e-05, + "loss": 1.1269, + "step": 5017 + }, + { + "epoch": 0.3146278763558844, + "grad_norm": 3.1508147716522217, + "learning_rate": 1.6044157748555024e-05, + "loss": 1.2651, + "step": 5018 + }, + { + "epoch": 0.31469057621167473, + "grad_norm": 3.0969855785369873, + "learning_rate": 1.6042539776730118e-05, + "loss": 1.0399, + "step": 5019 + }, + { + "epoch": 0.31475327606746506, + "grad_norm": 2.74198842048645, + "learning_rate": 1.6040921555710607e-05, + "loss": 1.0433, + "step": 5020 + }, + { + "epoch": 0.3148159759232554, + "grad_norm": 3.4178571701049805, + "learning_rate": 1.603930308556323e-05, + "loss": 1.187, + "step": 5021 + }, + { + "epoch": 0.3148786757790457, + "grad_norm": 3.158886194229126, + "learning_rate": 1.6037684366354723e-05, + "loss": 1.1176, + "step": 5022 + }, + { + "epoch": 0.31494137563483604, + "grad_norm": 3.2432546615600586, + "learning_rate": 1.6036065398151857e-05, + "loss": 1.1382, + "step": 5023 + }, + { + "epoch": 0.31500407549062637, + "grad_norm": 3.305644989013672, + "learning_rate": 1.6034446181021386e-05, + "loss": 1.1056, + "step": 5024 + }, + { + "epoch": 0.3150667753464167, + "grad_norm": 2.680689811706543, + "learning_rate": 1.603282671503009e-05, + "loss": 1.1387, + "step": 5025 + }, + { + "epoch": 0.315129475202207, + "grad_norm": 2.8218297958374023, + "learning_rate": 1.603120700024476e-05, + "loss": 1.1359, + "step": 5026 + }, + { + "epoch": 0.31519217505799735, + "grad_norm": 3.090026617050171, + "learning_rate": 1.6029587036732188e-05, + "loss": 1.1634, + "step": 5027 + }, + { + "epoch": 0.3152548749137877, + "grad_norm": 2.8817734718322754, + "learning_rate": 1.6027966824559185e-05, + "loss": 1.1851, + "step": 5028 + }, + { + "epoch": 0.31531757476957806, + "grad_norm": 3.0856668949127197, + "learning_rate": 1.6026346363792565e-05, + "loss": 1.2074, + "step": 5029 + }, + { + "epoch": 0.3153802746253684, + 
"grad_norm": 2.873854398727417, + "learning_rate": 1.602472565449916e-05, + "loss": 1.1031, + "step": 5030 + }, + { + "epoch": 0.3154429744811587, + "grad_norm": 3.095099687576294, + "learning_rate": 1.6023104696745802e-05, + "loss": 1.173, + "step": 5031 + }, + { + "epoch": 0.31550567433694904, + "grad_norm": 3.163376808166504, + "learning_rate": 1.6021483490599344e-05, + "loss": 1.0332, + "step": 5032 + }, + { + "epoch": 0.31556837419273936, + "grad_norm": 2.779737949371338, + "learning_rate": 1.6019862036126648e-05, + "loss": 1.1696, + "step": 5033 + }, + { + "epoch": 0.3156310740485297, + "grad_norm": 2.7977724075317383, + "learning_rate": 1.601824033339458e-05, + "loss": 1.1085, + "step": 5034 + }, + { + "epoch": 0.31569377390432, + "grad_norm": 2.8046486377716064, + "learning_rate": 1.6016618382470014e-05, + "loss": 1.139, + "step": 5035 + }, + { + "epoch": 0.31575647376011035, + "grad_norm": 2.9239931106567383, + "learning_rate": 1.601499618341984e-05, + "loss": 1.0714, + "step": 5036 + }, + { + "epoch": 0.3158191736159007, + "grad_norm": 2.978647470474243, + "learning_rate": 1.6013373736310963e-05, + "loss": 1.1038, + "step": 5037 + }, + { + "epoch": 0.315881873471691, + "grad_norm": 3.0368053913116455, + "learning_rate": 1.601175104121029e-05, + "loss": 1.0997, + "step": 5038 + }, + { + "epoch": 0.3159445733274813, + "grad_norm": 2.7649238109588623, + "learning_rate": 1.6010128098184747e-05, + "loss": 1.115, + "step": 5039 + }, + { + "epoch": 0.31600727318327165, + "grad_norm": 2.953903913497925, + "learning_rate": 1.6008504907301254e-05, + "loss": 1.2639, + "step": 5040 + }, + { + "epoch": 0.31606997303906204, + "grad_norm": 2.868938446044922, + "learning_rate": 1.6006881468626757e-05, + "loss": 1.2225, + "step": 5041 + }, + { + "epoch": 0.31613267289485236, + "grad_norm": 3.2385525703430176, + "learning_rate": 1.6005257782228208e-05, + "loss": 1.0313, + "step": 5042 + }, + { + "epoch": 0.3161953727506427, + "grad_norm": 2.808997631072998, + "learning_rate": 1.6003633848172563e-05, + "loss": 1.1374, + "step": 5043 + }, + { + "epoch": 0.316258072606433, + "grad_norm": 2.9084064960479736, + "learning_rate": 1.6002009666526796e-05, + "loss": 1.1951, + "step": 5044 + }, + { + "epoch": 0.31632077246222334, + "grad_norm": 3.1836230754852295, + "learning_rate": 1.6000385237357886e-05, + "loss": 1.1384, + "step": 5045 + }, + { + "epoch": 0.31638347231801367, + "grad_norm": 2.9289731979370117, + "learning_rate": 1.599876056073283e-05, + "loss": 1.1965, + "step": 5046 + }, + { + "epoch": 0.316446172173804, + "grad_norm": 3.0272512435913086, + "learning_rate": 1.599713563671863e-05, + "loss": 1.0203, + "step": 5047 + }, + { + "epoch": 0.3165088720295943, + "grad_norm": 2.8654277324676514, + "learning_rate": 1.599551046538229e-05, + "loss": 1.2214, + "step": 5048 + }, + { + "epoch": 0.31657157188538465, + "grad_norm": 3.1719329357147217, + "learning_rate": 1.5993885046790837e-05, + "loss": 1.018, + "step": 5049 + }, + { + "epoch": 0.316634271741175, + "grad_norm": 3.0811996459960938, + "learning_rate": 1.5992259381011304e-05, + "loss": 1.0869, + "step": 5050 + }, + { + "epoch": 0.3166969715969653, + "grad_norm": 2.984362840652466, + "learning_rate": 1.599063346811073e-05, + "loss": 1.2778, + "step": 5051 + }, + { + "epoch": 0.31675967145275563, + "grad_norm": 2.878838300704956, + "learning_rate": 1.598900730815617e-05, + "loss": 1.1582, + "step": 5052 + }, + { + "epoch": 0.316822371308546, + "grad_norm": 2.9894232749938965, + "learning_rate": 1.598738090121469e-05, + "loss": 1.0785, + 
"step": 5053 + }, + { + "epoch": 0.31688507116433634, + "grad_norm": 3.045478582382202, + "learning_rate": 1.598575424735336e-05, + "loss": 1.2963, + "step": 5054 + }, + { + "epoch": 0.31694777102012667, + "grad_norm": 2.760641098022461, + "learning_rate": 1.5984127346639264e-05, + "loss": 1.1624, + "step": 5055 + }, + { + "epoch": 0.317010470875917, + "grad_norm": 2.699169874191284, + "learning_rate": 1.5982500199139494e-05, + "loss": 1.097, + "step": 5056 + }, + { + "epoch": 0.3170731707317073, + "grad_norm": 2.743464708328247, + "learning_rate": 1.598087280492115e-05, + "loss": 1.1369, + "step": 5057 + }, + { + "epoch": 0.31713587058749765, + "grad_norm": 2.7062721252441406, + "learning_rate": 1.5979245164051357e-05, + "loss": 1.0168, + "step": 5058 + }, + { + "epoch": 0.317198570443288, + "grad_norm": 2.898089647293091, + "learning_rate": 1.5977617276597227e-05, + "loss": 1.0975, + "step": 5059 + }, + { + "epoch": 0.3172612702990783, + "grad_norm": 3.2721474170684814, + "learning_rate": 1.5975989142625902e-05, + "loss": 1.2078, + "step": 5060 + }, + { + "epoch": 0.31732397015486863, + "grad_norm": 2.6284737586975098, + "learning_rate": 1.5974360762204527e-05, + "loss": 1.1372, + "step": 5061 + }, + { + "epoch": 0.31738667001065896, + "grad_norm": 2.91690993309021, + "learning_rate": 1.5972732135400247e-05, + "loss": 1.2127, + "step": 5062 + }, + { + "epoch": 0.3174493698664493, + "grad_norm": 2.8012051582336426, + "learning_rate": 1.5971103262280237e-05, + "loss": 1.0556, + "step": 5063 + }, + { + "epoch": 0.31751206972223966, + "grad_norm": 3.091566324234009, + "learning_rate": 1.596947414291167e-05, + "loss": 1.1616, + "step": 5064 + }, + { + "epoch": 0.31757476957803, + "grad_norm": 2.6396780014038086, + "learning_rate": 1.5967844777361723e-05, + "loss": 1.1467, + "step": 5065 + }, + { + "epoch": 0.3176374694338203, + "grad_norm": 2.9715566635131836, + "learning_rate": 1.59662151656976e-05, + "loss": 1.0712, + "step": 5066 + }, + { + "epoch": 0.31770016928961065, + "grad_norm": 3.270676612854004, + "learning_rate": 1.59645853079865e-05, + "loss": 0.9596, + "step": 5067 + }, + { + "epoch": 0.317762869145401, + "grad_norm": 2.9510791301727295, + "learning_rate": 1.5962955204295646e-05, + "loss": 1.0722, + "step": 5068 + }, + { + "epoch": 0.3178255690011913, + "grad_norm": 3.044959306716919, + "learning_rate": 1.5961324854692254e-05, + "loss": 0.99, + "step": 5069 + }, + { + "epoch": 0.3178882688569816, + "grad_norm": 3.1632893085479736, + "learning_rate": 1.5959694259243567e-05, + "loss": 1.0555, + "step": 5070 + }, + { + "epoch": 0.31795096871277195, + "grad_norm": 2.964622974395752, + "learning_rate": 1.5958063418016832e-05, + "loss": 0.9966, + "step": 5071 + }, + { + "epoch": 0.3180136685685623, + "grad_norm": 2.9846444129943848, + "learning_rate": 1.5956432331079293e-05, + "loss": 1.0243, + "step": 5072 + }, + { + "epoch": 0.3180763684243526, + "grad_norm": 2.9187355041503906, + "learning_rate": 1.5954800998498236e-05, + "loss": 1.2102, + "step": 5073 + }, + { + "epoch": 0.31813906828014293, + "grad_norm": 3.1813251972198486, + "learning_rate": 1.5953169420340917e-05, + "loss": 1.1691, + "step": 5074 + }, + { + "epoch": 0.31820176813593326, + "grad_norm": 3.1699817180633545, + "learning_rate": 1.5951537596674632e-05, + "loss": 1.2742, + "step": 5075 + }, + { + "epoch": 0.31826446799172364, + "grad_norm": 3.5720021724700928, + "learning_rate": 1.5949905527566677e-05, + "loss": 1.3082, + "step": 5076 + }, + { + "epoch": 0.31832716784751397, + "grad_norm": 3.061305284500122, + 
"learning_rate": 1.5948273213084362e-05, + "loss": 0.998, + "step": 5077 + }, + { + "epoch": 0.3183898677033043, + "grad_norm": 2.95068359375, + "learning_rate": 1.5946640653294994e-05, + "loss": 1.108, + "step": 5078 + }, + { + "epoch": 0.3184525675590946, + "grad_norm": 3.198637008666992, + "learning_rate": 1.5945007848265912e-05, + "loss": 0.9356, + "step": 5079 + }, + { + "epoch": 0.31851526741488495, + "grad_norm": 2.5374443531036377, + "learning_rate": 1.5943374798064444e-05, + "loss": 1.2628, + "step": 5080 + }, + { + "epoch": 0.3185779672706753, + "grad_norm": 3.1249120235443115, + "learning_rate": 1.594174150275794e-05, + "loss": 1.0673, + "step": 5081 + }, + { + "epoch": 0.3186406671264656, + "grad_norm": 2.7282814979553223, + "learning_rate": 1.5940107962413755e-05, + "loss": 1.0791, + "step": 5082 + }, + { + "epoch": 0.31870336698225593, + "grad_norm": 3.075327157974243, + "learning_rate": 1.593847417709926e-05, + "loss": 1.1627, + "step": 5083 + }, + { + "epoch": 0.31876606683804626, + "grad_norm": 3.0918586254119873, + "learning_rate": 1.593684014688183e-05, + "loss": 1.1107, + "step": 5084 + }, + { + "epoch": 0.3188287666938366, + "grad_norm": 2.619288444519043, + "learning_rate": 1.5935205871828854e-05, + "loss": 1.2731, + "step": 5085 + }, + { + "epoch": 0.3188914665496269, + "grad_norm": 2.6460278034210205, + "learning_rate": 1.593357135200773e-05, + "loss": 1.2238, + "step": 5086 + }, + { + "epoch": 0.3189541664054173, + "grad_norm": 2.8451788425445557, + "learning_rate": 1.5931936587485865e-05, + "loss": 1.1449, + "step": 5087 + }, + { + "epoch": 0.3190168662612076, + "grad_norm": 2.7147092819213867, + "learning_rate": 1.593030157833067e-05, + "loss": 1.1832, + "step": 5088 + }, + { + "epoch": 0.31907956611699795, + "grad_norm": 3.295731544494629, + "learning_rate": 1.5928666324609584e-05, + "loss": 1.0744, + "step": 5089 + }, + { + "epoch": 0.3191422659727883, + "grad_norm": 3.1202499866485596, + "learning_rate": 1.5927030826390037e-05, + "loss": 1.2531, + "step": 5090 + }, + { + "epoch": 0.3192049658285786, + "grad_norm": 2.895843505859375, + "learning_rate": 1.592539508373948e-05, + "loss": 1.2081, + "step": 5091 + }, + { + "epoch": 0.31926766568436893, + "grad_norm": 3.003322124481201, + "learning_rate": 1.592375909672537e-05, + "loss": 1.0074, + "step": 5092 + }, + { + "epoch": 0.31933036554015926, + "grad_norm": 2.832320213317871, + "learning_rate": 1.5922122865415183e-05, + "loss": 1.1436, + "step": 5093 + }, + { + "epoch": 0.3193930653959496, + "grad_norm": 3.1914355754852295, + "learning_rate": 1.5920486389876383e-05, + "loss": 1.1147, + "step": 5094 + }, + { + "epoch": 0.3194557652517399, + "grad_norm": 3.1552178859710693, + "learning_rate": 1.591884967017647e-05, + "loss": 1.0119, + "step": 5095 + }, + { + "epoch": 0.31951846510753024, + "grad_norm": 2.8781309127807617, + "learning_rate": 1.5917212706382937e-05, + "loss": 1.1639, + "step": 5096 + }, + { + "epoch": 0.31958116496332056, + "grad_norm": 2.992565155029297, + "learning_rate": 1.5915575498563293e-05, + "loss": 1.2038, + "step": 5097 + }, + { + "epoch": 0.3196438648191109, + "grad_norm": 3.4123125076293945, + "learning_rate": 1.591393804678506e-05, + "loss": 1.1011, + "step": 5098 + }, + { + "epoch": 0.3197065646749013, + "grad_norm": 3.4050934314727783, + "learning_rate": 1.591230035111576e-05, + "loss": 1.1732, + "step": 5099 + }, + { + "epoch": 0.3197692645306916, + "grad_norm": 3.2216885089874268, + "learning_rate": 1.5910662411622934e-05, + "loss": 1.2083, + "step": 5100 + }, + { + "epoch": 
0.3198319643864819, + "grad_norm": 3.387206792831421, + "learning_rate": 1.5909024228374135e-05, + "loss": 1.1775, + "step": 5101 + }, + { + "epoch": 0.31989466424227225, + "grad_norm": 3.2127606868743896, + "learning_rate": 1.5907385801436918e-05, + "loss": 1.2001, + "step": 5102 + }, + { + "epoch": 0.3199573640980626, + "grad_norm": 2.7369210720062256, + "learning_rate": 1.5905747130878853e-05, + "loss": 1.1869, + "step": 5103 + }, + { + "epoch": 0.3200200639538529, + "grad_norm": 2.744629144668579, + "learning_rate": 1.5904108216767516e-05, + "loss": 1.1823, + "step": 5104 + }, + { + "epoch": 0.32008276380964323, + "grad_norm": 2.897364854812622, + "learning_rate": 1.5902469059170504e-05, + "loss": 1.2296, + "step": 5105 + }, + { + "epoch": 0.32014546366543356, + "grad_norm": 3.1060502529144287, + "learning_rate": 1.5900829658155406e-05, + "loss": 1.2985, + "step": 5106 + }, + { + "epoch": 0.3202081635212239, + "grad_norm": 2.976170063018799, + "learning_rate": 1.5899190013789833e-05, + "loss": 1.0886, + "step": 5107 + }, + { + "epoch": 0.3202708633770142, + "grad_norm": 2.77752947807312, + "learning_rate": 1.589755012614141e-05, + "loss": 1.1941, + "step": 5108 + }, + { + "epoch": 0.32033356323280454, + "grad_norm": 3.1635067462921143, + "learning_rate": 1.589590999527776e-05, + "loss": 1.2177, + "step": 5109 + }, + { + "epoch": 0.3203962630885949, + "grad_norm": 3.0345280170440674, + "learning_rate": 1.5894269621266527e-05, + "loss": 1.1522, + "step": 5110 + }, + { + "epoch": 0.32045896294438525, + "grad_norm": 3.3947770595550537, + "learning_rate": 1.5892629004175358e-05, + "loss": 1.1439, + "step": 5111 + }, + { + "epoch": 0.3205216628001756, + "grad_norm": 2.9579837322235107, + "learning_rate": 1.589098814407191e-05, + "loss": 1.1135, + "step": 5112 + }, + { + "epoch": 0.3205843626559659, + "grad_norm": 3.002920627593994, + "learning_rate": 1.588934704102385e-05, + "loss": 1.1343, + "step": 5113 + }, + { + "epoch": 0.32064706251175623, + "grad_norm": 3.212357521057129, + "learning_rate": 1.5887705695098866e-05, + "loss": 1.1458, + "step": 5114 + }, + { + "epoch": 0.32070976236754656, + "grad_norm": 2.779210329055786, + "learning_rate": 1.588606410636464e-05, + "loss": 1.3164, + "step": 5115 + }, + { + "epoch": 0.3207724622233369, + "grad_norm": 2.90960431098938, + "learning_rate": 1.5884422274888873e-05, + "loss": 1.1548, + "step": 5116 + }, + { + "epoch": 0.3208351620791272, + "grad_norm": 3.1068758964538574, + "learning_rate": 1.5882780200739275e-05, + "loss": 1.1294, + "step": 5117 + }, + { + "epoch": 0.32089786193491754, + "grad_norm": 3.0046498775482178, + "learning_rate": 1.5881137883983564e-05, + "loss": 1.2677, + "step": 5118 + }, + { + "epoch": 0.32096056179070787, + "grad_norm": 2.858140468597412, + "learning_rate": 1.587949532468947e-05, + "loss": 1.0769, + "step": 5119 + }, + { + "epoch": 0.3210232616464982, + "grad_norm": 3.3454928398132324, + "learning_rate": 1.5877852522924733e-05, + "loss": 1.2745, + "step": 5120 + }, + { + "epoch": 0.3210859615022885, + "grad_norm": 3.0327565670013428, + "learning_rate": 1.58762094787571e-05, + "loss": 0.9778, + "step": 5121 + }, + { + "epoch": 0.3211486613580789, + "grad_norm": 3.0320372581481934, + "learning_rate": 1.5874566192254333e-05, + "loss": 1.0103, + "step": 5122 + }, + { + "epoch": 0.32121136121386923, + "grad_norm": 2.9764468669891357, + "learning_rate": 1.58729226634842e-05, + "loss": 1.1144, + "step": 5123 + }, + { + "epoch": 0.32127406106965956, + "grad_norm": 3.39500093460083, + "learning_rate": 
1.5871278892514478e-05, + "loss": 1.2132, + "step": 5124 + }, + { + "epoch": 0.3213367609254499, + "grad_norm": 3.270872116088867, + "learning_rate": 1.5869634879412958e-05, + "loss": 1.1869, + "step": 5125 + }, + { + "epoch": 0.3213994607812402, + "grad_norm": 3.1153359413146973, + "learning_rate": 1.586799062424744e-05, + "loss": 1.0761, + "step": 5126 + }, + { + "epoch": 0.32146216063703054, + "grad_norm": 3.2500648498535156, + "learning_rate": 1.5866346127085733e-05, + "loss": 1.0615, + "step": 5127 + }, + { + "epoch": 0.32152486049282086, + "grad_norm": 2.859417676925659, + "learning_rate": 1.5864701387995656e-05, + "loss": 1.2244, + "step": 5128 + }, + { + "epoch": 0.3215875603486112, + "grad_norm": 3.171053171157837, + "learning_rate": 1.5863056407045034e-05, + "loss": 1.2117, + "step": 5129 + }, + { + "epoch": 0.3216502602044015, + "grad_norm": 2.900256395339966, + "learning_rate": 1.586141118430171e-05, + "loss": 1.1139, + "step": 5130 + }, + { + "epoch": 0.32171296006019184, + "grad_norm": 3.30643892288208, + "learning_rate": 1.5859765719833536e-05, + "loss": 1.0283, + "step": 5131 + }, + { + "epoch": 0.32177565991598217, + "grad_norm": 2.9698262214660645, + "learning_rate": 1.5858120013708366e-05, + "loss": 1.1295, + "step": 5132 + }, + { + "epoch": 0.3218383597717725, + "grad_norm": 2.49753475189209, + "learning_rate": 1.5856474065994074e-05, + "loss": 1.2421, + "step": 5133 + }, + { + "epoch": 0.3219010596275629, + "grad_norm": 3.1440980434417725, + "learning_rate": 1.5854827876758535e-05, + "loss": 1.1797, + "step": 5134 + }, + { + "epoch": 0.3219637594833532, + "grad_norm": 2.6213345527648926, + "learning_rate": 1.5853181446069635e-05, + "loss": 1.0895, + "step": 5135 + }, + { + "epoch": 0.32202645933914353, + "grad_norm": 3.2108888626098633, + "learning_rate": 1.585153477399528e-05, + "loss": 1.2557, + "step": 5136 + }, + { + "epoch": 0.32208915919493386, + "grad_norm": 3.044926166534424, + "learning_rate": 1.5849887860603374e-05, + "loss": 1.1289, + "step": 5137 + }, + { + "epoch": 0.3221518590507242, + "grad_norm": 3.4364631175994873, + "learning_rate": 1.5848240705961837e-05, + "loss": 1.245, + "step": 5138 + }, + { + "epoch": 0.3222145589065145, + "grad_norm": 3.0131235122680664, + "learning_rate": 1.58465933101386e-05, + "loss": 1.0092, + "step": 5139 + }, + { + "epoch": 0.32227725876230484, + "grad_norm": 3.110863447189331, + "learning_rate": 1.5844945673201594e-05, + "loss": 1.1454, + "step": 5140 + }, + { + "epoch": 0.32233995861809517, + "grad_norm": 2.8795058727264404, + "learning_rate": 1.5843297795218776e-05, + "loss": 1.0374, + "step": 5141 + }, + { + "epoch": 0.3224026584738855, + "grad_norm": 3.0130820274353027, + "learning_rate": 1.5841649676258106e-05, + "loss": 0.9554, + "step": 5142 + }, + { + "epoch": 0.3224653583296758, + "grad_norm": 3.2029457092285156, + "learning_rate": 1.5840001316387545e-05, + "loss": 1.2655, + "step": 5143 + }, + { + "epoch": 0.32252805818546615, + "grad_norm": 3.379600763320923, + "learning_rate": 1.5838352715675074e-05, + "loss": 1.0612, + "step": 5144 + }, + { + "epoch": 0.32259075804125653, + "grad_norm": 2.8823325634002686, + "learning_rate": 1.583670387418868e-05, + "loss": 1.1532, + "step": 5145 + }, + { + "epoch": 0.32265345789704686, + "grad_norm": 2.881028890609741, + "learning_rate": 1.5835054791996366e-05, + "loss": 0.9374, + "step": 5146 + }, + { + "epoch": 0.3227161577528372, + "grad_norm": 3.3568174839019775, + "learning_rate": 1.5833405469166142e-05, + "loss": 1.1226, + "step": 5147 + }, + { + "epoch": 
0.3227788576086275, + "grad_norm": 3.056586742401123, + "learning_rate": 1.5831755905766012e-05, + "loss": 0.9897, + "step": 5148 + }, + { + "epoch": 0.32284155746441784, + "grad_norm": 3.5879006385803223, + "learning_rate": 1.5830106101864022e-05, + "loss": 1.0265, + "step": 5149 + }, + { + "epoch": 0.32290425732020817, + "grad_norm": 3.3803794384002686, + "learning_rate": 1.58284560575282e-05, + "loss": 1.0477, + "step": 5150 + }, + { + "epoch": 0.3229669571759985, + "grad_norm": 2.720541000366211, + "learning_rate": 1.5826805772826592e-05, + "loss": 1.0949, + "step": 5151 + }, + { + "epoch": 0.3230296570317888, + "grad_norm": 2.6391894817352295, + "learning_rate": 1.582515524782726e-05, + "loss": 1.3159, + "step": 5152 + }, + { + "epoch": 0.32309235688757915, + "grad_norm": 3.032339572906494, + "learning_rate": 1.5823504482598275e-05, + "loss": 1.2421, + "step": 5153 + }, + { + "epoch": 0.3231550567433695, + "grad_norm": 2.5803816318511963, + "learning_rate": 1.582185347720771e-05, + "loss": 1.1886, + "step": 5154 + }, + { + "epoch": 0.3232177565991598, + "grad_norm": 3.082258939743042, + "learning_rate": 1.582020223172365e-05, + "loss": 1.052, + "step": 5155 + }, + { + "epoch": 0.3232804564549501, + "grad_norm": 2.6650125980377197, + "learning_rate": 1.5818550746214198e-05, + "loss": 1.1517, + "step": 5156 + }, + { + "epoch": 0.3233431563107405, + "grad_norm": 3.459990978240967, + "learning_rate": 1.581689902074746e-05, + "loss": 1.1766, + "step": 5157 + }, + { + "epoch": 0.32340585616653084, + "grad_norm": 3.5430209636688232, + "learning_rate": 1.5815247055391553e-05, + "loss": 1.214, + "step": 5158 + }, + { + "epoch": 0.32346855602232116, + "grad_norm": 2.8131062984466553, + "learning_rate": 1.58135948502146e-05, + "loss": 1.2399, + "step": 5159 + }, + { + "epoch": 0.3235312558781115, + "grad_norm": 2.803424119949341, + "learning_rate": 1.5811942405284745e-05, + "loss": 1.0436, + "step": 5160 + }, + { + "epoch": 0.3235939557339018, + "grad_norm": 2.8161120414733887, + "learning_rate": 1.581028972067013e-05, + "loss": 1.2026, + "step": 5161 + }, + { + "epoch": 0.32365665558969214, + "grad_norm": 3.137651205062866, + "learning_rate": 1.5808636796438912e-05, + "loss": 0.9948, + "step": 5162 + }, + { + "epoch": 0.32371935544548247, + "grad_norm": 2.9665420055389404, + "learning_rate": 1.580698363265926e-05, + "loss": 1.1562, + "step": 5163 + }, + { + "epoch": 0.3237820553012728, + "grad_norm": 2.9119760990142822, + "learning_rate": 1.580533022939935e-05, + "loss": 1.0738, + "step": 5164 + }, + { + "epoch": 0.3238447551570631, + "grad_norm": 2.8639397621154785, + "learning_rate": 1.5803676586727366e-05, + "loss": 0.9387, + "step": 5165 + }, + { + "epoch": 0.32390745501285345, + "grad_norm": 3.2033426761627197, + "learning_rate": 1.580202270471151e-05, + "loss": 1.385, + "step": 5166 + }, + { + "epoch": 0.3239701548686438, + "grad_norm": 2.6518490314483643, + "learning_rate": 1.5800368583419978e-05, + "loss": 1.2946, + "step": 5167 + }, + { + "epoch": 0.32403285472443416, + "grad_norm": 3.150770902633667, + "learning_rate": 1.5798714222921e-05, + "loss": 1.1337, + "step": 5168 + }, + { + "epoch": 0.3240955545802245, + "grad_norm": 3.1323740482330322, + "learning_rate": 1.5797059623282787e-05, + "loss": 1.1344, + "step": 5169 + }, + { + "epoch": 0.3241582544360148, + "grad_norm": 2.7871978282928467, + "learning_rate": 1.5795404784573584e-05, + "loss": 1.1007, + "step": 5170 + }, + { + "epoch": 0.32422095429180514, + "grad_norm": 2.875858783721924, + "learning_rate": 1.5793749706861637e-05, 
+ "loss": 1.0903, + "step": 5171 + }, + { + "epoch": 0.32428365414759547, + "grad_norm": 2.9841907024383545, + "learning_rate": 1.5792094390215198e-05, + "loss": 1.2823, + "step": 5172 + }, + { + "epoch": 0.3243463540033858, + "grad_norm": 3.1263554096221924, + "learning_rate": 1.5790438834702534e-05, + "loss": 1.187, + "step": 5173 + }, + { + "epoch": 0.3244090538591761, + "grad_norm": 3.2640321254730225, + "learning_rate": 1.578878304039192e-05, + "loss": 0.9699, + "step": 5174 + }, + { + "epoch": 0.32447175371496645, + "grad_norm": 3.13820481300354, + "learning_rate": 1.5787127007351637e-05, + "loss": 1.1427, + "step": 5175 + }, + { + "epoch": 0.3245344535707568, + "grad_norm": 2.8298962116241455, + "learning_rate": 1.5785470735649986e-05, + "loss": 1.2171, + "step": 5176 + }, + { + "epoch": 0.3245971534265471, + "grad_norm": 2.9014217853546143, + "learning_rate": 1.5783814225355268e-05, + "loss": 1.1029, + "step": 5177 + }, + { + "epoch": 0.32465985328233743, + "grad_norm": 2.983607530593872, + "learning_rate": 1.57821574765358e-05, + "loss": 1.1601, + "step": 5178 + }, + { + "epoch": 0.32472255313812776, + "grad_norm": 2.933576822280884, + "learning_rate": 1.5780500489259907e-05, + "loss": 1.2083, + "step": 5179 + }, + { + "epoch": 0.32478525299391814, + "grad_norm": 3.0293116569519043, + "learning_rate": 1.5778843263595918e-05, + "loss": 0.9531, + "step": 5180 + }, + { + "epoch": 0.32484795284970847, + "grad_norm": 3.0144569873809814, + "learning_rate": 1.577718579961218e-05, + "loss": 1.0882, + "step": 5181 + }, + { + "epoch": 0.3249106527054988, + "grad_norm": 3.0045559406280518, + "learning_rate": 1.577552809737705e-05, + "loss": 1.0452, + "step": 5182 + }, + { + "epoch": 0.3249733525612891, + "grad_norm": 2.909959077835083, + "learning_rate": 1.577387015695889e-05, + "loss": 1.1359, + "step": 5183 + }, + { + "epoch": 0.32503605241707945, + "grad_norm": 3.1569745540618896, + "learning_rate": 1.577221197842607e-05, + "loss": 1.2926, + "step": 5184 + }, + { + "epoch": 0.3250987522728698, + "grad_norm": 3.0633480548858643, + "learning_rate": 1.5770553561846975e-05, + "loss": 1.1835, + "step": 5185 + }, + { + "epoch": 0.3251614521286601, + "grad_norm": 2.9298288822174072, + "learning_rate": 1.576889490729e-05, + "loss": 1.2162, + "step": 5186 + }, + { + "epoch": 0.3252241519844504, + "grad_norm": 2.9296696186065674, + "learning_rate": 1.576723601482355e-05, + "loss": 1.351, + "step": 5187 + }, + { + "epoch": 0.32528685184024075, + "grad_norm": 3.1065750122070312, + "learning_rate": 1.576557688451603e-05, + "loss": 1.0895, + "step": 5188 + }, + { + "epoch": 0.3253495516960311, + "grad_norm": 3.017106533050537, + "learning_rate": 1.5763917516435873e-05, + "loss": 1.1738, + "step": 5189 + }, + { + "epoch": 0.3254122515518214, + "grad_norm": 3.033999443054199, + "learning_rate": 1.5762257910651504e-05, + "loss": 1.2169, + "step": 5190 + }, + { + "epoch": 0.3254749514076118, + "grad_norm": 2.781681537628174, + "learning_rate": 1.5760598067231364e-05, + "loss": 1.0355, + "step": 5191 + }, + { + "epoch": 0.3255376512634021, + "grad_norm": 3.223893404006958, + "learning_rate": 1.575893798624391e-05, + "loss": 1.0469, + "step": 5192 + }, + { + "epoch": 0.32560035111919244, + "grad_norm": 3.110470771789551, + "learning_rate": 1.5757277667757604e-05, + "loss": 0.9939, + "step": 5193 + }, + { + "epoch": 0.32566305097498277, + "grad_norm": 2.990111827850342, + "learning_rate": 1.5755617111840914e-05, + "loss": 1.104, + "step": 5194 + }, + { + "epoch": 0.3257257508307731, + "grad_norm": 
2.8770415782928467, + "learning_rate": 1.5753956318562324e-05, + "loss": 1.1778, + "step": 5195 + }, + { + "epoch": 0.3257884506865634, + "grad_norm": 3.272787570953369, + "learning_rate": 1.575229528799032e-05, + "loss": 1.1898, + "step": 5196 + }, + { + "epoch": 0.32585115054235375, + "grad_norm": 2.9118826389312744, + "learning_rate": 1.5750634020193412e-05, + "loss": 1.2049, + "step": 5197 + }, + { + "epoch": 0.3259138503981441, + "grad_norm": 3.6910369396209717, + "learning_rate": 1.57489725152401e-05, + "loss": 1.0408, + "step": 5198 + }, + { + "epoch": 0.3259765502539344, + "grad_norm": 2.6811041831970215, + "learning_rate": 1.574731077319892e-05, + "loss": 1.2096, + "step": 5199 + }, + { + "epoch": 0.32603925010972473, + "grad_norm": 2.8587169647216797, + "learning_rate": 1.5745648794138384e-05, + "loss": 1.3302, + "step": 5200 + }, + { + "epoch": 0.32610194996551506, + "grad_norm": 3.504918336868286, + "learning_rate": 1.5743986578127043e-05, + "loss": 1.057, + "step": 5201 + }, + { + "epoch": 0.3261646498213054, + "grad_norm": 2.7092745304107666, + "learning_rate": 1.5742324125233448e-05, + "loss": 1.092, + "step": 5202 + }, + { + "epoch": 0.32622734967709577, + "grad_norm": 3.098345994949341, + "learning_rate": 1.5740661435526154e-05, + "loss": 1.2623, + "step": 5203 + }, + { + "epoch": 0.3262900495328861, + "grad_norm": 2.995776653289795, + "learning_rate": 1.573899850907373e-05, + "loss": 1.22, + "step": 5204 + }, + { + "epoch": 0.3263527493886764, + "grad_norm": 2.9524872303009033, + "learning_rate": 1.5737335345944758e-05, + "loss": 1.1914, + "step": 5205 + }, + { + "epoch": 0.32641544924446675, + "grad_norm": 3.185076951980591, + "learning_rate": 1.5735671946207823e-05, + "loss": 1.0902, + "step": 5206 + }, + { + "epoch": 0.3264781491002571, + "grad_norm": 2.942716121673584, + "learning_rate": 1.573400830993153e-05, + "loss": 1.022, + "step": 5207 + }, + { + "epoch": 0.3265408489560474, + "grad_norm": 2.746164321899414, + "learning_rate": 1.573234443718448e-05, + "loss": 1.2528, + "step": 5208 + }, + { + "epoch": 0.32660354881183773, + "grad_norm": 3.159822702407837, + "learning_rate": 1.5730680328035298e-05, + "loss": 1.0924, + "step": 5209 + }, + { + "epoch": 0.32666624866762806, + "grad_norm": 2.6887781620025635, + "learning_rate": 1.572901598255261e-05, + "loss": 1.1388, + "step": 5210 + }, + { + "epoch": 0.3267289485234184, + "grad_norm": 2.8011393547058105, + "learning_rate": 1.5727351400805054e-05, + "loss": 1.3377, + "step": 5211 + }, + { + "epoch": 0.3267916483792087, + "grad_norm": 3.3149874210357666, + "learning_rate": 1.5725686582861273e-05, + "loss": 1.0486, + "step": 5212 + }, + { + "epoch": 0.32685434823499904, + "grad_norm": 3.2126214504241943, + "learning_rate": 1.5724021528789928e-05, + "loss": 1.0279, + "step": 5213 + }, + { + "epoch": 0.32691704809078936, + "grad_norm": 3.3480639457702637, + "learning_rate": 1.5722356238659685e-05, + "loss": 1.1036, + "step": 5214 + }, + { + "epoch": 0.32697974794657975, + "grad_norm": 3.287778377532959, + "learning_rate": 1.572069071253922e-05, + "loss": 0.9361, + "step": 5215 + }, + { + "epoch": 0.3270424478023701, + "grad_norm": 3.4010426998138428, + "learning_rate": 1.571902495049722e-05, + "loss": 1.1146, + "step": 5216 + }, + { + "epoch": 0.3271051476581604, + "grad_norm": 2.920985221862793, + "learning_rate": 1.5717358952602385e-05, + "loss": 1.063, + "step": 5217 + }, + { + "epoch": 0.3271678475139507, + "grad_norm": 2.7190496921539307, + "learning_rate": 1.5715692718923413e-05, + "loss": 1.1243, + "step": 5218 + 
}, + { + "epoch": 0.32723054736974105, + "grad_norm": 2.829420566558838, + "learning_rate": 1.5714026249529025e-05, + "loss": 1.2263, + "step": 5219 + }, + { + "epoch": 0.3272932472255314, + "grad_norm": 2.8673198223114014, + "learning_rate": 1.5712359544487944e-05, + "loss": 1.1538, + "step": 5220 + }, + { + "epoch": 0.3273559470813217, + "grad_norm": 3.211021900177002, + "learning_rate": 1.5710692603868907e-05, + "loss": 1.4179, + "step": 5221 + }, + { + "epoch": 0.32741864693711203, + "grad_norm": 2.9687039852142334, + "learning_rate": 1.570902542774066e-05, + "loss": 1.2595, + "step": 5222 + }, + { + "epoch": 0.32748134679290236, + "grad_norm": 3.0446083545684814, + "learning_rate": 1.5707358016171953e-05, + "loss": 1.0826, + "step": 5223 + }, + { + "epoch": 0.3275440466486927, + "grad_norm": 3.0134735107421875, + "learning_rate": 1.5705690369231552e-05, + "loss": 1.2749, + "step": 5224 + }, + { + "epoch": 0.327606746504483, + "grad_norm": 3.4523658752441406, + "learning_rate": 1.570402248698823e-05, + "loss": 1.2325, + "step": 5225 + }, + { + "epoch": 0.3276694463602734, + "grad_norm": 2.626636266708374, + "learning_rate": 1.5702354369510772e-05, + "loss": 1.1606, + "step": 5226 + }, + { + "epoch": 0.3277321462160637, + "grad_norm": 3.116173028945923, + "learning_rate": 1.5700686016867972e-05, + "loss": 1.0902, + "step": 5227 + }, + { + "epoch": 0.32779484607185405, + "grad_norm": 3.0823822021484375, + "learning_rate": 1.5699017429128633e-05, + "loss": 1.0115, + "step": 5228 + }, + { + "epoch": 0.3278575459276444, + "grad_norm": 2.8310470581054688, + "learning_rate": 1.5697348606361564e-05, + "loss": 1.1414, + "step": 5229 + }, + { + "epoch": 0.3279202457834347, + "grad_norm": 2.6305086612701416, + "learning_rate": 1.569567954863559e-05, + "loss": 1.2458, + "step": 5230 + }, + { + "epoch": 0.32798294563922503, + "grad_norm": 2.911332607269287, + "learning_rate": 1.569401025601954e-05, + "loss": 1.0429, + "step": 5231 + }, + { + "epoch": 0.32804564549501536, + "grad_norm": 2.765167474746704, + "learning_rate": 1.569234072858226e-05, + "loss": 1.1729, + "step": 5232 + }, + { + "epoch": 0.3281083453508057, + "grad_norm": 3.009190797805786, + "learning_rate": 1.5690670966392602e-05, + "loss": 1.2139, + "step": 5233 + }, + { + "epoch": 0.328171045206596, + "grad_norm": 2.8939106464385986, + "learning_rate": 1.5689000969519422e-05, + "loss": 1.2827, + "step": 5234 + }, + { + "epoch": 0.32823374506238634, + "grad_norm": 2.912914991378784, + "learning_rate": 1.5687330738031598e-05, + "loss": 1.1403, + "step": 5235 + }, + { + "epoch": 0.32829644491817667, + "grad_norm": 2.9265475273132324, + "learning_rate": 1.5685660271998002e-05, + "loss": 1.1239, + "step": 5236 + }, + { + "epoch": 0.328359144773967, + "grad_norm": 2.6694891452789307, + "learning_rate": 1.568398957148753e-05, + "loss": 1.2037, + "step": 5237 + }, + { + "epoch": 0.3284218446297574, + "grad_norm": 2.670689344406128, + "learning_rate": 1.5682318636569078e-05, + "loss": 1.0143, + "step": 5238 + }, + { + "epoch": 0.3284845444855477, + "grad_norm": 3.0138332843780518, + "learning_rate": 1.568064746731156e-05, + "loss": 1.151, + "step": 5239 + }, + { + "epoch": 0.32854724434133803, + "grad_norm": 2.9762747287750244, + "learning_rate": 1.567897606378389e-05, + "loss": 1.2563, + "step": 5240 + }, + { + "epoch": 0.32860994419712836, + "grad_norm": 3.042402744293213, + "learning_rate": 1.5677304426055e-05, + "loss": 1.0932, + "step": 5241 + }, + { + "epoch": 0.3286726440529187, + "grad_norm": 3.1899547576904297, + "learning_rate": 
1.5675632554193827e-05, + "loss": 1.1772, + "step": 5242 + }, + { + "epoch": 0.328735343908709, + "grad_norm": 2.9397690296173096, + "learning_rate": 1.567396044826932e-05, + "loss": 1.1998, + "step": 5243 + }, + { + "epoch": 0.32879804376449934, + "grad_norm": 2.7370352745056152, + "learning_rate": 1.5672288108350438e-05, + "loss": 1.1177, + "step": 5244 + }, + { + "epoch": 0.32886074362028966, + "grad_norm": 2.7546234130859375, + "learning_rate": 1.5670615534506145e-05, + "loss": 1.1466, + "step": 5245 + }, + { + "epoch": 0.32892344347608, + "grad_norm": 3.2774293422698975, + "learning_rate": 1.566894272680542e-05, + "loss": 1.3666, + "step": 5246 + }, + { + "epoch": 0.3289861433318703, + "grad_norm": 3.2971818447113037, + "learning_rate": 1.5667269685317248e-05, + "loss": 1.0178, + "step": 5247 + }, + { + "epoch": 0.32904884318766064, + "grad_norm": 2.972958564758301, + "learning_rate": 1.566559641011063e-05, + "loss": 1.1101, + "step": 5248 + }, + { + "epoch": 0.329111543043451, + "grad_norm": 2.6249513626098633, + "learning_rate": 1.5663922901254563e-05, + "loss": 1.1251, + "step": 5249 + }, + { + "epoch": 0.32917424289924135, + "grad_norm": 2.8307549953460693, + "learning_rate": 1.5662249158818072e-05, + "loss": 1.2096, + "step": 5250 + }, + { + "epoch": 0.3292369427550317, + "grad_norm": 2.830807685852051, + "learning_rate": 1.566057518287018e-05, + "loss": 1.1805, + "step": 5251 + }, + { + "epoch": 0.329299642610822, + "grad_norm": 3.317089080810547, + "learning_rate": 1.5658900973479916e-05, + "loss": 1.0316, + "step": 5252 + }, + { + "epoch": 0.32936234246661233, + "grad_norm": 2.7882094383239746, + "learning_rate": 1.5657226530716334e-05, + "loss": 1.1958, + "step": 5253 + }, + { + "epoch": 0.32942504232240266, + "grad_norm": 3.0971624851226807, + "learning_rate": 1.5655551854648477e-05, + "loss": 1.1322, + "step": 5254 + }, + { + "epoch": 0.329487742178193, + "grad_norm": 3.0669100284576416, + "learning_rate": 1.565387694534542e-05, + "loss": 1.0366, + "step": 5255 + }, + { + "epoch": 0.3295504420339833, + "grad_norm": 3.166826009750366, + "learning_rate": 1.5652201802876227e-05, + "loss": 1.1503, + "step": 5256 + }, + { + "epoch": 0.32961314188977364, + "grad_norm": 3.300490379333496, + "learning_rate": 1.5650526427309985e-05, + "loss": 1.0447, + "step": 5257 + }, + { + "epoch": 0.32967584174556397, + "grad_norm": 3.0532147884368896, + "learning_rate": 1.5648850818715787e-05, + "loss": 1.0478, + "step": 5258 + }, + { + "epoch": 0.3297385416013543, + "grad_norm": 3.0780396461486816, + "learning_rate": 1.5647174977162735e-05, + "loss": 1.088, + "step": 5259 + }, + { + "epoch": 0.3298012414571446, + "grad_norm": 2.913569927215576, + "learning_rate": 1.564549890271994e-05, + "loss": 1.1372, + "step": 5260 + }, + { + "epoch": 0.329863941312935, + "grad_norm": 2.7343461513519287, + "learning_rate": 1.564382259545653e-05, + "loss": 1.0855, + "step": 5261 + }, + { + "epoch": 0.32992664116872533, + "grad_norm": 3.183415174484253, + "learning_rate": 1.564214605544162e-05, + "loss": 1.1362, + "step": 5262 + }, + { + "epoch": 0.32998934102451566, + "grad_norm": 3.0395359992980957, + "learning_rate": 1.5640469282744364e-05, + "loss": 1.053, + "step": 5263 + }, + { + "epoch": 0.330052040880306, + "grad_norm": 3.0655226707458496, + "learning_rate": 1.563879227743391e-05, + "loss": 1.0907, + "step": 5264 + }, + { + "epoch": 0.3301147407360963, + "grad_norm": 2.737551212310791, + "learning_rate": 1.5637115039579414e-05, + "loss": 0.9982, + "step": 5265 + }, + { + "epoch": 0.33017744059188664, 
+ "grad_norm": 2.7343907356262207, + "learning_rate": 1.5635437569250048e-05, + "loss": 1.1906, + "step": 5266 + }, + { + "epoch": 0.33024014044767697, + "grad_norm": 2.8418259620666504, + "learning_rate": 1.563375986651499e-05, + "loss": 1.1125, + "step": 5267 + }, + { + "epoch": 0.3303028403034673, + "grad_norm": 2.751559019088745, + "learning_rate": 1.5632081931443433e-05, + "loss": 1.2335, + "step": 5268 + }, + { + "epoch": 0.3303655401592576, + "grad_norm": 2.882183790206909, + "learning_rate": 1.5630403764104566e-05, + "loss": 1.0998, + "step": 5269 + }, + { + "epoch": 0.33042824001504795, + "grad_norm": 2.9299075603485107, + "learning_rate": 1.5628725364567605e-05, + "loss": 1.1222, + "step": 5270 + }, + { + "epoch": 0.3304909398708383, + "grad_norm": 3.138319730758667, + "learning_rate": 1.5627046732901763e-05, + "loss": 1.0393, + "step": 5271 + }, + { + "epoch": 0.33055363972662866, + "grad_norm": 2.8406074047088623, + "learning_rate": 1.562536786917627e-05, + "loss": 0.9694, + "step": 5272 + }, + { + "epoch": 0.330616339582419, + "grad_norm": 2.987163782119751, + "learning_rate": 1.5623688773460358e-05, + "loss": 1.1676, + "step": 5273 + }, + { + "epoch": 0.3306790394382093, + "grad_norm": 2.898390054702759, + "learning_rate": 1.5622009445823274e-05, + "loss": 1.1949, + "step": 5274 + }, + { + "epoch": 0.33074173929399964, + "grad_norm": 2.843257427215576, + "learning_rate": 1.562032988633428e-05, + "loss": 1.0505, + "step": 5275 + }, + { + "epoch": 0.33080443914978996, + "grad_norm": 2.8237545490264893, + "learning_rate": 1.561865009506263e-05, + "loss": 1.1481, + "step": 5276 + }, + { + "epoch": 0.3308671390055803, + "grad_norm": 3.4962475299835205, + "learning_rate": 1.561697007207761e-05, + "loss": 1.1901, + "step": 5277 + }, + { + "epoch": 0.3309298388613706, + "grad_norm": 2.618084192276001, + "learning_rate": 1.5615289817448495e-05, + "loss": 1.0712, + "step": 5278 + }, + { + "epoch": 0.33099253871716094, + "grad_norm": 3.0822620391845703, + "learning_rate": 1.5613609331244584e-05, + "loss": 1.2092, + "step": 5279 + }, + { + "epoch": 0.33105523857295127, + "grad_norm": 2.7067389488220215, + "learning_rate": 1.561192861353518e-05, + "loss": 1.2183, + "step": 5280 + }, + { + "epoch": 0.3311179384287416, + "grad_norm": 2.9286293983459473, + "learning_rate": 1.5610247664389595e-05, + "loss": 1.0089, + "step": 5281 + }, + { + "epoch": 0.3311806382845319, + "grad_norm": 2.958413600921631, + "learning_rate": 1.560856648387715e-05, + "loss": 1.189, + "step": 5282 + }, + { + "epoch": 0.33124333814032225, + "grad_norm": 2.6436846256256104, + "learning_rate": 1.560688507206718e-05, + "loss": 1.156, + "step": 5283 + }, + { + "epoch": 0.33130603799611263, + "grad_norm": 3.3168582916259766, + "learning_rate": 1.5605203429029024e-05, + "loss": 1.1574, + "step": 5284 + }, + { + "epoch": 0.33136873785190296, + "grad_norm": 3.3151681423187256, + "learning_rate": 1.5603521554832034e-05, + "loss": 1.1004, + "step": 5285 + }, + { + "epoch": 0.3314314377076933, + "grad_norm": 3.1246109008789062, + "learning_rate": 1.560183944954557e-05, + "loss": 1.1149, + "step": 5286 + }, + { + "epoch": 0.3314941375634836, + "grad_norm": 3.037760019302368, + "learning_rate": 1.5600157113239e-05, + "loss": 1.307, + "step": 5287 + }, + { + "epoch": 0.33155683741927394, + "grad_norm": 2.9802932739257812, + "learning_rate": 1.559847454598171e-05, + "loss": 1.0993, + "step": 5288 + }, + { + "epoch": 0.33161953727506427, + "grad_norm": 2.979346990585327, + "learning_rate": 1.5596791747843083e-05, + "loss": 1.1198, 
+ "step": 5289 + }, + { + "epoch": 0.3316822371308546, + "grad_norm": 3.0462656021118164, + "learning_rate": 1.559510871889252e-05, + "loss": 1.0227, + "step": 5290 + }, + { + "epoch": 0.3317449369866449, + "grad_norm": 2.982445478439331, + "learning_rate": 1.559342545919943e-05, + "loss": 1.0819, + "step": 5291 + }, + { + "epoch": 0.33180763684243525, + "grad_norm": 3.1868228912353516, + "learning_rate": 1.5591741968833233e-05, + "loss": 1.0672, + "step": 5292 + }, + { + "epoch": 0.3318703366982256, + "grad_norm": 3.127119302749634, + "learning_rate": 1.5590058247863345e-05, + "loss": 1.1686, + "step": 5293 + }, + { + "epoch": 0.3319330365540159, + "grad_norm": 2.976418972015381, + "learning_rate": 1.5588374296359217e-05, + "loss": 1.2708, + "step": 5294 + }, + { + "epoch": 0.33199573640980623, + "grad_norm": 2.928887128829956, + "learning_rate": 1.5586690114390285e-05, + "loss": 1.0226, + "step": 5295 + }, + { + "epoch": 0.3320584362655966, + "grad_norm": 2.936429738998413, + "learning_rate": 1.5585005702026014e-05, + "loss": 0.9257, + "step": 5296 + }, + { + "epoch": 0.33212113612138694, + "grad_norm": 3.0256171226501465, + "learning_rate": 1.5583321059335862e-05, + "loss": 1.1106, + "step": 5297 + }, + { + "epoch": 0.33218383597717727, + "grad_norm": 2.9784774780273438, + "learning_rate": 1.5581636186389304e-05, + "loss": 1.0349, + "step": 5298 + }, + { + "epoch": 0.3322465358329676, + "grad_norm": 3.10617995262146, + "learning_rate": 1.5579951083255823e-05, + "loss": 1.1355, + "step": 5299 + }, + { + "epoch": 0.3323092356887579, + "grad_norm": 3.334362268447876, + "learning_rate": 1.557826575000492e-05, + "loss": 1.1487, + "step": 5300 + }, + { + "epoch": 0.33237193554454825, + "grad_norm": 2.826941728591919, + "learning_rate": 1.5576580186706093e-05, + "loss": 1.1736, + "step": 5301 + }, + { + "epoch": 0.3324346354003386, + "grad_norm": 2.9202518463134766, + "learning_rate": 1.5574894393428856e-05, + "loss": 1.0324, + "step": 5302 + }, + { + "epoch": 0.3324973352561289, + "grad_norm": 2.7475082874298096, + "learning_rate": 1.5573208370242733e-05, + "loss": 1.2159, + "step": 5303 + }, + { + "epoch": 0.3325600351119192, + "grad_norm": 3.208089828491211, + "learning_rate": 1.557152211721725e-05, + "loss": 0.954, + "step": 5304 + }, + { + "epoch": 0.33262273496770955, + "grad_norm": 3.4946584701538086, + "learning_rate": 1.5569835634421952e-05, + "loss": 1.2673, + "step": 5305 + }, + { + "epoch": 0.3326854348234999, + "grad_norm": 2.96846079826355, + "learning_rate": 1.556814892192639e-05, + "loss": 1.0983, + "step": 5306 + }, + { + "epoch": 0.33274813467929026, + "grad_norm": 3.282106637954712, + "learning_rate": 1.556646197980012e-05, + "loss": 1.3594, + "step": 5307 + }, + { + "epoch": 0.3328108345350806, + "grad_norm": 3.080857276916504, + "learning_rate": 1.5564774808112718e-05, + "loss": 1.1745, + "step": 5308 + }, + { + "epoch": 0.3328735343908709, + "grad_norm": 3.009929656982422, + "learning_rate": 1.5563087406933762e-05, + "loss": 1.1785, + "step": 5309 + }, + { + "epoch": 0.33293623424666124, + "grad_norm": 2.755157947540283, + "learning_rate": 1.5561399776332835e-05, + "loss": 1.2288, + "step": 5310 + }, + { + "epoch": 0.33299893410245157, + "grad_norm": 2.9527828693389893, + "learning_rate": 1.555971191637954e-05, + "loss": 1.1509, + "step": 5311 + }, + { + "epoch": 0.3330616339582419, + "grad_norm": 2.952979326248169, + "learning_rate": 1.5558023827143484e-05, + "loss": 1.1996, + "step": 5312 + }, + { + "epoch": 0.3331243338140322, + "grad_norm": 2.9612815380096436, + 
"learning_rate": 1.5556335508694277e-05, + "loss": 1.0986, + "step": 5313 + }, + { + "epoch": 0.33318703366982255, + "grad_norm": 3.3412578105926514, + "learning_rate": 1.5554646961101557e-05, + "loss": 1.2069, + "step": 5314 + }, + { + "epoch": 0.3332497335256129, + "grad_norm": 3.1063337326049805, + "learning_rate": 1.555295818443495e-05, + "loss": 1.0749, + "step": 5315 + }, + { + "epoch": 0.3333124333814032, + "grad_norm": 2.7759644985198975, + "learning_rate": 1.5551269178764108e-05, + "loss": 1.2846, + "step": 5316 + }, + { + "epoch": 0.33337513323719353, + "grad_norm": 2.755985975265503, + "learning_rate": 1.5549579944158682e-05, + "loss": 1.1219, + "step": 5317 + }, + { + "epoch": 0.33343783309298386, + "grad_norm": 2.8501648902893066, + "learning_rate": 1.5547890480688335e-05, + "loss": 1.1647, + "step": 5318 + }, + { + "epoch": 0.33350053294877424, + "grad_norm": 2.817206382751465, + "learning_rate": 1.554620078842274e-05, + "loss": 1.1855, + "step": 5319 + }, + { + "epoch": 0.33356323280456457, + "grad_norm": 3.1916749477386475, + "learning_rate": 1.554451086743159e-05, + "loss": 1.1609, + "step": 5320 + }, + { + "epoch": 0.3336259326603549, + "grad_norm": 2.6928863525390625, + "learning_rate": 1.5542820717784564e-05, + "loss": 1.0479, + "step": 5321 + }, + { + "epoch": 0.3336886325161452, + "grad_norm": 2.708648920059204, + "learning_rate": 1.5541130339551373e-05, + "loss": 1.0849, + "step": 5322 + }, + { + "epoch": 0.33375133237193555, + "grad_norm": 2.8438918590545654, + "learning_rate": 1.553943973280172e-05, + "loss": 1.2867, + "step": 5323 + }, + { + "epoch": 0.3338140322277259, + "grad_norm": 2.740449905395508, + "learning_rate": 1.553774889760533e-05, + "loss": 0.94, + "step": 5324 + }, + { + "epoch": 0.3338767320835162, + "grad_norm": 3.151845932006836, + "learning_rate": 1.5536057834031938e-05, + "loss": 1.0084, + "step": 5325 + }, + { + "epoch": 0.33393943193930653, + "grad_norm": 2.9678709506988525, + "learning_rate": 1.553436654215128e-05, + "loss": 1.2331, + "step": 5326 + }, + { + "epoch": 0.33400213179509686, + "grad_norm": 2.86051607131958, + "learning_rate": 1.5532675022033102e-05, + "loss": 1.0999, + "step": 5327 + }, + { + "epoch": 0.3340648316508872, + "grad_norm": 2.9577548503875732, + "learning_rate": 1.553098327374716e-05, + "loss": 1.1511, + "step": 5328 + }, + { + "epoch": 0.3341275315066775, + "grad_norm": 3.000279426574707, + "learning_rate": 1.5529291297363235e-05, + "loss": 1.176, + "step": 5329 + }, + { + "epoch": 0.3341902313624679, + "grad_norm": 2.9744858741760254, + "learning_rate": 1.5527599092951093e-05, + "loss": 1.2188, + "step": 5330 + }, + { + "epoch": 0.3342529312182582, + "grad_norm": 3.5035645961761475, + "learning_rate": 1.5525906660580517e-05, + "loss": 1.2923, + "step": 5331 + }, + { + "epoch": 0.33431563107404855, + "grad_norm": 2.6883044242858887, + "learning_rate": 1.5524214000321315e-05, + "loss": 1.1928, + "step": 5332 + }, + { + "epoch": 0.3343783309298389, + "grad_norm": 3.1498677730560303, + "learning_rate": 1.5522521112243283e-05, + "loss": 1.1212, + "step": 5333 + }, + { + "epoch": 0.3344410307856292, + "grad_norm": 3.1172235012054443, + "learning_rate": 1.552082799641624e-05, + "loss": 1.0367, + "step": 5334 + }, + { + "epoch": 0.3345037306414195, + "grad_norm": 3.054431915283203, + "learning_rate": 1.5519134652910013e-05, + "loss": 1.2333, + "step": 5335 + }, + { + "epoch": 0.33456643049720985, + "grad_norm": 3.0081822872161865, + "learning_rate": 1.5517441081794428e-05, + "loss": 1.2222, + "step": 5336 + }, + { + 
"epoch": 0.3346291303530002, + "grad_norm": 3.4885387420654297, + "learning_rate": 1.5515747283139333e-05, + "loss": 1.131, + "step": 5337 + }, + { + "epoch": 0.3346918302087905, + "grad_norm": 3.1696646213531494, + "learning_rate": 1.551405325701458e-05, + "loss": 1.1918, + "step": 5338 + }, + { + "epoch": 0.33475453006458084, + "grad_norm": 2.7451398372650146, + "learning_rate": 1.551235900349003e-05, + "loss": 1.2609, + "step": 5339 + }, + { + "epoch": 0.33481722992037116, + "grad_norm": 2.944441318511963, + "learning_rate": 1.551066452263555e-05, + "loss": 1.1937, + "step": 5340 + }, + { + "epoch": 0.3348799297761615, + "grad_norm": 2.8803036212921143, + "learning_rate": 1.5508969814521026e-05, + "loss": 1.05, + "step": 5341 + }, + { + "epoch": 0.33494262963195187, + "grad_norm": 2.880263328552246, + "learning_rate": 1.550727487921635e-05, + "loss": 1.1508, + "step": 5342 + }, + { + "epoch": 0.3350053294877422, + "grad_norm": 2.533444404602051, + "learning_rate": 1.550557971679141e-05, + "loss": 1.2884, + "step": 5343 + }, + { + "epoch": 0.3350680293435325, + "grad_norm": 2.869501829147339, + "learning_rate": 1.550388432731613e-05, + "loss": 1.15, + "step": 5344 + }, + { + "epoch": 0.33513072919932285, + "grad_norm": 3.0277915000915527, + "learning_rate": 1.5502188710860416e-05, + "loss": 1.0371, + "step": 5345 + }, + { + "epoch": 0.3351934290551132, + "grad_norm": 2.699780225753784, + "learning_rate": 1.5500492867494204e-05, + "loss": 1.148, + "step": 5346 + }, + { + "epoch": 0.3352561289109035, + "grad_norm": 2.7221641540527344, + "learning_rate": 1.549879679728742e-05, + "loss": 1.2059, + "step": 5347 + }, + { + "epoch": 0.33531882876669383, + "grad_norm": 3.36578631401062, + "learning_rate": 1.549710050031002e-05, + "loss": 1.1209, + "step": 5348 + }, + { + "epoch": 0.33538152862248416, + "grad_norm": 2.6266257762908936, + "learning_rate": 1.5495403976631952e-05, + "loss": 1.1209, + "step": 5349 + }, + { + "epoch": 0.3354442284782745, + "grad_norm": 3.2336013317108154, + "learning_rate": 1.5493707226323185e-05, + "loss": 1.0421, + "step": 5350 + }, + { + "epoch": 0.3355069283340648, + "grad_norm": 2.871474027633667, + "learning_rate": 1.5492010249453694e-05, + "loss": 1.0179, + "step": 5351 + }, + { + "epoch": 0.33556962818985514, + "grad_norm": 3.1428961753845215, + "learning_rate": 1.5490313046093458e-05, + "loss": 1.1386, + "step": 5352 + }, + { + "epoch": 0.3356323280456455, + "grad_norm": 2.9023661613464355, + "learning_rate": 1.5488615616312472e-05, + "loss": 1.0116, + "step": 5353 + }, + { + "epoch": 0.33569502790143585, + "grad_norm": 2.7859480381011963, + "learning_rate": 1.5486917960180742e-05, + "loss": 0.9975, + "step": 5354 + }, + { + "epoch": 0.3357577277572262, + "grad_norm": 2.7774574756622314, + "learning_rate": 1.548522007776827e-05, + "loss": 0.9752, + "step": 5355 + }, + { + "epoch": 0.3358204276130165, + "grad_norm": 3.4556093215942383, + "learning_rate": 1.548352196914509e-05, + "loss": 1.0253, + "step": 5356 + }, + { + "epoch": 0.33588312746880683, + "grad_norm": 3.0876779556274414, + "learning_rate": 1.5481823634381217e-05, + "loss": 1.1332, + "step": 5357 + }, + { + "epoch": 0.33594582732459716, + "grad_norm": 3.0138471126556396, + "learning_rate": 1.5480125073546705e-05, + "loss": 1.0629, + "step": 5358 + }, + { + "epoch": 0.3360085271803875, + "grad_norm": 2.9814937114715576, + "learning_rate": 1.5478426286711588e-05, + "loss": 1.1785, + "step": 5359 + }, + { + "epoch": 0.3360712270361778, + "grad_norm": 3.076714277267456, + "learning_rate": 
1.5476727273945936e-05, + "loss": 1.0048, + "step": 5360 + }, + { + "epoch": 0.33613392689196814, + "grad_norm": 3.2041544914245605, + "learning_rate": 1.5475028035319814e-05, + "loss": 1.0007, + "step": 5361 + }, + { + "epoch": 0.33619662674775846, + "grad_norm": 2.8534131050109863, + "learning_rate": 1.5473328570903293e-05, + "loss": 1.2417, + "step": 5362 + }, + { + "epoch": 0.3362593266035488, + "grad_norm": 3.104081392288208, + "learning_rate": 1.5471628880766464e-05, + "loss": 1.2077, + "step": 5363 + }, + { + "epoch": 0.3363220264593391, + "grad_norm": 3.050189971923828, + "learning_rate": 1.5469928964979418e-05, + "loss": 1.0646, + "step": 5364 + }, + { + "epoch": 0.3363847263151295, + "grad_norm": 3.031829833984375, + "learning_rate": 1.5468228823612268e-05, + "loss": 1.0269, + "step": 5365 + }, + { + "epoch": 0.3364474261709198, + "grad_norm": 2.928535223007202, + "learning_rate": 1.546652845673512e-05, + "loss": 0.9832, + "step": 5366 + }, + { + "epoch": 0.33651012602671015, + "grad_norm": 3.1215970516204834, + "learning_rate": 1.54648278644181e-05, + "loss": 1.0871, + "step": 5367 + }, + { + "epoch": 0.3365728258825005, + "grad_norm": 2.9501559734344482, + "learning_rate": 1.546312704673134e-05, + "loss": 1.1366, + "step": 5368 + }, + { + "epoch": 0.3366355257382908, + "grad_norm": 2.8093481063842773, + "learning_rate": 1.5461426003744982e-05, + "loss": 1.0541, + "step": 5369 + }, + { + "epoch": 0.33669822559408114, + "grad_norm": 3.2606842517852783, + "learning_rate": 1.5459724735529177e-05, + "loss": 1.19, + "step": 5370 + }, + { + "epoch": 0.33676092544987146, + "grad_norm": 3.1471259593963623, + "learning_rate": 1.5458023242154085e-05, + "loss": 1.0633, + "step": 5371 + }, + { + "epoch": 0.3368236253056618, + "grad_norm": 2.937319278717041, + "learning_rate": 1.5456321523689877e-05, + "loss": 1.1289, + "step": 5372 + }, + { + "epoch": 0.3368863251614521, + "grad_norm": 3.1405768394470215, + "learning_rate": 1.545461958020673e-05, + "loss": 1.1335, + "step": 5373 + }, + { + "epoch": 0.33694902501724244, + "grad_norm": 2.635608673095703, + "learning_rate": 1.5452917411774837e-05, + "loss": 1.2808, + "step": 5374 + }, + { + "epoch": 0.33701172487303277, + "grad_norm": 3.016540765762329, + "learning_rate": 1.5451215018464386e-05, + "loss": 1.3131, + "step": 5375 + }, + { + "epoch": 0.3370744247288231, + "grad_norm": 2.746523380279541, + "learning_rate": 1.5449512400345594e-05, + "loss": 1.1856, + "step": 5376 + }, + { + "epoch": 0.3371371245846135, + "grad_norm": 2.654707431793213, + "learning_rate": 1.5447809557488672e-05, + "loss": 0.9781, + "step": 5377 + }, + { + "epoch": 0.3371998244404038, + "grad_norm": 3.3638970851898193, + "learning_rate": 1.5446106489963844e-05, + "loss": 1.1869, + "step": 5378 + }, + { + "epoch": 0.33726252429619413, + "grad_norm": 3.2616047859191895, + "learning_rate": 1.5444403197841345e-05, + "loss": 1.0481, + "step": 5379 + }, + { + "epoch": 0.33732522415198446, + "grad_norm": 2.9024152755737305, + "learning_rate": 1.5442699681191426e-05, + "loss": 1.2337, + "step": 5380 + }, + { + "epoch": 0.3373879240077748, + "grad_norm": 3.029651403427124, + "learning_rate": 1.544099594008433e-05, + "loss": 0.975, + "step": 5381 + }, + { + "epoch": 0.3374506238635651, + "grad_norm": 3.1406562328338623, + "learning_rate": 1.5439291974590327e-05, + "loss": 1.0286, + "step": 5382 + }, + { + "epoch": 0.33751332371935544, + "grad_norm": 2.983574628829956, + "learning_rate": 1.5437587784779685e-05, + "loss": 1.032, + "step": 5383 + }, + { + "epoch": 
0.33757602357514577, + "grad_norm": 3.5408406257629395, + "learning_rate": 1.543588337072268e-05, + "loss": 1.1584, + "step": 5384 + }, + { + "epoch": 0.3376387234309361, + "grad_norm": 3.0765621662139893, + "learning_rate": 1.5434178732489615e-05, + "loss": 1.0233, + "step": 5385 + }, + { + "epoch": 0.3377014232867264, + "grad_norm": 3.0566227436065674, + "learning_rate": 1.5432473870150778e-05, + "loss": 1.1763, + "step": 5386 + }, + { + "epoch": 0.33776412314251675, + "grad_norm": 2.9747681617736816, + "learning_rate": 1.5430768783776483e-05, + "loss": 1.1988, + "step": 5387 + }, + { + "epoch": 0.33782682299830713, + "grad_norm": 2.8256685733795166, + "learning_rate": 1.5429063473437045e-05, + "loss": 1.2379, + "step": 5388 + }, + { + "epoch": 0.33788952285409746, + "grad_norm": 3.156174659729004, + "learning_rate": 1.542735793920279e-05, + "loss": 1.1256, + "step": 5389 + }, + { + "epoch": 0.3379522227098878, + "grad_norm": 2.8259358406066895, + "learning_rate": 1.542565218114406e-05, + "loss": 0.9908, + "step": 5390 + }, + { + "epoch": 0.3380149225656781, + "grad_norm": 2.669114589691162, + "learning_rate": 1.5423946199331195e-05, + "loss": 1.1606, + "step": 5391 + }, + { + "epoch": 0.33807762242146844, + "grad_norm": 2.994183301925659, + "learning_rate": 1.542223999383455e-05, + "loss": 1.1317, + "step": 5392 + }, + { + "epoch": 0.33814032227725876, + "grad_norm": 3.001359701156616, + "learning_rate": 1.5420533564724495e-05, + "loss": 1.0621, + "step": 5393 + }, + { + "epoch": 0.3382030221330491, + "grad_norm": 3.149660348892212, + "learning_rate": 1.54188269120714e-05, + "loss": 1.1735, + "step": 5394 + }, + { + "epoch": 0.3382657219888394, + "grad_norm": 3.146664619445801, + "learning_rate": 1.5417120035945642e-05, + "loss": 1.0955, + "step": 5395 + }, + { + "epoch": 0.33832842184462975, + "grad_norm": 3.383451461791992, + "learning_rate": 1.541541293641762e-05, + "loss": 1.181, + "step": 5396 + }, + { + "epoch": 0.33839112170042007, + "grad_norm": 3.11441969871521, + "learning_rate": 1.5413705613557732e-05, + "loss": 1.0896, + "step": 5397 + }, + { + "epoch": 0.3384538215562104, + "grad_norm": 2.792125701904297, + "learning_rate": 1.5411998067436388e-05, + "loss": 1.2527, + "step": 5398 + }, + { + "epoch": 0.3385165214120007, + "grad_norm": 3.239206552505493, + "learning_rate": 1.5410290298124006e-05, + "loss": 1.212, + "step": 5399 + }, + { + "epoch": 0.3385792212677911, + "grad_norm": 3.1432082653045654, + "learning_rate": 1.5408582305691014e-05, + "loss": 1.1139, + "step": 5400 + }, + { + "epoch": 0.33864192112358144, + "grad_norm": 2.8478078842163086, + "learning_rate": 1.5406874090207856e-05, + "loss": 1.0773, + "step": 5401 + }, + { + "epoch": 0.33870462097937176, + "grad_norm": 2.9979348182678223, + "learning_rate": 1.5405165651744972e-05, + "loss": 1.117, + "step": 5402 + }, + { + "epoch": 0.3387673208351621, + "grad_norm": 3.0847363471984863, + "learning_rate": 1.540345699037282e-05, + "loss": 1.0216, + "step": 5403 + }, + { + "epoch": 0.3388300206909524, + "grad_norm": 2.8131399154663086, + "learning_rate": 1.5401748106161868e-05, + "loss": 1.1166, + "step": 5404 + }, + { + "epoch": 0.33889272054674274, + "grad_norm": 3.2105319499969482, + "learning_rate": 1.5400038999182588e-05, + "loss": 1.2493, + "step": 5405 + }, + { + "epoch": 0.33895542040253307, + "grad_norm": 2.9879539012908936, + "learning_rate": 1.539832966950546e-05, + "loss": 1.2485, + "step": 5406 + }, + { + "epoch": 0.3390181202583234, + "grad_norm": 2.7313687801361084, + "learning_rate": 
1.5396620117200983e-05, + "loss": 1.0424, + "step": 5407 + }, + { + "epoch": 0.3390808201141137, + "grad_norm": 2.9354095458984375, + "learning_rate": 1.5394910342339656e-05, + "loss": 1.2653, + "step": 5408 + }, + { + "epoch": 0.33914351996990405, + "grad_norm": 3.2166266441345215, + "learning_rate": 1.5393200344991993e-05, + "loss": 1.1595, + "step": 5409 + }, + { + "epoch": 0.3392062198256944, + "grad_norm": 2.8643784523010254, + "learning_rate": 1.539149012522851e-05, + "loss": 1.2483, + "step": 5410 + }, + { + "epoch": 0.33926891968148476, + "grad_norm": 3.281264066696167, + "learning_rate": 1.5389779683119745e-05, + "loss": 0.9215, + "step": 5411 + }, + { + "epoch": 0.3393316195372751, + "grad_norm": 3.1004323959350586, + "learning_rate": 1.538806901873622e-05, + "loss": 1.0742, + "step": 5412 + }, + { + "epoch": 0.3393943193930654, + "grad_norm": 2.9680933952331543, + "learning_rate": 1.53863581321485e-05, + "loss": 1.1429, + "step": 5413 + }, + { + "epoch": 0.33945701924885574, + "grad_norm": 2.790970802307129, + "learning_rate": 1.5384647023427136e-05, + "loss": 1.0311, + "step": 5414 + }, + { + "epoch": 0.33951971910464607, + "grad_norm": 3.4364843368530273, + "learning_rate": 1.538293569264269e-05, + "loss": 1.1498, + "step": 5415 + }, + { + "epoch": 0.3395824189604364, + "grad_norm": 2.916346788406372, + "learning_rate": 1.5381224139865746e-05, + "loss": 1.0939, + "step": 5416 + }, + { + "epoch": 0.3396451188162267, + "grad_norm": 3.1973717212677, + "learning_rate": 1.5379512365166878e-05, + "loss": 1.1044, + "step": 5417 + }, + { + "epoch": 0.33970781867201705, + "grad_norm": 2.8000521659851074, + "learning_rate": 1.5377800368616687e-05, + "loss": 1.2327, + "step": 5418 + }, + { + "epoch": 0.3397705185278074, + "grad_norm": 3.007814407348633, + "learning_rate": 1.5376088150285777e-05, + "loss": 1.2001, + "step": 5419 + }, + { + "epoch": 0.3398332183835977, + "grad_norm": 3.3301024436950684, + "learning_rate": 1.5374375710244754e-05, + "loss": 1.094, + "step": 5420 + }, + { + "epoch": 0.33989591823938803, + "grad_norm": 3.074061393737793, + "learning_rate": 1.537266304856424e-05, + "loss": 1.2369, + "step": 5421 + }, + { + "epoch": 0.33995861809517836, + "grad_norm": 2.877028703689575, + "learning_rate": 1.5370950165314874e-05, + "loss": 1.2047, + "step": 5422 + }, + { + "epoch": 0.34002131795096874, + "grad_norm": 3.122781276702881, + "learning_rate": 1.536923706056728e-05, + "loss": 1.2698, + "step": 5423 + }, + { + "epoch": 0.34008401780675906, + "grad_norm": 2.8377366065979004, + "learning_rate": 1.5367523734392122e-05, + "loss": 1.0911, + "step": 5424 + }, + { + "epoch": 0.3401467176625494, + "grad_norm": 3.068850517272949, + "learning_rate": 1.5365810186860047e-05, + "loss": 1.0773, + "step": 5425 + }, + { + "epoch": 0.3402094175183397, + "grad_norm": 3.4403109550476074, + "learning_rate": 1.5364096418041723e-05, + "loss": 1.0045, + "step": 5426 + }, + { + "epoch": 0.34027211737413005, + "grad_norm": 2.899738311767578, + "learning_rate": 1.5362382428007837e-05, + "loss": 1.0334, + "step": 5427 + }, + { + "epoch": 0.34033481722992037, + "grad_norm": 2.7818832397460938, + "learning_rate": 1.5360668216829056e-05, + "loss": 0.9251, + "step": 5428 + }, + { + "epoch": 0.3403975170857107, + "grad_norm": 2.9120259284973145, + "learning_rate": 1.5358953784576093e-05, + "loss": 1.1962, + "step": 5429 + }, + { + "epoch": 0.340460216941501, + "grad_norm": 3.0451831817626953, + "learning_rate": 1.5357239131319634e-05, + "loss": 1.2014, + "step": 5430 + }, + { + "epoch": 
0.34052291679729135, + "grad_norm": 3.0463035106658936, + "learning_rate": 1.5355524257130405e-05, + "loss": 1.1603, + "step": 5431 + }, + { + "epoch": 0.3405856166530817, + "grad_norm": 3.3377461433410645, + "learning_rate": 1.5353809162079117e-05, + "loss": 1.0052, + "step": 5432 + }, + { + "epoch": 0.340648316508872, + "grad_norm": 2.891493320465088, + "learning_rate": 1.535209384623651e-05, + "loss": 1.2105, + "step": 5433 + }, + { + "epoch": 0.34071101636466233, + "grad_norm": 3.0560686588287354, + "learning_rate": 1.5350378309673315e-05, + "loss": 1.0621, + "step": 5434 + }, + { + "epoch": 0.3407737162204527, + "grad_norm": 2.925461530685425, + "learning_rate": 1.5348662552460286e-05, + "loss": 1.0416, + "step": 5435 + }, + { + "epoch": 0.34083641607624304, + "grad_norm": 2.8973050117492676, + "learning_rate": 1.534694657466818e-05, + "loss": 1.3245, + "step": 5436 + }, + { + "epoch": 0.34089911593203337, + "grad_norm": 2.9600167274475098, + "learning_rate": 1.5345230376367766e-05, + "loss": 1.0867, + "step": 5437 + }, + { + "epoch": 0.3409618157878237, + "grad_norm": 3.0094151496887207, + "learning_rate": 1.5343513957629814e-05, + "loss": 1.2521, + "step": 5438 + }, + { + "epoch": 0.341024515643614, + "grad_norm": 3.0911996364593506, + "learning_rate": 1.5341797318525115e-05, + "loss": 1.0258, + "step": 5439 + }, + { + "epoch": 0.34108721549940435, + "grad_norm": 3.1363837718963623, + "learning_rate": 1.534008045912446e-05, + "loss": 1.1023, + "step": 5440 + }, + { + "epoch": 0.3411499153551947, + "grad_norm": 3.0285820960998535, + "learning_rate": 1.5338363379498656e-05, + "loss": 0.9965, + "step": 5441 + }, + { + "epoch": 0.341212615210985, + "grad_norm": 2.953470468521118, + "learning_rate": 1.533664607971851e-05, + "loss": 1.0286, + "step": 5442 + }, + { + "epoch": 0.34127531506677533, + "grad_norm": 2.4453601837158203, + "learning_rate": 1.533492855985485e-05, + "loss": 1.1595, + "step": 5443 + }, + { + "epoch": 0.34133801492256566, + "grad_norm": 2.9343340396881104, + "learning_rate": 1.5333210819978503e-05, + "loss": 1.1703, + "step": 5444 + }, + { + "epoch": 0.341400714778356, + "grad_norm": 3.0672473907470703, + "learning_rate": 1.533149286016031e-05, + "loss": 1.1402, + "step": 5445 + }, + { + "epoch": 0.34146341463414637, + "grad_norm": 2.908554792404175, + "learning_rate": 1.5329774680471114e-05, + "loss": 1.1953, + "step": 5446 + }, + { + "epoch": 0.3415261144899367, + "grad_norm": 3.1979780197143555, + "learning_rate": 1.5328056280981777e-05, + "loss": 1.3429, + "step": 5447 + }, + { + "epoch": 0.341588814345727, + "grad_norm": 2.7982985973358154, + "learning_rate": 1.5326337661763173e-05, + "loss": 1.3235, + "step": 5448 + }, + { + "epoch": 0.34165151420151735, + "grad_norm": 2.7719621658325195, + "learning_rate": 1.5324618822886167e-05, + "loss": 1.0487, + "step": 5449 + }, + { + "epoch": 0.3417142140573077, + "grad_norm": 3.1183016300201416, + "learning_rate": 1.5322899764421647e-05, + "loss": 1.1255, + "step": 5450 + }, + { + "epoch": 0.341776913913098, + "grad_norm": 2.7407724857330322, + "learning_rate": 1.532118048644051e-05, + "loss": 1.2616, + "step": 5451 + }, + { + "epoch": 0.34183961376888833, + "grad_norm": 2.99938702583313, + "learning_rate": 1.5319460989013655e-05, + "loss": 1.0364, + "step": 5452 + }, + { + "epoch": 0.34190231362467866, + "grad_norm": 2.8681914806365967, + "learning_rate": 1.5317741272212e-05, + "loss": 1.148, + "step": 5453 + }, + { + "epoch": 0.341965013480469, + "grad_norm": 2.924368143081665, + "learning_rate": 
1.5316021336106463e-05, + "loss": 1.1675, + "step": 5454 + }, + { + "epoch": 0.3420277133362593, + "grad_norm": 3.4486186504364014, + "learning_rate": 1.531430118076797e-05, + "loss": 1.2389, + "step": 5455 + }, + { + "epoch": 0.34209041319204964, + "grad_norm": 2.982351779937744, + "learning_rate": 1.531258080626747e-05, + "loss": 1.327, + "step": 5456 + }, + { + "epoch": 0.34215311304783996, + "grad_norm": 2.906907081604004, + "learning_rate": 1.5310860212675902e-05, + "loss": 1.1459, + "step": 5457 + }, + { + "epoch": 0.34221581290363035, + "grad_norm": 2.6019747257232666, + "learning_rate": 1.5309139400064224e-05, + "loss": 1.1702, + "step": 5458 + }, + { + "epoch": 0.34227851275942067, + "grad_norm": 2.8778023719787598, + "learning_rate": 1.530741836850341e-05, + "loss": 1.295, + "step": 5459 + }, + { + "epoch": 0.342341212615211, + "grad_norm": 2.976168632507324, + "learning_rate": 1.530569711806443e-05, + "loss": 1.0701, + "step": 5460 + }, + { + "epoch": 0.3424039124710013, + "grad_norm": 3.2611770629882812, + "learning_rate": 1.5303975648818265e-05, + "loss": 1.2425, + "step": 5461 + }, + { + "epoch": 0.34246661232679165, + "grad_norm": 2.8405120372772217, + "learning_rate": 1.530225396083592e-05, + "loss": 1.2091, + "step": 5462 + }, + { + "epoch": 0.342529312182582, + "grad_norm": 3.021369695663452, + "learning_rate": 1.5300532054188382e-05, + "loss": 1.2381, + "step": 5463 + }, + { + "epoch": 0.3425920120383723, + "grad_norm": 3.0933094024658203, + "learning_rate": 1.5298809928946676e-05, + "loss": 1.217, + "step": 5464 + }, + { + "epoch": 0.34265471189416263, + "grad_norm": 2.837691068649292, + "learning_rate": 1.5297087585181816e-05, + "loss": 1.0128, + "step": 5465 + }, + { + "epoch": 0.34271741174995296, + "grad_norm": 3.1388144493103027, + "learning_rate": 1.5295365022964832e-05, + "loss": 1.1491, + "step": 5466 + }, + { + "epoch": 0.3427801116057433, + "grad_norm": 3.089761734008789, + "learning_rate": 1.5293642242366762e-05, + "loss": 1.0254, + "step": 5467 + }, + { + "epoch": 0.3428428114615336, + "grad_norm": 2.979832172393799, + "learning_rate": 1.529191924345866e-05, + "loss": 1.1468, + "step": 5468 + }, + { + "epoch": 0.342905511317324, + "grad_norm": 3.2070322036743164, + "learning_rate": 1.529019602631157e-05, + "loss": 1.336, + "step": 5469 + }, + { + "epoch": 0.3429682111731143, + "grad_norm": 3.0791609287261963, + "learning_rate": 1.528847259099657e-05, + "loss": 1.0097, + "step": 5470 + }, + { + "epoch": 0.34303091102890465, + "grad_norm": 3.0364115238189697, + "learning_rate": 1.528674893758473e-05, + "loss": 1.1897, + "step": 5471 + }, + { + "epoch": 0.343093610884695, + "grad_norm": 3.1540451049804688, + "learning_rate": 1.5285025066147126e-05, + "loss": 1.2605, + "step": 5472 + }, + { + "epoch": 0.3431563107404853, + "grad_norm": 2.978590965270996, + "learning_rate": 1.5283300976754864e-05, + "loss": 1.2611, + "step": 5473 + }, + { + "epoch": 0.34321901059627563, + "grad_norm": 3.088132381439209, + "learning_rate": 1.5281576669479036e-05, + "loss": 1.1446, + "step": 5474 + }, + { + "epoch": 0.34328171045206596, + "grad_norm": 2.945028066635132, + "learning_rate": 1.5279852144390757e-05, + "loss": 1.1704, + "step": 5475 + }, + { + "epoch": 0.3433444103078563, + "grad_norm": 2.994425058364868, + "learning_rate": 1.5278127401561145e-05, + "loss": 1.198, + "step": 5476 + }, + { + "epoch": 0.3434071101636466, + "grad_norm": 3.354137659072876, + "learning_rate": 1.527640244106133e-05, + "loss": 0.99, + "step": 5477 + }, + { + "epoch": 0.34346981001943694, + 
"grad_norm": 2.8628742694854736, + "learning_rate": 1.5274677262962444e-05, + "loss": 1.1419, + "step": 5478 + }, + { + "epoch": 0.34353250987522727, + "grad_norm": 3.253660202026367, + "learning_rate": 1.527295186733564e-05, + "loss": 1.1191, + "step": 5479 + }, + { + "epoch": 0.3435952097310176, + "grad_norm": 2.8102259635925293, + "learning_rate": 1.527122625425207e-05, + "loss": 1.1357, + "step": 5480 + }, + { + "epoch": 0.343657909586808, + "grad_norm": 3.201334238052368, + "learning_rate": 1.5269500423782896e-05, + "loss": 1.1984, + "step": 5481 + }, + { + "epoch": 0.3437206094425983, + "grad_norm": 3.2804129123687744, + "learning_rate": 1.5267774375999303e-05, + "loss": 1.093, + "step": 5482 + }, + { + "epoch": 0.34378330929838863, + "grad_norm": 2.9778287410736084, + "learning_rate": 1.5266048110972457e-05, + "loss": 1.0194, + "step": 5483 + }, + { + "epoch": 0.34384600915417896, + "grad_norm": 3.058475971221924, + "learning_rate": 1.526432162877356e-05, + "loss": 1.1288, + "step": 5484 + }, + { + "epoch": 0.3439087090099693, + "grad_norm": 2.7978527545928955, + "learning_rate": 1.526259492947381e-05, + "loss": 1.3858, + "step": 5485 + }, + { + "epoch": 0.3439714088657596, + "grad_norm": 2.9963185787200928, + "learning_rate": 1.526086801314441e-05, + "loss": 0.9408, + "step": 5486 + }, + { + "epoch": 0.34403410872154994, + "grad_norm": 3.001991033554077, + "learning_rate": 1.5259140879856586e-05, + "loss": 1.2656, + "step": 5487 + }, + { + "epoch": 0.34409680857734026, + "grad_norm": 3.107447862625122, + "learning_rate": 1.5257413529681567e-05, + "loss": 1.095, + "step": 5488 + }, + { + "epoch": 0.3441595084331306, + "grad_norm": 2.7788569927215576, + "learning_rate": 1.5255685962690581e-05, + "loss": 1.0666, + "step": 5489 + }, + { + "epoch": 0.3442222082889209, + "grad_norm": 2.935478925704956, + "learning_rate": 1.5253958178954875e-05, + "loss": 1.2384, + "step": 5490 + }, + { + "epoch": 0.34428490814471124, + "grad_norm": 2.7039003372192383, + "learning_rate": 1.5252230178545704e-05, + "loss": 1.1482, + "step": 5491 + }, + { + "epoch": 0.3443476080005016, + "grad_norm": 3.3224358558654785, + "learning_rate": 1.5250501961534334e-05, + "loss": 1.0595, + "step": 5492 + }, + { + "epoch": 0.34441030785629195, + "grad_norm": 2.8174819946289062, + "learning_rate": 1.5248773527992033e-05, + "loss": 1.2471, + "step": 5493 + }, + { + "epoch": 0.3444730077120823, + "grad_norm": 3.1239049434661865, + "learning_rate": 1.524704487799008e-05, + "loss": 1.101, + "step": 5494 + }, + { + "epoch": 0.3445357075678726, + "grad_norm": 2.847879409790039, + "learning_rate": 1.524531601159977e-05, + "loss": 0.953, + "step": 5495 + }, + { + "epoch": 0.34459840742366293, + "grad_norm": 3.1621363162994385, + "learning_rate": 1.5243586928892398e-05, + "loss": 1.1986, + "step": 5496 + }, + { + "epoch": 0.34466110727945326, + "grad_norm": 3.3301010131835938, + "learning_rate": 1.5241857629939272e-05, + "loss": 1.029, + "step": 5497 + }, + { + "epoch": 0.3447238071352436, + "grad_norm": 3.1968061923980713, + "learning_rate": 1.5240128114811708e-05, + "loss": 1.1004, + "step": 5498 + }, + { + "epoch": 0.3447865069910339, + "grad_norm": 2.6515085697174072, + "learning_rate": 1.5238398383581033e-05, + "loss": 1.1666, + "step": 5499 + }, + { + "epoch": 0.34484920684682424, + "grad_norm": 3.0800390243530273, + "learning_rate": 1.5236668436318577e-05, + "loss": 1.1945, + "step": 5500 + }, + { + "epoch": 0.34491190670261457, + "grad_norm": 2.906259298324585, + "learning_rate": 1.5234938273095687e-05, + "loss": 
1.0261, + "step": 5501 + }, + { + "epoch": 0.3449746065584049, + "grad_norm": 2.700643539428711, + "learning_rate": 1.5233207893983717e-05, + "loss": 1.295, + "step": 5502 + }, + { + "epoch": 0.3450373064141952, + "grad_norm": 2.925936222076416, + "learning_rate": 1.523147729905402e-05, + "loss": 1.0926, + "step": 5503 + }, + { + "epoch": 0.3451000062699856, + "grad_norm": 3.1306118965148926, + "learning_rate": 1.5229746488377974e-05, + "loss": 1.2933, + "step": 5504 + }, + { + "epoch": 0.34516270612577593, + "grad_norm": 3.1517493724823, + "learning_rate": 1.5228015462026955e-05, + "loss": 1.3539, + "step": 5505 + }, + { + "epoch": 0.34522540598156626, + "grad_norm": 2.8510193824768066, + "learning_rate": 1.522628422007235e-05, + "loss": 1.3175, + "step": 5506 + }, + { + "epoch": 0.3452881058373566, + "grad_norm": 3.0837395191192627, + "learning_rate": 1.5224552762585552e-05, + "loss": 1.153, + "step": 5507 + }, + { + "epoch": 0.3453508056931469, + "grad_norm": 3.891629457473755, + "learning_rate": 1.5222821089637973e-05, + "loss": 1.0874, + "step": 5508 + }, + { + "epoch": 0.34541350554893724, + "grad_norm": 2.8512842655181885, + "learning_rate": 1.5221089201301023e-05, + "loss": 1.1071, + "step": 5509 + }, + { + "epoch": 0.34547620540472757, + "grad_norm": 2.8959591388702393, + "learning_rate": 1.521935709764613e-05, + "loss": 1.1917, + "step": 5510 + }, + { + "epoch": 0.3455389052605179, + "grad_norm": 2.822803020477295, + "learning_rate": 1.5217624778744718e-05, + "loss": 1.3259, + "step": 5511 + }, + { + "epoch": 0.3456016051163082, + "grad_norm": 2.8762478828430176, + "learning_rate": 1.521589224466823e-05, + "loss": 1.1896, + "step": 5512 + }, + { + "epoch": 0.34566430497209855, + "grad_norm": 2.6664164066314697, + "learning_rate": 1.5214159495488123e-05, + "loss": 1.0812, + "step": 5513 + }, + { + "epoch": 0.3457270048278889, + "grad_norm": 2.7919955253601074, + "learning_rate": 1.521242653127585e-05, + "loss": 1.2615, + "step": 5514 + }, + { + "epoch": 0.3457897046836792, + "grad_norm": 3.1956331729888916, + "learning_rate": 1.521069335210288e-05, + "loss": 1.0935, + "step": 5515 + }, + { + "epoch": 0.3458524045394696, + "grad_norm": 3.048168420791626, + "learning_rate": 1.5208959958040685e-05, + "loss": 1.1291, + "step": 5516 + }, + { + "epoch": 0.3459151043952599, + "grad_norm": 3.224613904953003, + "learning_rate": 1.5207226349160757e-05, + "loss": 1.0412, + "step": 5517 + }, + { + "epoch": 0.34597780425105024, + "grad_norm": 2.9284706115722656, + "learning_rate": 1.5205492525534586e-05, + "loss": 1.1196, + "step": 5518 + }, + { + "epoch": 0.34604050410684056, + "grad_norm": 2.6007142066955566, + "learning_rate": 1.5203758487233677e-05, + "loss": 1.1717, + "step": 5519 + }, + { + "epoch": 0.3461032039626309, + "grad_norm": 2.94228196144104, + "learning_rate": 1.5202024234329537e-05, + "loss": 1.0847, + "step": 5520 + }, + { + "epoch": 0.3461659038184212, + "grad_norm": 2.9413154125213623, + "learning_rate": 1.5200289766893694e-05, + "loss": 1.1507, + "step": 5521 + }, + { + "epoch": 0.34622860367421154, + "grad_norm": 2.9887638092041016, + "learning_rate": 1.5198555084997673e-05, + "loss": 0.9222, + "step": 5522 + }, + { + "epoch": 0.34629130353000187, + "grad_norm": 3.2293243408203125, + "learning_rate": 1.5196820188713011e-05, + "loss": 1.197, + "step": 5523 + }, + { + "epoch": 0.3463540033857922, + "grad_norm": 3.0155839920043945, + "learning_rate": 1.5195085078111263e-05, + "loss": 1.286, + "step": 5524 + }, + { + "epoch": 0.3464167032415825, + "grad_norm": 
3.2577531337738037, + "learning_rate": 1.5193349753263972e-05, + "loss": 1.1578, + "step": 5525 + }, + { + "epoch": 0.34647940309737285, + "grad_norm": 2.954712390899658, + "learning_rate": 1.5191614214242715e-05, + "loss": 1.1092, + "step": 5526 + }, + { + "epoch": 0.34654210295316323, + "grad_norm": 2.911461591720581, + "learning_rate": 1.5189878461119061e-05, + "loss": 0.9131, + "step": 5527 + }, + { + "epoch": 0.34660480280895356, + "grad_norm": 3.1550567150115967, + "learning_rate": 1.5188142493964595e-05, + "loss": 1.0921, + "step": 5528 + }, + { + "epoch": 0.3466675026647439, + "grad_norm": 2.898050308227539, + "learning_rate": 1.5186406312850901e-05, + "loss": 1.0985, + "step": 5529 + }, + { + "epoch": 0.3467302025205342, + "grad_norm": 3.582439422607422, + "learning_rate": 1.5184669917849588e-05, + "loss": 1.1992, + "step": 5530 + }, + { + "epoch": 0.34679290237632454, + "grad_norm": 2.687420606613159, + "learning_rate": 1.5182933309032258e-05, + "loss": 0.9455, + "step": 5531 + }, + { + "epoch": 0.34685560223211487, + "grad_norm": 3.0384223461151123, + "learning_rate": 1.5181196486470532e-05, + "loss": 1.1419, + "step": 5532 + }, + { + "epoch": 0.3469183020879052, + "grad_norm": 2.7120347023010254, + "learning_rate": 1.517945945023604e-05, + "loss": 1.1421, + "step": 5533 + }, + { + "epoch": 0.3469810019436955, + "grad_norm": 3.244953155517578, + "learning_rate": 1.5177722200400413e-05, + "loss": 1.3787, + "step": 5534 + }, + { + "epoch": 0.34704370179948585, + "grad_norm": 3.2291600704193115, + "learning_rate": 1.5175984737035293e-05, + "loss": 1.1474, + "step": 5535 + }, + { + "epoch": 0.3471064016552762, + "grad_norm": 3.328519821166992, + "learning_rate": 1.5174247060212341e-05, + "loss": 1.2406, + "step": 5536 + }, + { + "epoch": 0.3471691015110665, + "grad_norm": 2.9776008129119873, + "learning_rate": 1.5172509170003213e-05, + "loss": 1.0942, + "step": 5537 + }, + { + "epoch": 0.34723180136685683, + "grad_norm": 2.9064996242523193, + "learning_rate": 1.5170771066479579e-05, + "loss": 1.0792, + "step": 5538 + }, + { + "epoch": 0.3472945012226472, + "grad_norm": 2.9740047454833984, + "learning_rate": 1.5169032749713121e-05, + "loss": 1.0559, + "step": 5539 + }, + { + "epoch": 0.34735720107843754, + "grad_norm": 3.011836528778076, + "learning_rate": 1.5167294219775527e-05, + "loss": 1.1296, + "step": 5540 + }, + { + "epoch": 0.34741990093422787, + "grad_norm": 2.8367393016815186, + "learning_rate": 1.5165555476738496e-05, + "loss": 1.1299, + "step": 5541 + }, + { + "epoch": 0.3474826007900182, + "grad_norm": 2.8423984050750732, + "learning_rate": 1.5163816520673729e-05, + "loss": 1.1539, + "step": 5542 + }, + { + "epoch": 0.3475453006458085, + "grad_norm": 2.9408466815948486, + "learning_rate": 1.5162077351652942e-05, + "loss": 0.8393, + "step": 5543 + }, + { + "epoch": 0.34760800050159885, + "grad_norm": 2.9924161434173584, + "learning_rate": 1.5160337969747863e-05, + "loss": 1.0692, + "step": 5544 + }, + { + "epoch": 0.3476707003573892, + "grad_norm": 2.682366371154785, + "learning_rate": 1.5158598375030218e-05, + "loss": 1.1602, + "step": 5545 + }, + { + "epoch": 0.3477334002131795, + "grad_norm": 2.916747570037842, + "learning_rate": 1.5156858567571752e-05, + "loss": 1.3022, + "step": 5546 + }, + { + "epoch": 0.3477961000689698, + "grad_norm": 2.813255548477173, + "learning_rate": 1.5155118547444215e-05, + "loss": 1.0526, + "step": 5547 + }, + { + "epoch": 0.34785879992476015, + "grad_norm": 2.989656448364258, + "learning_rate": 1.5153378314719362e-05, + "loss": 1.2081, + 
"step": 5548 + }, + { + "epoch": 0.3479214997805505, + "grad_norm": 3.0738985538482666, + "learning_rate": 1.515163786946896e-05, + "loss": 1.0669, + "step": 5549 + }, + { + "epoch": 0.34798419963634086, + "grad_norm": 3.096743583679199, + "learning_rate": 1.5149897211764792e-05, + "loss": 0.9648, + "step": 5550 + }, + { + "epoch": 0.3480468994921312, + "grad_norm": 3.1560027599334717, + "learning_rate": 1.5148156341678637e-05, + "loss": 1.1626, + "step": 5551 + }, + { + "epoch": 0.3481095993479215, + "grad_norm": 2.6993303298950195, + "learning_rate": 1.5146415259282288e-05, + "loss": 1.1771, + "step": 5552 + }, + { + "epoch": 0.34817229920371184, + "grad_norm": 2.9574968814849854, + "learning_rate": 1.514467396464755e-05, + "loss": 1.1853, + "step": 5553 + }, + { + "epoch": 0.34823499905950217, + "grad_norm": 3.1458373069763184, + "learning_rate": 1.514293245784623e-05, + "loss": 1.2244, + "step": 5554 + }, + { + "epoch": 0.3482976989152925, + "grad_norm": 3.0254013538360596, + "learning_rate": 1.5141190738950155e-05, + "loss": 0.891, + "step": 5555 + }, + { + "epoch": 0.3483603987710828, + "grad_norm": 3.0577878952026367, + "learning_rate": 1.5139448808031148e-05, + "loss": 1.1749, + "step": 5556 + }, + { + "epoch": 0.34842309862687315, + "grad_norm": 2.8602797985076904, + "learning_rate": 1.5137706665161047e-05, + "loss": 1.1289, + "step": 5557 + }, + { + "epoch": 0.3484857984826635, + "grad_norm": 3.1407549381256104, + "learning_rate": 1.5135964310411699e-05, + "loss": 0.9598, + "step": 5558 + }, + { + "epoch": 0.3485484983384538, + "grad_norm": 2.7908589839935303, + "learning_rate": 1.513422174385496e-05, + "loss": 1.1601, + "step": 5559 + }, + { + "epoch": 0.34861119819424413, + "grad_norm": 3.0119001865386963, + "learning_rate": 1.5132478965562692e-05, + "loss": 1.1597, + "step": 5560 + }, + { + "epoch": 0.34867389805003446, + "grad_norm": 2.636284112930298, + "learning_rate": 1.5130735975606765e-05, + "loss": 1.0133, + "step": 5561 + }, + { + "epoch": 0.34873659790582484, + "grad_norm": 2.9317073822021484, + "learning_rate": 1.5128992774059063e-05, + "loss": 1.2575, + "step": 5562 + }, + { + "epoch": 0.34879929776161517, + "grad_norm": 2.840566873550415, + "learning_rate": 1.5127249360991477e-05, + "loss": 1.0727, + "step": 5563 + }, + { + "epoch": 0.3488619976174055, + "grad_norm": 2.942458391189575, + "learning_rate": 1.5125505736475901e-05, + "loss": 1.1405, + "step": 5564 + }, + { + "epoch": 0.3489246974731958, + "grad_norm": 3.3334085941314697, + "learning_rate": 1.5123761900584245e-05, + "loss": 1.1423, + "step": 5565 + }, + { + "epoch": 0.34898739732898615, + "grad_norm": 2.666051149368286, + "learning_rate": 1.5122017853388427e-05, + "loss": 1.0113, + "step": 5566 + }, + { + "epoch": 0.3490500971847765, + "grad_norm": 2.9128782749176025, + "learning_rate": 1.5120273594960365e-05, + "loss": 1.0561, + "step": 5567 + }, + { + "epoch": 0.3491127970405668, + "grad_norm": 3.0581889152526855, + "learning_rate": 1.5118529125372e-05, + "loss": 1.0361, + "step": 5568 + }, + { + "epoch": 0.34917549689635713, + "grad_norm": 3.1065640449523926, + "learning_rate": 1.5116784444695267e-05, + "loss": 1.1079, + "step": 5569 + }, + { + "epoch": 0.34923819675214746, + "grad_norm": 3.1801862716674805, + "learning_rate": 1.5115039553002123e-05, + "loss": 1.2513, + "step": 5570 + }, + { + "epoch": 0.3493008966079378, + "grad_norm": 2.9606597423553467, + "learning_rate": 1.5113294450364521e-05, + "loss": 1.2171, + "step": 5571 + }, + { + "epoch": 0.3493635964637281, + "grad_norm": 
3.0974280834198, + "learning_rate": 1.5111549136854437e-05, + "loss": 1.3181, + "step": 5572 + }, + { + "epoch": 0.3494262963195185, + "grad_norm": 3.3936593532562256, + "learning_rate": 1.5109803612543841e-05, + "loss": 1.3184, + "step": 5573 + }, + { + "epoch": 0.3494889961753088, + "grad_norm": 3.2795588970184326, + "learning_rate": 1.510805787750472e-05, + "loss": 0.8103, + "step": 5574 + }, + { + "epoch": 0.34955169603109915, + "grad_norm": 3.3971362113952637, + "learning_rate": 1.510631193180907e-05, + "loss": 1.1391, + "step": 5575 + }, + { + "epoch": 0.3496143958868895, + "grad_norm": 2.767221212387085, + "learning_rate": 1.5104565775528894e-05, + "loss": 1.1991, + "step": 5576 + }, + { + "epoch": 0.3496770957426798, + "grad_norm": 2.8117220401763916, + "learning_rate": 1.5102819408736202e-05, + "loss": 1.1059, + "step": 5577 + }, + { + "epoch": 0.3497397955984701, + "grad_norm": 2.9934678077697754, + "learning_rate": 1.5101072831503013e-05, + "loss": 1.1175, + "step": 5578 + }, + { + "epoch": 0.34980249545426045, + "grad_norm": 2.7835679054260254, + "learning_rate": 1.5099326043901361e-05, + "loss": 1.2343, + "step": 5579 + }, + { + "epoch": 0.3498651953100508, + "grad_norm": 2.944068431854248, + "learning_rate": 1.5097579046003276e-05, + "loss": 1.2287, + "step": 5580 + }, + { + "epoch": 0.3499278951658411, + "grad_norm": 3.082746982574463, + "learning_rate": 1.5095831837880814e-05, + "loss": 1.1876, + "step": 5581 + }, + { + "epoch": 0.34999059502163143, + "grad_norm": 3.116727828979492, + "learning_rate": 1.509408441960602e-05, + "loss": 1.1294, + "step": 5582 + }, + { + "epoch": 0.35005329487742176, + "grad_norm": 3.3361856937408447, + "learning_rate": 1.5092336791250964e-05, + "loss": 1.15, + "step": 5583 + }, + { + "epoch": 0.3501159947332121, + "grad_norm": 2.9702370166778564, + "learning_rate": 1.5090588952887715e-05, + "loss": 1.2752, + "step": 5584 + }, + { + "epoch": 0.35017869458900247, + "grad_norm": 3.037398338317871, + "learning_rate": 1.5088840904588356e-05, + "loss": 1.2503, + "step": 5585 + }, + { + "epoch": 0.3502413944447928, + "grad_norm": 2.7378761768341064, + "learning_rate": 1.5087092646424977e-05, + "loss": 1.145, + "step": 5586 + }, + { + "epoch": 0.3503040943005831, + "grad_norm": 3.4288809299468994, + "learning_rate": 1.5085344178469672e-05, + "loss": 1.2492, + "step": 5587 + }, + { + "epoch": 0.35036679415637345, + "grad_norm": 2.6684634685516357, + "learning_rate": 1.5083595500794554e-05, + "loss": 1.1468, + "step": 5588 + }, + { + "epoch": 0.3504294940121638, + "grad_norm": 3.0269248485565186, + "learning_rate": 1.5081846613471736e-05, + "loss": 1.0657, + "step": 5589 + }, + { + "epoch": 0.3504921938679541, + "grad_norm": 3.2222399711608887, + "learning_rate": 1.508009751657334e-05, + "loss": 1.1714, + "step": 5590 + }, + { + "epoch": 0.35055489372374443, + "grad_norm": 2.787775993347168, + "learning_rate": 1.5078348210171499e-05, + "loss": 1.0849, + "step": 5591 + }, + { + "epoch": 0.35061759357953476, + "grad_norm": 2.6167335510253906, + "learning_rate": 1.507659869433836e-05, + "loss": 1.1706, + "step": 5592 + }, + { + "epoch": 0.3506802934353251, + "grad_norm": 3.196805238723755, + "learning_rate": 1.5074848969146065e-05, + "loss": 1.0364, + "step": 5593 + }, + { + "epoch": 0.3507429932911154, + "grad_norm": 3.026745557785034, + "learning_rate": 1.5073099034666779e-05, + "loss": 1.1716, + "step": 5594 + }, + { + "epoch": 0.35080569314690574, + "grad_norm": 3.0881874561309814, + "learning_rate": 1.5071348890972668e-05, + "loss": 1.0103, + "step": 
5595 + }, + { + "epoch": 0.35086839300269607, + "grad_norm": 2.9193291664123535, + "learning_rate": 1.5069598538135905e-05, + "loss": 1.1806, + "step": 5596 + }, + { + "epoch": 0.35093109285848645, + "grad_norm": 2.982534646987915, + "learning_rate": 1.506784797622868e-05, + "loss": 1.2992, + "step": 5597 + }, + { + "epoch": 0.3509937927142768, + "grad_norm": 3.1698532104492188, + "learning_rate": 1.506609720532318e-05, + "loss": 1.0152, + "step": 5598 + }, + { + "epoch": 0.3510564925700671, + "grad_norm": 3.1596457958221436, + "learning_rate": 1.5064346225491612e-05, + "loss": 1.1034, + "step": 5599 + }, + { + "epoch": 0.35111919242585743, + "grad_norm": 3.009389877319336, + "learning_rate": 1.5062595036806182e-05, + "loss": 1.2373, + "step": 5600 + }, + { + "epoch": 0.35118189228164776, + "grad_norm": 3.0173754692077637, + "learning_rate": 1.5060843639339116e-05, + "loss": 1.0281, + "step": 5601 + }, + { + "epoch": 0.3512445921374381, + "grad_norm": 3.0751147270202637, + "learning_rate": 1.5059092033162635e-05, + "loss": 1.1999, + "step": 5602 + }, + { + "epoch": 0.3513072919932284, + "grad_norm": 3.2123289108276367, + "learning_rate": 1.505734021834898e-05, + "loss": 1.2059, + "step": 5603 + }, + { + "epoch": 0.35136999184901874, + "grad_norm": 3.019566774368286, + "learning_rate": 1.505558819497039e-05, + "loss": 1.0078, + "step": 5604 + }, + { + "epoch": 0.35143269170480906, + "grad_norm": 2.8787643909454346, + "learning_rate": 1.505383596309912e-05, + "loss": 1.0793, + "step": 5605 + }, + { + "epoch": 0.3514953915605994, + "grad_norm": 3.1912949085235596, + "learning_rate": 1.5052083522807439e-05, + "loss": 1.0324, + "step": 5606 + }, + { + "epoch": 0.3515580914163897, + "grad_norm": 2.926017999649048, + "learning_rate": 1.5050330874167611e-05, + "loss": 0.9049, + "step": 5607 + }, + { + "epoch": 0.3516207912721801, + "grad_norm": 3.270162343978882, + "learning_rate": 1.5048578017251918e-05, + "loss": 0.9704, + "step": 5608 + }, + { + "epoch": 0.3516834911279704, + "grad_norm": 2.6691551208496094, + "learning_rate": 1.5046824952132647e-05, + "loss": 1.1024, + "step": 5609 + }, + { + "epoch": 0.35174619098376075, + "grad_norm": 2.994656801223755, + "learning_rate": 1.5045071678882094e-05, + "loss": 1.1258, + "step": 5610 + }, + { + "epoch": 0.3518088908395511, + "grad_norm": 3.165102958679199, + "learning_rate": 1.5043318197572566e-05, + "loss": 1.0466, + "step": 5611 + }, + { + "epoch": 0.3518715906953414, + "grad_norm": 3.4758665561676025, + "learning_rate": 1.5041564508276372e-05, + "loss": 1.1543, + "step": 5612 + }, + { + "epoch": 0.35193429055113173, + "grad_norm": 3.1655218601226807, + "learning_rate": 1.503981061106584e-05, + "loss": 1.1113, + "step": 5613 + }, + { + "epoch": 0.35199699040692206, + "grad_norm": 2.800495147705078, + "learning_rate": 1.5038056506013297e-05, + "loss": 1.1571, + "step": 5614 + }, + { + "epoch": 0.3520596902627124, + "grad_norm": 3.3455519676208496, + "learning_rate": 1.5036302193191086e-05, + "loss": 1.1106, + "step": 5615 + }, + { + "epoch": 0.3521223901185027, + "grad_norm": 3.192758560180664, + "learning_rate": 1.5034547672671549e-05, + "loss": 1.2065, + "step": 5616 + }, + { + "epoch": 0.35218508997429304, + "grad_norm": 2.9734315872192383, + "learning_rate": 1.503279294452705e-05, + "loss": 1.1584, + "step": 5617 + }, + { + "epoch": 0.35224778983008337, + "grad_norm": 3.087184429168701, + "learning_rate": 1.5031038008829945e-05, + "loss": 1.0105, + "step": 5618 + }, + { + "epoch": 0.3523104896858737, + "grad_norm": 3.0222065448760986, + 
"learning_rate": 1.502928286565262e-05, + "loss": 1.1563, + "step": 5619 + }, + { + "epoch": 0.3523731895416641, + "grad_norm": 2.852092981338501, + "learning_rate": 1.5027527515067446e-05, + "loss": 1.1078, + "step": 5620 + }, + { + "epoch": 0.3524358893974544, + "grad_norm": 2.7641870975494385, + "learning_rate": 1.5025771957146817e-05, + "loss": 1.1179, + "step": 5621 + }, + { + "epoch": 0.35249858925324473, + "grad_norm": 2.7199463844299316, + "learning_rate": 1.5024016191963134e-05, + "loss": 1.03, + "step": 5622 + }, + { + "epoch": 0.35256128910903506, + "grad_norm": 3.0047614574432373, + "learning_rate": 1.5022260219588805e-05, + "loss": 1.1975, + "step": 5623 + }, + { + "epoch": 0.3526239889648254, + "grad_norm": 2.92978572845459, + "learning_rate": 1.5020504040096241e-05, + "loss": 1.1829, + "step": 5624 + }, + { + "epoch": 0.3526866888206157, + "grad_norm": 2.9675912857055664, + "learning_rate": 1.5018747653557875e-05, + "loss": 1.1365, + "step": 5625 + }, + { + "epoch": 0.35274938867640604, + "grad_norm": 2.6356639862060547, + "learning_rate": 1.5016991060046136e-05, + "loss": 1.1666, + "step": 5626 + }, + { + "epoch": 0.35281208853219637, + "grad_norm": 3.042553424835205, + "learning_rate": 1.5015234259633467e-05, + "loss": 1.1129, + "step": 5627 + }, + { + "epoch": 0.3528747883879867, + "grad_norm": 2.7790863513946533, + "learning_rate": 1.5013477252392318e-05, + "loss": 1.1272, + "step": 5628 + }, + { + "epoch": 0.352937488243777, + "grad_norm": 3.404778003692627, + "learning_rate": 1.5011720038395145e-05, + "loss": 1.2429, + "step": 5629 + }, + { + "epoch": 0.35300018809956735, + "grad_norm": 3.1009714603424072, + "learning_rate": 1.5009962617714425e-05, + "loss": 1.2052, + "step": 5630 + }, + { + "epoch": 0.35306288795535773, + "grad_norm": 2.964895009994507, + "learning_rate": 1.5008204990422624e-05, + "loss": 1.0793, + "step": 5631 + }, + { + "epoch": 0.35312558781114806, + "grad_norm": 2.8724863529205322, + "learning_rate": 1.500644715659223e-05, + "loss": 1.1282, + "step": 5632 + }, + { + "epoch": 0.3531882876669384, + "grad_norm": 2.8127870559692383, + "learning_rate": 1.5004689116295739e-05, + "loss": 1.018, + "step": 5633 + }, + { + "epoch": 0.3532509875227287, + "grad_norm": 2.5572052001953125, + "learning_rate": 1.5002930869605647e-05, + "loss": 1.1433, + "step": 5634 + }, + { + "epoch": 0.35331368737851904, + "grad_norm": 2.927907705307007, + "learning_rate": 1.500117241659447e-05, + "loss": 1.1729, + "step": 5635 + }, + { + "epoch": 0.35337638723430936, + "grad_norm": 3.0798797607421875, + "learning_rate": 1.4999413757334723e-05, + "loss": 1.2026, + "step": 5636 + }, + { + "epoch": 0.3534390870900997, + "grad_norm": 3.0816597938537598, + "learning_rate": 1.4997654891898934e-05, + "loss": 1.0317, + "step": 5637 + }, + { + "epoch": 0.35350178694589, + "grad_norm": 2.8269331455230713, + "learning_rate": 1.4995895820359639e-05, + "loss": 1.0932, + "step": 5638 + }, + { + "epoch": 0.35356448680168034, + "grad_norm": 2.7796216011047363, + "learning_rate": 1.4994136542789383e-05, + "loss": 1.2751, + "step": 5639 + }, + { + "epoch": 0.35362718665747067, + "grad_norm": 2.7570722103118896, + "learning_rate": 1.4992377059260718e-05, + "loss": 1.1974, + "step": 5640 + }, + { + "epoch": 0.353689886513261, + "grad_norm": 2.8220646381378174, + "learning_rate": 1.4990617369846206e-05, + "loss": 0.932, + "step": 5641 + }, + { + "epoch": 0.3537525863690513, + "grad_norm": 2.911250352859497, + "learning_rate": 1.4988857474618412e-05, + "loss": 1.1734, + "step": 5642 + }, + { + 
"epoch": 0.3538152862248417, + "grad_norm": 3.2236928939819336, + "learning_rate": 1.4987097373649922e-05, + "loss": 0.9653, + "step": 5643 + }, + { + "epoch": 0.35387798608063203, + "grad_norm": 3.043570041656494, + "learning_rate": 1.4985337067013316e-05, + "loss": 1.2413, + "step": 5644 + }, + { + "epoch": 0.35394068593642236, + "grad_norm": 2.843655824661255, + "learning_rate": 1.4983576554781193e-05, + "loss": 1.1253, + "step": 5645 + }, + { + "epoch": 0.3540033857922127, + "grad_norm": 2.9685888290405273, + "learning_rate": 1.4981815837026155e-05, + "loss": 0.9827, + "step": 5646 + }, + { + "epoch": 0.354066085648003, + "grad_norm": 3.1678595542907715, + "learning_rate": 1.4980054913820814e-05, + "loss": 1.0015, + "step": 5647 + }, + { + "epoch": 0.35412878550379334, + "grad_norm": 2.7862465381622314, + "learning_rate": 1.4978293785237792e-05, + "loss": 1.1292, + "step": 5648 + }, + { + "epoch": 0.35419148535958367, + "grad_norm": 3.828692674636841, + "learning_rate": 1.4976532451349714e-05, + "loss": 1.1226, + "step": 5649 + }, + { + "epoch": 0.354254185215374, + "grad_norm": 3.2210516929626465, + "learning_rate": 1.4974770912229225e-05, + "loss": 1.2031, + "step": 5650 + }, + { + "epoch": 0.3543168850711643, + "grad_norm": 3.0762929916381836, + "learning_rate": 1.4973009167948963e-05, + "loss": 1.3361, + "step": 5651 + }, + { + "epoch": 0.35437958492695465, + "grad_norm": 3.327320098876953, + "learning_rate": 1.4971247218581585e-05, + "loss": 0.974, + "step": 5652 + }, + { + "epoch": 0.354442284782745, + "grad_norm": 3.098877191543579, + "learning_rate": 1.4969485064199756e-05, + "loss": 1.1889, + "step": 5653 + }, + { + "epoch": 0.35450498463853536, + "grad_norm": 3.1838831901550293, + "learning_rate": 1.4967722704876147e-05, + "loss": 1.0908, + "step": 5654 + }, + { + "epoch": 0.3545676844943257, + "grad_norm": 3.266233205795288, + "learning_rate": 1.4965960140683434e-05, + "loss": 1.3854, + "step": 5655 + }, + { + "epoch": 0.354630384350116, + "grad_norm": 2.6408212184906006, + "learning_rate": 1.4964197371694308e-05, + "loss": 0.9428, + "step": 5656 + }, + { + "epoch": 0.35469308420590634, + "grad_norm": 3.014207363128662, + "learning_rate": 1.4962434397981468e-05, + "loss": 1.1893, + "step": 5657 + }, + { + "epoch": 0.35475578406169667, + "grad_norm": 3.1495141983032227, + "learning_rate": 1.4960671219617618e-05, + "loss": 1.2395, + "step": 5658 + }, + { + "epoch": 0.354818483917487, + "grad_norm": 3.247401714324951, + "learning_rate": 1.4958907836675467e-05, + "loss": 1.1044, + "step": 5659 + }, + { + "epoch": 0.3548811837732773, + "grad_norm": 2.969761371612549, + "learning_rate": 1.495714424922774e-05, + "loss": 1.0311, + "step": 5660 + }, + { + "epoch": 0.35494388362906765, + "grad_norm": 3.0937530994415283, + "learning_rate": 1.495538045734717e-05, + "loss": 1.1407, + "step": 5661 + }, + { + "epoch": 0.355006583484858, + "grad_norm": 2.8964242935180664, + "learning_rate": 1.4953616461106492e-05, + "loss": 1.1043, + "step": 5662 + }, + { + "epoch": 0.3550692833406483, + "grad_norm": 3.3024353981018066, + "learning_rate": 1.4951852260578457e-05, + "loss": 1.0667, + "step": 5663 + }, + { + "epoch": 0.3551319831964386, + "grad_norm": 2.9389395713806152, + "learning_rate": 1.4950087855835816e-05, + "loss": 1.1383, + "step": 5664 + }, + { + "epoch": 0.35519468305222895, + "grad_norm": 2.9514200687408447, + "learning_rate": 1.4948323246951334e-05, + "loss": 1.1976, + "step": 5665 + }, + { + "epoch": 0.35525738290801934, + "grad_norm": 3.0231571197509766, + "learning_rate": 
1.4946558433997792e-05, + "loss": 1.2158, + "step": 5666 + }, + { + "epoch": 0.35532008276380966, + "grad_norm": 2.7867367267608643, + "learning_rate": 1.4944793417047958e-05, + "loss": 1.2004, + "step": 5667 + }, + { + "epoch": 0.3553827826196, + "grad_norm": 3.099719285964966, + "learning_rate": 1.4943028196174632e-05, + "loss": 1.0942, + "step": 5668 + }, + { + "epoch": 0.3554454824753903, + "grad_norm": 2.9091248512268066, + "learning_rate": 1.4941262771450601e-05, + "loss": 1.3057, + "step": 5669 + }, + { + "epoch": 0.35550818233118064, + "grad_norm": 3.1533453464508057, + "learning_rate": 1.4939497142948685e-05, + "loss": 1.1432, + "step": 5670 + }, + { + "epoch": 0.35557088218697097, + "grad_norm": 2.7979702949523926, + "learning_rate": 1.4937731310741686e-05, + "loss": 1.176, + "step": 5671 + }, + { + "epoch": 0.3556335820427613, + "grad_norm": 3.021263837814331, + "learning_rate": 1.4935965274902434e-05, + "loss": 1.2055, + "step": 5672 + }, + { + "epoch": 0.3556962818985516, + "grad_norm": 2.768697738647461, + "learning_rate": 1.4934199035503758e-05, + "loss": 1.2452, + "step": 5673 + }, + { + "epoch": 0.35575898175434195, + "grad_norm": 2.7167727947235107, + "learning_rate": 1.49324325926185e-05, + "loss": 1.1954, + "step": 5674 + }, + { + "epoch": 0.3558216816101323, + "grad_norm": 3.3795249462127686, + "learning_rate": 1.4930665946319503e-05, + "loss": 1.1036, + "step": 5675 + }, + { + "epoch": 0.3558843814659226, + "grad_norm": 3.1215760707855225, + "learning_rate": 1.4928899096679628e-05, + "loss": 1.251, + "step": 5676 + }, + { + "epoch": 0.35594708132171293, + "grad_norm": 3.1308810710906982, + "learning_rate": 1.492713204377174e-05, + "loss": 1.1969, + "step": 5677 + }, + { + "epoch": 0.3560097811775033, + "grad_norm": 3.0395984649658203, + "learning_rate": 1.492536478766871e-05, + "loss": 1.1586, + "step": 5678 + }, + { + "epoch": 0.35607248103329364, + "grad_norm": 2.7318124771118164, + "learning_rate": 1.4923597328443423e-05, + "loss": 1.1144, + "step": 5679 + }, + { + "epoch": 0.35613518088908397, + "grad_norm": 2.7952473163604736, + "learning_rate": 1.4921829666168766e-05, + "loss": 1.1639, + "step": 5680 + }, + { + "epoch": 0.3561978807448743, + "grad_norm": 2.9969701766967773, + "learning_rate": 1.4920061800917637e-05, + "loss": 1.0518, + "step": 5681 + }, + { + "epoch": 0.3562605806006646, + "grad_norm": 3.0089452266693115, + "learning_rate": 1.4918293732762948e-05, + "loss": 1.2772, + "step": 5682 + }, + { + "epoch": 0.35632328045645495, + "grad_norm": 3.1912124156951904, + "learning_rate": 1.4916525461777611e-05, + "loss": 1.2152, + "step": 5683 + }, + { + "epoch": 0.3563859803122453, + "grad_norm": 2.959021806716919, + "learning_rate": 1.4914756988034547e-05, + "loss": 1.1856, + "step": 5684 + }, + { + "epoch": 0.3564486801680356, + "grad_norm": 2.7662527561187744, + "learning_rate": 1.491298831160669e-05, + "loss": 0.9815, + "step": 5685 + }, + { + "epoch": 0.35651138002382593, + "grad_norm": 3.05879807472229, + "learning_rate": 1.491121943256698e-05, + "loss": 1.0038, + "step": 5686 + }, + { + "epoch": 0.35657407987961626, + "grad_norm": 3.162724494934082, + "learning_rate": 1.4909450350988368e-05, + "loss": 1.1704, + "step": 5687 + }, + { + "epoch": 0.3566367797354066, + "grad_norm": 3.112232208251953, + "learning_rate": 1.4907681066943808e-05, + "loss": 1.1565, + "step": 5688 + }, + { + "epoch": 0.35669947959119697, + "grad_norm": 2.8951473236083984, + "learning_rate": 1.4905911580506269e-05, + "loss": 1.0152, + "step": 5689 + }, + { + "epoch": 
0.3567621794469873, + "grad_norm": 2.842991590499878, + "learning_rate": 1.4904141891748718e-05, + "loss": 1.1241, + "step": 5690 + }, + { + "epoch": 0.3568248793027776, + "grad_norm": 3.0487477779388428, + "learning_rate": 1.4902372000744144e-05, + "loss": 1.1411, + "step": 5691 + }, + { + "epoch": 0.35688757915856795, + "grad_norm": 2.988318681716919, + "learning_rate": 1.4900601907565535e-05, + "loss": 1.1136, + "step": 5692 + }, + { + "epoch": 0.3569502790143583, + "grad_norm": 2.906278610229492, + "learning_rate": 1.4898831612285887e-05, + "loss": 1.1882, + "step": 5693 + }, + { + "epoch": 0.3570129788701486, + "grad_norm": 2.849673271179199, + "learning_rate": 1.489706111497821e-05, + "loss": 1.1326, + "step": 5694 + }, + { + "epoch": 0.3570756787259389, + "grad_norm": 2.9416916370391846, + "learning_rate": 1.489529041571552e-05, + "loss": 1.1433, + "step": 5695 + }, + { + "epoch": 0.35713837858172925, + "grad_norm": 2.722468137741089, + "learning_rate": 1.489351951457084e-05, + "loss": 1.3362, + "step": 5696 + }, + { + "epoch": 0.3572010784375196, + "grad_norm": 3.242288589477539, + "learning_rate": 1.48917484116172e-05, + "loss": 1.0594, + "step": 5697 + }, + { + "epoch": 0.3572637782933099, + "grad_norm": 3.4381160736083984, + "learning_rate": 1.4889977106927642e-05, + "loss": 0.9771, + "step": 5698 + }, + { + "epoch": 0.35732647814910024, + "grad_norm": 2.993227243423462, + "learning_rate": 1.4888205600575214e-05, + "loss": 1.0355, + "step": 5699 + }, + { + "epoch": 0.35738917800489056, + "grad_norm": 2.8769516944885254, + "learning_rate": 1.4886433892632977e-05, + "loss": 1.094, + "step": 5700 + }, + { + "epoch": 0.35745187786068094, + "grad_norm": 2.838733196258545, + "learning_rate": 1.488466198317399e-05, + "loss": 1.0736, + "step": 5701 + }, + { + "epoch": 0.35751457771647127, + "grad_norm": 3.106626272201538, + "learning_rate": 1.4882889872271327e-05, + "loss": 1.1348, + "step": 5702 + }, + { + "epoch": 0.3575772775722616, + "grad_norm": 3.287414073944092, + "learning_rate": 1.4881117559998078e-05, + "loss": 1.0329, + "step": 5703 + }, + { + "epoch": 0.3576399774280519, + "grad_norm": 3.1213347911834717, + "learning_rate": 1.4879345046427322e-05, + "loss": 1.1007, + "step": 5704 + }, + { + "epoch": 0.35770267728384225, + "grad_norm": 2.871628999710083, + "learning_rate": 1.4877572331632169e-05, + "loss": 1.0747, + "step": 5705 + }, + { + "epoch": 0.3577653771396326, + "grad_norm": 3.003152370452881, + "learning_rate": 1.4875799415685715e-05, + "loss": 1.04, + "step": 5706 + }, + { + "epoch": 0.3578280769954229, + "grad_norm": 2.930497169494629, + "learning_rate": 1.4874026298661084e-05, + "loss": 1.151, + "step": 5707 + }, + { + "epoch": 0.35789077685121323, + "grad_norm": 3.3213999271392822, + "learning_rate": 1.4872252980631392e-05, + "loss": 1.1648, + "step": 5708 + }, + { + "epoch": 0.35795347670700356, + "grad_norm": 2.774994134902954, + "learning_rate": 1.4870479461669777e-05, + "loss": 1.0558, + "step": 5709 + }, + { + "epoch": 0.3580161765627939, + "grad_norm": 3.0590007305145264, + "learning_rate": 1.4868705741849376e-05, + "loss": 1.224, + "step": 5710 + }, + { + "epoch": 0.3580788764185842, + "grad_norm": 2.956541061401367, + "learning_rate": 1.4866931821243335e-05, + "loss": 1.1542, + "step": 5711 + }, + { + "epoch": 0.3581415762743746, + "grad_norm": 2.8257617950439453, + "learning_rate": 1.4865157699924817e-05, + "loss": 1.1658, + "step": 5712 + }, + { + "epoch": 0.3582042761301649, + "grad_norm": 3.0003445148468018, + "learning_rate": 1.486338337796698e-05, + 
"loss": 1.0963, + "step": 5713 + }, + { + "epoch": 0.35826697598595525, + "grad_norm": 2.873961925506592, + "learning_rate": 1.4861608855443002e-05, + "loss": 1.1544, + "step": 5714 + }, + { + "epoch": 0.3583296758417456, + "grad_norm": 3.0534069538116455, + "learning_rate": 1.485983413242606e-05, + "loss": 1.0164, + "step": 5715 + }, + { + "epoch": 0.3583923756975359, + "grad_norm": 3.057605743408203, + "learning_rate": 1.4858059208989351e-05, + "loss": 1.2777, + "step": 5716 + }, + { + "epoch": 0.35845507555332623, + "grad_norm": 3.308701276779175, + "learning_rate": 1.4856284085206063e-05, + "loss": 1.1632, + "step": 5717 + }, + { + "epoch": 0.35851777540911656, + "grad_norm": 3.306936264038086, + "learning_rate": 1.485450876114941e-05, + "loss": 1.1382, + "step": 5718 + }, + { + "epoch": 0.3585804752649069, + "grad_norm": 3.4744532108306885, + "learning_rate": 1.4852733236892602e-05, + "loss": 1.3012, + "step": 5719 + }, + { + "epoch": 0.3586431751206972, + "grad_norm": 2.8893914222717285, + "learning_rate": 1.4850957512508865e-05, + "loss": 1.138, + "step": 5720 + }, + { + "epoch": 0.35870587497648754, + "grad_norm": 3.3593997955322266, + "learning_rate": 1.484918158807143e-05, + "loss": 1.1083, + "step": 5721 + }, + { + "epoch": 0.35876857483227786, + "grad_norm": 3.0325331687927246, + "learning_rate": 1.484740546365353e-05, + "loss": 1.0881, + "step": 5722 + }, + { + "epoch": 0.3588312746880682, + "grad_norm": 2.8776607513427734, + "learning_rate": 1.4845629139328419e-05, + "loss": 1.0838, + "step": 5723 + }, + { + "epoch": 0.3588939745438586, + "grad_norm": 2.8659558296203613, + "learning_rate": 1.4843852615169352e-05, + "loss": 1.1202, + "step": 5724 + }, + { + "epoch": 0.3589566743996489, + "grad_norm": 3.0472071170806885, + "learning_rate": 1.484207589124959e-05, + "loss": 1.1494, + "step": 5725 + }, + { + "epoch": 0.3590193742554392, + "grad_norm": 2.933722734451294, + "learning_rate": 1.4840298967642408e-05, + "loss": 1.0782, + "step": 5726 + }, + { + "epoch": 0.35908207411122955, + "grad_norm": 3.1003592014312744, + "learning_rate": 1.4838521844421085e-05, + "loss": 1.0894, + "step": 5727 + }, + { + "epoch": 0.3591447739670199, + "grad_norm": 3.1869077682495117, + "learning_rate": 1.4836744521658908e-05, + "loss": 1.0604, + "step": 5728 + }, + { + "epoch": 0.3592074738228102, + "grad_norm": 3.156590700149536, + "learning_rate": 1.4834966999429179e-05, + "loss": 1.1209, + "step": 5729 + }, + { + "epoch": 0.35927017367860054, + "grad_norm": 3.1924781799316406, + "learning_rate": 1.4833189277805198e-05, + "loss": 1.1064, + "step": 5730 + }, + { + "epoch": 0.35933287353439086, + "grad_norm": 2.597198009490967, + "learning_rate": 1.4831411356860276e-05, + "loss": 1.0196, + "step": 5731 + }, + { + "epoch": 0.3593955733901812, + "grad_norm": 3.063662052154541, + "learning_rate": 1.4829633236667746e-05, + "loss": 1.1518, + "step": 5732 + }, + { + "epoch": 0.3594582732459715, + "grad_norm": 2.977048397064209, + "learning_rate": 1.4827854917300925e-05, + "loss": 1.0811, + "step": 5733 + }, + { + "epoch": 0.35952097310176184, + "grad_norm": 3.3563737869262695, + "learning_rate": 1.482607639883316e-05, + "loss": 1.2105, + "step": 5734 + }, + { + "epoch": 0.3595836729575522, + "grad_norm": 3.245161533355713, + "learning_rate": 1.4824297681337792e-05, + "loss": 1.1559, + "step": 5735 + }, + { + "epoch": 0.35964637281334255, + "grad_norm": 2.8907318115234375, + "learning_rate": 1.4822518764888177e-05, + "loss": 1.1488, + "step": 5736 + }, + { + "epoch": 0.3597090726691329, + "grad_norm": 
3.251016855239868, + "learning_rate": 1.4820739649557677e-05, + "loss": 1.1337, + "step": 5737 + }, + { + "epoch": 0.3597717725249232, + "grad_norm": 2.6190185546875, + "learning_rate": 1.4818960335419666e-05, + "loss": 1.2877, + "step": 5738 + }, + { + "epoch": 0.35983447238071353, + "grad_norm": 2.910036325454712, + "learning_rate": 1.4817180822547516e-05, + "loss": 1.0777, + "step": 5739 + }, + { + "epoch": 0.35989717223650386, + "grad_norm": 3.2518107891082764, + "learning_rate": 1.4815401111014623e-05, + "loss": 1.0265, + "step": 5740 + }, + { + "epoch": 0.3599598720922942, + "grad_norm": 2.9666125774383545, + "learning_rate": 1.4813621200894373e-05, + "loss": 1.2441, + "step": 5741 + }, + { + "epoch": 0.3600225719480845, + "grad_norm": 3.0176656246185303, + "learning_rate": 1.4811841092260175e-05, + "loss": 1.1102, + "step": 5742 + }, + { + "epoch": 0.36008527180387484, + "grad_norm": 3.28926682472229, + "learning_rate": 1.4810060785185445e-05, + "loss": 1.1072, + "step": 5743 + }, + { + "epoch": 0.36014797165966517, + "grad_norm": 3.0533907413482666, + "learning_rate": 1.4808280279743594e-05, + "loss": 1.0915, + "step": 5744 + }, + { + "epoch": 0.3602106715154555, + "grad_norm": 3.0907325744628906, + "learning_rate": 1.4806499576008058e-05, + "loss": 1.2305, + "step": 5745 + }, + { + "epoch": 0.3602733713712458, + "grad_norm": 2.9063217639923096, + "learning_rate": 1.4804718674052265e-05, + "loss": 1.0313, + "step": 5746 + }, + { + "epoch": 0.3603360712270362, + "grad_norm": 3.4888811111450195, + "learning_rate": 1.4802937573949667e-05, + "loss": 1.2126, + "step": 5747 + }, + { + "epoch": 0.36039877108282653, + "grad_norm": 2.9400405883789062, + "learning_rate": 1.4801156275773712e-05, + "loss": 1.2777, + "step": 5748 + }, + { + "epoch": 0.36046147093861686, + "grad_norm": 3.041386842727661, + "learning_rate": 1.4799374779597866e-05, + "loss": 1.1733, + "step": 5749 + }, + { + "epoch": 0.3605241707944072, + "grad_norm": 2.8808646202087402, + "learning_rate": 1.479759308549559e-05, + "loss": 1.0216, + "step": 5750 + }, + { + "epoch": 0.3605868706501975, + "grad_norm": 2.7764952182769775, + "learning_rate": 1.479581119354037e-05, + "loss": 1.1114, + "step": 5751 + }, + { + "epoch": 0.36064957050598784, + "grad_norm": 2.9398858547210693, + "learning_rate": 1.4794029103805682e-05, + "loss": 1.3129, + "step": 5752 + }, + { + "epoch": 0.36071227036177816, + "grad_norm": 3.0758609771728516, + "learning_rate": 1.4792246816365028e-05, + "loss": 1.167, + "step": 5753 + }, + { + "epoch": 0.3607749702175685, + "grad_norm": 2.9060754776000977, + "learning_rate": 1.4790464331291906e-05, + "loss": 1.064, + "step": 5754 + }, + { + "epoch": 0.3608376700733588, + "grad_norm": 2.760810136795044, + "learning_rate": 1.4788681648659824e-05, + "loss": 1.2192, + "step": 5755 + }, + { + "epoch": 0.36090036992914915, + "grad_norm": 2.9063456058502197, + "learning_rate": 1.4786898768542304e-05, + "loss": 1.1254, + "step": 5756 + }, + { + "epoch": 0.36096306978493947, + "grad_norm": 3.0595321655273438, + "learning_rate": 1.4785115691012866e-05, + "loss": 1.2609, + "step": 5757 + }, + { + "epoch": 0.3610257696407298, + "grad_norm": 3.13797926902771, + "learning_rate": 1.4783332416145051e-05, + "loss": 1.1102, + "step": 5758 + }, + { + "epoch": 0.3610884694965202, + "grad_norm": 2.8096020221710205, + "learning_rate": 1.4781548944012398e-05, + "loss": 1.2187, + "step": 5759 + }, + { + "epoch": 0.3611511693523105, + "grad_norm": 3.197852373123169, + "learning_rate": 1.4779765274688458e-05, + "loss": 1.1418, + 
"step": 5760 + }, + { + "epoch": 0.36121386920810084, + "grad_norm": 2.7916600704193115, + "learning_rate": 1.4777981408246787e-05, + "loss": 1.1524, + "step": 5761 + }, + { + "epoch": 0.36127656906389116, + "grad_norm": 3.045088291168213, + "learning_rate": 1.4776197344760954e-05, + "loss": 1.0946, + "step": 5762 + }, + { + "epoch": 0.3613392689196815, + "grad_norm": 3.213036298751831, + "learning_rate": 1.4774413084304538e-05, + "loss": 1.0292, + "step": 5763 + }, + { + "epoch": 0.3614019687754718, + "grad_norm": 2.922560691833496, + "learning_rate": 1.4772628626951114e-05, + "loss": 1.1587, + "step": 5764 + }, + { + "epoch": 0.36146466863126214, + "grad_norm": 2.8422584533691406, + "learning_rate": 1.477084397277428e-05, + "loss": 1.1467, + "step": 5765 + }, + { + "epoch": 0.36152736848705247, + "grad_norm": 3.0150952339172363, + "learning_rate": 1.476905912184763e-05, + "loss": 1.1202, + "step": 5766 + }, + { + "epoch": 0.3615900683428428, + "grad_norm": 2.956650733947754, + "learning_rate": 1.4767274074244775e-05, + "loss": 1.1019, + "step": 5767 + }, + { + "epoch": 0.3616527681986331, + "grad_norm": 3.2791640758514404, + "learning_rate": 1.4765488830039327e-05, + "loss": 0.9596, + "step": 5768 + }, + { + "epoch": 0.36171546805442345, + "grad_norm": 2.730635404586792, + "learning_rate": 1.4763703389304913e-05, + "loss": 1.2519, + "step": 5769 + }, + { + "epoch": 0.36177816791021383, + "grad_norm": 2.939218282699585, + "learning_rate": 1.4761917752115162e-05, + "loss": 1.2647, + "step": 5770 + }, + { + "epoch": 0.36184086776600416, + "grad_norm": 3.225619316101074, + "learning_rate": 1.4760131918543717e-05, + "loss": 1.1607, + "step": 5771 + }, + { + "epoch": 0.3619035676217945, + "grad_norm": 2.761284351348877, + "learning_rate": 1.4758345888664222e-05, + "loss": 1.2047, + "step": 5772 + }, + { + "epoch": 0.3619662674775848, + "grad_norm": 3.5151374340057373, + "learning_rate": 1.4756559662550337e-05, + "loss": 0.9233, + "step": 5773 + }, + { + "epoch": 0.36202896733337514, + "grad_norm": 3.0265989303588867, + "learning_rate": 1.4754773240275721e-05, + "loss": 1.0019, + "step": 5774 + }, + { + "epoch": 0.36209166718916547, + "grad_norm": 3.0256028175354004, + "learning_rate": 1.4752986621914049e-05, + "loss": 1.1069, + "step": 5775 + }, + { + "epoch": 0.3621543670449558, + "grad_norm": 3.0642473697662354, + "learning_rate": 1.4751199807539004e-05, + "loss": 1.1371, + "step": 5776 + }, + { + "epoch": 0.3622170669007461, + "grad_norm": 2.9329049587249756, + "learning_rate": 1.474941279722427e-05, + "loss": 1.1206, + "step": 5777 + }, + { + "epoch": 0.36227976675653645, + "grad_norm": 3.3935439586639404, + "learning_rate": 1.4747625591043544e-05, + "loss": 1.1381, + "step": 5778 + }, + { + "epoch": 0.3623424666123268, + "grad_norm": 3.0271036624908447, + "learning_rate": 1.4745838189070531e-05, + "loss": 1.048, + "step": 5779 + }, + { + "epoch": 0.3624051664681171, + "grad_norm": 3.162738800048828, + "learning_rate": 1.4744050591378947e-05, + "loss": 1.1771, + "step": 5780 + }, + { + "epoch": 0.36246786632390743, + "grad_norm": 2.9259119033813477, + "learning_rate": 1.4742262798042505e-05, + "loss": 0.9913, + "step": 5781 + }, + { + "epoch": 0.3625305661796978, + "grad_norm": 3.307441234588623, + "learning_rate": 1.4740474809134941e-05, + "loss": 1.1489, + "step": 5782 + }, + { + "epoch": 0.36259326603548814, + "grad_norm": 2.842125654220581, + "learning_rate": 1.4738686624729987e-05, + "loss": 0.9843, + "step": 5783 + }, + { + "epoch": 0.36265596589127846, + "grad_norm": 
2.8847243785858154, + "learning_rate": 1.4736898244901392e-05, + "loss": 1.2618, + "step": 5784 + }, + { + "epoch": 0.3627186657470688, + "grad_norm": 2.912876605987549, + "learning_rate": 1.4735109669722905e-05, + "loss": 0.9945, + "step": 5785 + }, + { + "epoch": 0.3627813656028591, + "grad_norm": 3.021800994873047, + "learning_rate": 1.4733320899268288e-05, + "loss": 1.1986, + "step": 5786 + }, + { + "epoch": 0.36284406545864945, + "grad_norm": 3.020598888397217, + "learning_rate": 1.4731531933611311e-05, + "loss": 1.1218, + "step": 5787 + }, + { + "epoch": 0.36290676531443977, + "grad_norm": 2.987377882003784, + "learning_rate": 1.472974277282575e-05, + "loss": 1.2681, + "step": 5788 + }, + { + "epoch": 0.3629694651702301, + "grad_norm": 3.2781026363372803, + "learning_rate": 1.4727953416985394e-05, + "loss": 1.2467, + "step": 5789 + }, + { + "epoch": 0.3630321650260204, + "grad_norm": 3.0168352127075195, + "learning_rate": 1.472616386616403e-05, + "loss": 1.0205, + "step": 5790 + }, + { + "epoch": 0.36309486488181075, + "grad_norm": 2.939974546432495, + "learning_rate": 1.472437412043546e-05, + "loss": 1.0501, + "step": 5791 + }, + { + "epoch": 0.3631575647376011, + "grad_norm": 2.7103874683380127, + "learning_rate": 1.4722584179873496e-05, + "loss": 1.0088, + "step": 5792 + }, + { + "epoch": 0.36322026459339146, + "grad_norm": 2.850207567214966, + "learning_rate": 1.4720794044551954e-05, + "loss": 1.0024, + "step": 5793 + }, + { + "epoch": 0.3632829644491818, + "grad_norm": 2.8045032024383545, + "learning_rate": 1.4719003714544662e-05, + "loss": 1.1045, + "step": 5794 + }, + { + "epoch": 0.3633456643049721, + "grad_norm": 2.833685874938965, + "learning_rate": 1.4717213189925448e-05, + "loss": 1.1262, + "step": 5795 + }, + { + "epoch": 0.36340836416076244, + "grad_norm": 3.416297674179077, + "learning_rate": 1.4715422470768157e-05, + "loss": 1.1483, + "step": 5796 + }, + { + "epoch": 0.36347106401655277, + "grad_norm": 2.95973539352417, + "learning_rate": 1.4713631557146637e-05, + "loss": 1.2429, + "step": 5797 + }, + { + "epoch": 0.3635337638723431, + "grad_norm": 2.996750593185425, + "learning_rate": 1.4711840449134746e-05, + "loss": 1.2617, + "step": 5798 + }, + { + "epoch": 0.3635964637281334, + "grad_norm": 3.122908592224121, + "learning_rate": 1.4710049146806348e-05, + "loss": 1.0863, + "step": 5799 + }, + { + "epoch": 0.36365916358392375, + "grad_norm": 2.840146780014038, + "learning_rate": 1.470825765023532e-05, + "loss": 1.1438, + "step": 5800 + }, + { + "epoch": 0.3637218634397141, + "grad_norm": 3.0698885917663574, + "learning_rate": 1.470646595949554e-05, + "loss": 1.0708, + "step": 5801 + }, + { + "epoch": 0.3637845632955044, + "grad_norm": 3.0505528450012207, + "learning_rate": 1.4704674074660898e-05, + "loss": 1.0655, + "step": 5802 + }, + { + "epoch": 0.36384726315129473, + "grad_norm": 2.702427864074707, + "learning_rate": 1.4702881995805291e-05, + "loss": 1.2839, + "step": 5803 + }, + { + "epoch": 0.36390996300708506, + "grad_norm": 2.776036262512207, + "learning_rate": 1.4701089723002623e-05, + "loss": 1.0006, + "step": 5804 + }, + { + "epoch": 0.36397266286287544, + "grad_norm": 3.0059542655944824, + "learning_rate": 1.4699297256326815e-05, + "loss": 1.1397, + "step": 5805 + }, + { + "epoch": 0.36403536271866577, + "grad_norm": 3.0174667835235596, + "learning_rate": 1.469750459585178e-05, + "loss": 1.0326, + "step": 5806 + }, + { + "epoch": 0.3640980625744561, + "grad_norm": 3.002368688583374, + "learning_rate": 1.4695711741651451e-05, + "loss": 1.0815, + "step": 
5807 + }, + { + "epoch": 0.3641607624302464, + "grad_norm": 3.0597243309020996, + "learning_rate": 1.4693918693799762e-05, + "loss": 1.0697, + "step": 5808 + }, + { + "epoch": 0.36422346228603675, + "grad_norm": 2.912083387374878, + "learning_rate": 1.4692125452370664e-05, + "loss": 1.1492, + "step": 5809 + }, + { + "epoch": 0.3642861621418271, + "grad_norm": 2.936767101287842, + "learning_rate": 1.4690332017438108e-05, + "loss": 1.1236, + "step": 5810 + }, + { + "epoch": 0.3643488619976174, + "grad_norm": 3.0627124309539795, + "learning_rate": 1.4688538389076052e-05, + "loss": 1.1633, + "step": 5811 + }, + { + "epoch": 0.36441156185340773, + "grad_norm": 2.9692296981811523, + "learning_rate": 1.4686744567358468e-05, + "loss": 1.0376, + "step": 5812 + }, + { + "epoch": 0.36447426170919806, + "grad_norm": 2.783947229385376, + "learning_rate": 1.4684950552359335e-05, + "loss": 1.257, + "step": 5813 + }, + { + "epoch": 0.3645369615649884, + "grad_norm": 3.220231294631958, + "learning_rate": 1.4683156344152637e-05, + "loss": 1.0998, + "step": 5814 + }, + { + "epoch": 0.3645996614207787, + "grad_norm": 3.4037797451019287, + "learning_rate": 1.4681361942812366e-05, + "loss": 1.2387, + "step": 5815 + }, + { + "epoch": 0.3646623612765691, + "grad_norm": 2.7196502685546875, + "learning_rate": 1.4679567348412524e-05, + "loss": 1.0192, + "step": 5816 + }, + { + "epoch": 0.3647250611323594, + "grad_norm": 2.843207836151123, + "learning_rate": 1.4677772561027121e-05, + "loss": 1.2779, + "step": 5817 + }, + { + "epoch": 0.36478776098814975, + "grad_norm": 2.8840177059173584, + "learning_rate": 1.4675977580730175e-05, + "loss": 0.9239, + "step": 5818 + }, + { + "epoch": 0.36485046084394007, + "grad_norm": 2.9260928630828857, + "learning_rate": 1.4674182407595705e-05, + "loss": 1.1229, + "step": 5819 + }, + { + "epoch": 0.3649131606997304, + "grad_norm": 3.300645351409912, + "learning_rate": 1.4672387041697751e-05, + "loss": 1.0559, + "step": 5820 + }, + { + "epoch": 0.3649758605555207, + "grad_norm": 3.3159518241882324, + "learning_rate": 1.4670591483110351e-05, + "loss": 1.3279, + "step": 5821 + }, + { + "epoch": 0.36503856041131105, + "grad_norm": 3.1264705657958984, + "learning_rate": 1.4668795731907555e-05, + "loss": 1.099, + "step": 5822 + }, + { + "epoch": 0.3651012602671014, + "grad_norm": 2.997669219970703, + "learning_rate": 1.4666999788163418e-05, + "loss": 1.229, + "step": 5823 + }, + { + "epoch": 0.3651639601228917, + "grad_norm": 2.8875300884246826, + "learning_rate": 1.4665203651952004e-05, + "loss": 1.1137, + "step": 5824 + }, + { + "epoch": 0.36522665997868203, + "grad_norm": 3.082704544067383, + "learning_rate": 1.4663407323347392e-05, + "loss": 1.1212, + "step": 5825 + }, + { + "epoch": 0.36528935983447236, + "grad_norm": 3.002458095550537, + "learning_rate": 1.4661610802423657e-05, + "loss": 1.1153, + "step": 5826 + }, + { + "epoch": 0.3653520596902627, + "grad_norm": 2.9881861209869385, + "learning_rate": 1.4659814089254889e-05, + "loss": 1.1259, + "step": 5827 + }, + { + "epoch": 0.36541475954605307, + "grad_norm": 3.0229697227478027, + "learning_rate": 1.4658017183915184e-05, + "loss": 0.9051, + "step": 5828 + }, + { + "epoch": 0.3654774594018434, + "grad_norm": 2.8103814125061035, + "learning_rate": 1.4656220086478645e-05, + "loss": 1.1479, + "step": 5829 + }, + { + "epoch": 0.3655401592576337, + "grad_norm": 2.8027613162994385, + "learning_rate": 1.4654422797019387e-05, + "loss": 1.2015, + "step": 5830 + }, + { + "epoch": 0.36560285911342405, + "grad_norm": 2.847886562347412, + 
"learning_rate": 1.4652625315611533e-05, + "loss": 1.2011, + "step": 5831 + }, + { + "epoch": 0.3656655589692144, + "grad_norm": 3.137190818786621, + "learning_rate": 1.4650827642329203e-05, + "loss": 1.1593, + "step": 5832 + }, + { + "epoch": 0.3657282588250047, + "grad_norm": 3.0327911376953125, + "learning_rate": 1.4649029777246542e-05, + "loss": 1.3039, + "step": 5833 + }, + { + "epoch": 0.36579095868079503, + "grad_norm": 2.984509229660034, + "learning_rate": 1.4647231720437687e-05, + "loss": 1.1037, + "step": 5834 + }, + { + "epoch": 0.36585365853658536, + "grad_norm": 2.9384279251098633, + "learning_rate": 1.4645433471976793e-05, + "loss": 1.0624, + "step": 5835 + }, + { + "epoch": 0.3659163583923757, + "grad_norm": 3.017202377319336, + "learning_rate": 1.4643635031938023e-05, + "loss": 1.0418, + "step": 5836 + }, + { + "epoch": 0.365979058248166, + "grad_norm": 2.7980852127075195, + "learning_rate": 1.4641836400395536e-05, + "loss": 1.1463, + "step": 5837 + }, + { + "epoch": 0.36604175810395634, + "grad_norm": 3.012012481689453, + "learning_rate": 1.464003757742352e-05, + "loss": 1.11, + "step": 5838 + }, + { + "epoch": 0.36610445795974667, + "grad_norm": 3.120666265487671, + "learning_rate": 1.4638238563096145e-05, + "loss": 1.0519, + "step": 5839 + }, + { + "epoch": 0.36616715781553705, + "grad_norm": 3.0831215381622314, + "learning_rate": 1.4636439357487615e-05, + "loss": 1.0461, + "step": 5840 + }, + { + "epoch": 0.3662298576713274, + "grad_norm": 3.168412685394287, + "learning_rate": 1.463463996067212e-05, + "loss": 1.1227, + "step": 5841 + }, + { + "epoch": 0.3662925575271177, + "grad_norm": 3.303638458251953, + "learning_rate": 1.4632840372723873e-05, + "loss": 1.1091, + "step": 5842 + }, + { + "epoch": 0.36635525738290803, + "grad_norm": 2.9065868854522705, + "learning_rate": 1.4631040593717082e-05, + "loss": 1.1782, + "step": 5843 + }, + { + "epoch": 0.36641795723869836, + "grad_norm": 3.5288586616516113, + "learning_rate": 1.4629240623725977e-05, + "loss": 1.082, + "step": 5844 + }, + { + "epoch": 0.3664806570944887, + "grad_norm": 3.293016195297241, + "learning_rate": 1.4627440462824789e-05, + "loss": 1.1405, + "step": 5845 + }, + { + "epoch": 0.366543356950279, + "grad_norm": 3.206897497177124, + "learning_rate": 1.4625640111087753e-05, + "loss": 1.1463, + "step": 5846 + }, + { + "epoch": 0.36660605680606934, + "grad_norm": 2.8886802196502686, + "learning_rate": 1.4623839568589118e-05, + "loss": 1.1486, + "step": 5847 + }, + { + "epoch": 0.36666875666185966, + "grad_norm": 3.1182596683502197, + "learning_rate": 1.4622038835403135e-05, + "loss": 1.0155, + "step": 5848 + }, + { + "epoch": 0.36673145651765, + "grad_norm": 3.2501587867736816, + "learning_rate": 1.4620237911604069e-05, + "loss": 1.0156, + "step": 5849 + }, + { + "epoch": 0.3667941563734403, + "grad_norm": 2.7711474895477295, + "learning_rate": 1.461843679726619e-05, + "loss": 1.1556, + "step": 5850 + }, + { + "epoch": 0.3668568562292307, + "grad_norm": 2.731078624725342, + "learning_rate": 1.4616635492463775e-05, + "loss": 1.0736, + "step": 5851 + }, + { + "epoch": 0.366919556085021, + "grad_norm": 3.13114070892334, + "learning_rate": 1.461483399727111e-05, + "loss": 1.3088, + "step": 5852 + }, + { + "epoch": 0.36698225594081135, + "grad_norm": 2.892240285873413, + "learning_rate": 1.4613032311762492e-05, + "loss": 1.1922, + "step": 5853 + }, + { + "epoch": 0.3670449557966017, + "grad_norm": 3.741736888885498, + "learning_rate": 1.4611230436012217e-05, + "loss": 1.2349, + "step": 5854 + }, + { + "epoch": 
0.367107655652392, + "grad_norm": 2.8333775997161865, + "learning_rate": 1.46094283700946e-05, + "loss": 1.1441, + "step": 5855 + }, + { + "epoch": 0.36717035550818233, + "grad_norm": 3.0806918144226074, + "learning_rate": 1.4607626114083956e-05, + "loss": 1.0259, + "step": 5856 + }, + { + "epoch": 0.36723305536397266, + "grad_norm": 2.958155393600464, + "learning_rate": 1.4605823668054609e-05, + "loss": 1.1527, + "step": 5857 + }, + { + "epoch": 0.367295755219763, + "grad_norm": 3.0388290882110596, + "learning_rate": 1.4604021032080894e-05, + "loss": 1.3316, + "step": 5858 + }, + { + "epoch": 0.3673584550755533, + "grad_norm": 3.049938440322876, + "learning_rate": 1.4602218206237147e-05, + "loss": 1.0125, + "step": 5859 + }, + { + "epoch": 0.36742115493134364, + "grad_norm": 3.262890100479126, + "learning_rate": 1.4600415190597723e-05, + "loss": 1.091, + "step": 5860 + }, + { + "epoch": 0.36748385478713397, + "grad_norm": 2.8263111114501953, + "learning_rate": 1.4598611985236975e-05, + "loss": 1.0579, + "step": 5861 + }, + { + "epoch": 0.3675465546429243, + "grad_norm": 2.786266803741455, + "learning_rate": 1.4596808590229267e-05, + "loss": 1.1449, + "step": 5862 + }, + { + "epoch": 0.3676092544987147, + "grad_norm": 2.8795480728149414, + "learning_rate": 1.4595005005648975e-05, + "loss": 1.1002, + "step": 5863 + }, + { + "epoch": 0.367671954354505, + "grad_norm": 2.884925603866577, + "learning_rate": 1.4593201231570474e-05, + "loss": 1.1606, + "step": 5864 + }, + { + "epoch": 0.36773465421029533, + "grad_norm": 2.9865901470184326, + "learning_rate": 1.4591397268068153e-05, + "loss": 1.1572, + "step": 5865 + }, + { + "epoch": 0.36779735406608566, + "grad_norm": 3.010122776031494, + "learning_rate": 1.4589593115216407e-05, + "loss": 0.9487, + "step": 5866 + }, + { + "epoch": 0.367860053921876, + "grad_norm": 2.826673746109009, + "learning_rate": 1.4587788773089644e-05, + "loss": 1.0604, + "step": 5867 + }, + { + "epoch": 0.3679227537776663, + "grad_norm": 3.012464761734009, + "learning_rate": 1.4585984241762268e-05, + "loss": 1.1854, + "step": 5868 + }, + { + "epoch": 0.36798545363345664, + "grad_norm": 2.828927755355835, + "learning_rate": 1.4584179521308703e-05, + "loss": 1.155, + "step": 5869 + }, + { + "epoch": 0.36804815348924697, + "grad_norm": 3.0709712505340576, + "learning_rate": 1.4582374611803377e-05, + "loss": 1.0992, + "step": 5870 + }, + { + "epoch": 0.3681108533450373, + "grad_norm": 2.9627525806427, + "learning_rate": 1.4580569513320718e-05, + "loss": 1.1758, + "step": 5871 + }, + { + "epoch": 0.3681735532008276, + "grad_norm": 3.1371829509735107, + "learning_rate": 1.4578764225935173e-05, + "loss": 1.1512, + "step": 5872 + }, + { + "epoch": 0.36823625305661795, + "grad_norm": 2.792966604232788, + "learning_rate": 1.4576958749721192e-05, + "loss": 1.2558, + "step": 5873 + }, + { + "epoch": 0.36829895291240833, + "grad_norm": 2.8570327758789062, + "learning_rate": 1.4575153084753233e-05, + "loss": 1.1334, + "step": 5874 + }, + { + "epoch": 0.36836165276819866, + "grad_norm": 3.211273670196533, + "learning_rate": 1.4573347231105759e-05, + "loss": 1.0333, + "step": 5875 + }, + { + "epoch": 0.368424352623989, + "grad_norm": 2.931406021118164, + "learning_rate": 1.457154118885325e-05, + "loss": 1.2558, + "step": 5876 + }, + { + "epoch": 0.3684870524797793, + "grad_norm": 3.439035177230835, + "learning_rate": 1.4569734958070178e-05, + "loss": 0.9943, + "step": 5877 + }, + { + "epoch": 0.36854975233556964, + "grad_norm": 3.075155258178711, + "learning_rate": 
1.4567928538831039e-05, + "loss": 1.1438, + "step": 5878 + }, + { + "epoch": 0.36861245219135996, + "grad_norm": 3.1813316345214844, + "learning_rate": 1.4566121931210326e-05, + "loss": 1.1307, + "step": 5879 + }, + { + "epoch": 0.3686751520471503, + "grad_norm": 3.6078591346740723, + "learning_rate": 1.4564315135282547e-05, + "loss": 1.0419, + "step": 5880 + }, + { + "epoch": 0.3687378519029406, + "grad_norm": 3.0589661598205566, + "learning_rate": 1.4562508151122213e-05, + "loss": 1.1821, + "step": 5881 + }, + { + "epoch": 0.36880055175873094, + "grad_norm": 2.861844301223755, + "learning_rate": 1.4560700978803841e-05, + "loss": 1.1741, + "step": 5882 + }, + { + "epoch": 0.36886325161452127, + "grad_norm": 3.585294723510742, + "learning_rate": 1.4558893618401961e-05, + "loss": 0.8906, + "step": 5883 + }, + { + "epoch": 0.3689259514703116, + "grad_norm": 3.13802433013916, + "learning_rate": 1.4557086069991113e-05, + "loss": 1.0126, + "step": 5884 + }, + { + "epoch": 0.3689886513261019, + "grad_norm": 2.764863967895508, + "learning_rate": 1.4555278333645833e-05, + "loss": 1.1555, + "step": 5885 + }, + { + "epoch": 0.3690513511818923, + "grad_norm": 2.987048864364624, + "learning_rate": 1.4553470409440675e-05, + "loss": 1.1535, + "step": 5886 + }, + { + "epoch": 0.36911405103768263, + "grad_norm": 2.9772846698760986, + "learning_rate": 1.4551662297450201e-05, + "loss": 1.164, + "step": 5887 + }, + { + "epoch": 0.36917675089347296, + "grad_norm": 2.806941032409668, + "learning_rate": 1.4549853997748975e-05, + "loss": 1.1039, + "step": 5888 + }, + { + "epoch": 0.3692394507492633, + "grad_norm": 3.140909433364868, + "learning_rate": 1.4548045510411573e-05, + "loss": 1.144, + "step": 5889 + }, + { + "epoch": 0.3693021506050536, + "grad_norm": 2.6808245182037354, + "learning_rate": 1.4546236835512572e-05, + "loss": 1.0126, + "step": 5890 + }, + { + "epoch": 0.36936485046084394, + "grad_norm": 3.2026455402374268, + "learning_rate": 1.4544427973126568e-05, + "loss": 1.11, + "step": 5891 + }, + { + "epoch": 0.36942755031663427, + "grad_norm": 2.9124555587768555, + "learning_rate": 1.4542618923328154e-05, + "loss": 1.0507, + "step": 5892 + }, + { + "epoch": 0.3694902501724246, + "grad_norm": 3.371222496032715, + "learning_rate": 1.4540809686191939e-05, + "loss": 1.0777, + "step": 5893 + }, + { + "epoch": 0.3695529500282149, + "grad_norm": 3.0490407943725586, + "learning_rate": 1.4539000261792532e-05, + "loss": 1.3203, + "step": 5894 + }, + { + "epoch": 0.36961564988400525, + "grad_norm": 3.0168168544769287, + "learning_rate": 1.4537190650204555e-05, + "loss": 1.0682, + "step": 5895 + }, + { + "epoch": 0.3696783497397956, + "grad_norm": 2.9264869689941406, + "learning_rate": 1.4535380851502644e-05, + "loss": 1.1386, + "step": 5896 + }, + { + "epoch": 0.36974104959558596, + "grad_norm": 2.8299474716186523, + "learning_rate": 1.4533570865761422e-05, + "loss": 1.2395, + "step": 5897 + }, + { + "epoch": 0.3698037494513763, + "grad_norm": 3.1379196643829346, + "learning_rate": 1.4531760693055543e-05, + "loss": 1.1192, + "step": 5898 + }, + { + "epoch": 0.3698664493071666, + "grad_norm": 2.826357126235962, + "learning_rate": 1.4529950333459653e-05, + "loss": 1.2876, + "step": 5899 + }, + { + "epoch": 0.36992914916295694, + "grad_norm": 2.8465354442596436, + "learning_rate": 1.4528139787048414e-05, + "loss": 1.1097, + "step": 5900 + }, + { + "epoch": 0.36999184901874727, + "grad_norm": 2.7509844303131104, + "learning_rate": 1.4526329053896492e-05, + "loss": 1.2903, + "step": 5901 + }, + { + "epoch": 
0.3700545488745376, + "grad_norm": 3.0409774780273438, + "learning_rate": 1.4524518134078565e-05, + "loss": 1.0912, + "step": 5902 + }, + { + "epoch": 0.3701172487303279, + "grad_norm": 3.2634823322296143, + "learning_rate": 1.4522707027669309e-05, + "loss": 1.2588, + "step": 5903 + }, + { + "epoch": 0.37017994858611825, + "grad_norm": 3.2547361850738525, + "learning_rate": 1.4520895734743419e-05, + "loss": 1.0319, + "step": 5904 + }, + { + "epoch": 0.3702426484419086, + "grad_norm": 3.485206127166748, + "learning_rate": 1.4519084255375591e-05, + "loss": 1.1634, + "step": 5905 + }, + { + "epoch": 0.3703053482976989, + "grad_norm": 3.028611898422241, + "learning_rate": 1.4517272589640532e-05, + "loss": 1.0801, + "step": 5906 + }, + { + "epoch": 0.3703680481534892, + "grad_norm": 3.155036687850952, + "learning_rate": 1.4515460737612954e-05, + "loss": 1.1375, + "step": 5907 + }, + { + "epoch": 0.37043074800927955, + "grad_norm": 2.8203563690185547, + "learning_rate": 1.4513648699367577e-05, + "loss": 1.1886, + "step": 5908 + }, + { + "epoch": 0.37049344786506994, + "grad_norm": 3.0514137744903564, + "learning_rate": 1.4511836474979133e-05, + "loss": 1.1504, + "step": 5909 + }, + { + "epoch": 0.37055614772086026, + "grad_norm": 2.8867504596710205, + "learning_rate": 1.4510024064522353e-05, + "loss": 1.1448, + "step": 5910 + }, + { + "epoch": 0.3706188475766506, + "grad_norm": 3.334014415740967, + "learning_rate": 1.4508211468071985e-05, + "loss": 0.9403, + "step": 5911 + }, + { + "epoch": 0.3706815474324409, + "grad_norm": 2.8676061630249023, + "learning_rate": 1.4506398685702778e-05, + "loss": 1.1248, + "step": 5912 + }, + { + "epoch": 0.37074424728823124, + "grad_norm": 3.0284595489501953, + "learning_rate": 1.4504585717489494e-05, + "loss": 1.1758, + "step": 5913 + }, + { + "epoch": 0.37080694714402157, + "grad_norm": 2.6828770637512207, + "learning_rate": 1.4502772563506898e-05, + "loss": 1.1547, + "step": 5914 + }, + { + "epoch": 0.3708696469998119, + "grad_norm": 3.12772274017334, + "learning_rate": 1.4500959223829764e-05, + "loss": 1.107, + "step": 5915 + }, + { + "epoch": 0.3709323468556022, + "grad_norm": 3.0412118434906006, + "learning_rate": 1.4499145698532875e-05, + "loss": 1.0871, + "step": 5916 + }, + { + "epoch": 0.37099504671139255, + "grad_norm": 3.094982862472534, + "learning_rate": 1.4497331987691019e-05, + "loss": 1.1292, + "step": 5917 + }, + { + "epoch": 0.3710577465671829, + "grad_norm": 3.042786121368408, + "learning_rate": 1.4495518091378996e-05, + "loss": 0.9682, + "step": 5918 + }, + { + "epoch": 0.3711204464229732, + "grad_norm": 3.167666435241699, + "learning_rate": 1.4493704009671614e-05, + "loss": 1.2447, + "step": 5919 + }, + { + "epoch": 0.37118314627876353, + "grad_norm": 2.9610073566436768, + "learning_rate": 1.4491889742643681e-05, + "loss": 1.2048, + "step": 5920 + }, + { + "epoch": 0.3712458461345539, + "grad_norm": 3.068211078643799, + "learning_rate": 1.4490075290370018e-05, + "loss": 1.1464, + "step": 5921 + }, + { + "epoch": 0.37130854599034424, + "grad_norm": 3.1902146339416504, + "learning_rate": 1.4488260652925454e-05, + "loss": 1.0276, + "step": 5922 + }, + { + "epoch": 0.37137124584613457, + "grad_norm": 3.0360305309295654, + "learning_rate": 1.4486445830384825e-05, + "loss": 1.1212, + "step": 5923 + }, + { + "epoch": 0.3714339457019249, + "grad_norm": 2.919142246246338, + "learning_rate": 1.4484630822822976e-05, + "loss": 1.236, + "step": 5924 + }, + { + "epoch": 0.3714966455577152, + "grad_norm": 3.267573833465576, + "learning_rate": 
1.4482815630314752e-05, + "loss": 1.0256, + "step": 5925 + }, + { + "epoch": 0.37155934541350555, + "grad_norm": 3.171717405319214, + "learning_rate": 1.448100025293502e-05, + "loss": 0.9927, + "step": 5926 + }, + { + "epoch": 0.3716220452692959, + "grad_norm": 2.979898452758789, + "learning_rate": 1.4479184690758638e-05, + "loss": 1.2269, + "step": 5927 + }, + { + "epoch": 0.3716847451250862, + "grad_norm": 3.2675528526306152, + "learning_rate": 1.4477368943860487e-05, + "loss": 1.1252, + "step": 5928 + }, + { + "epoch": 0.37174744498087653, + "grad_norm": 3.1182687282562256, + "learning_rate": 1.4475553012315441e-05, + "loss": 0.9917, + "step": 5929 + }, + { + "epoch": 0.37181014483666686, + "grad_norm": 3.4317235946655273, + "learning_rate": 1.4473736896198395e-05, + "loss": 1.2232, + "step": 5930 + }, + { + "epoch": 0.3718728446924572, + "grad_norm": 3.3095014095306396, + "learning_rate": 1.4471920595584248e-05, + "loss": 1.059, + "step": 5931 + }, + { + "epoch": 0.37193554454824757, + "grad_norm": 2.9017677307128906, + "learning_rate": 1.4470104110547896e-05, + "loss": 1.2018, + "step": 5932 + }, + { + "epoch": 0.3719982444040379, + "grad_norm": 3.046696186065674, + "learning_rate": 1.4468287441164259e-05, + "loss": 1.1267, + "step": 5933 + }, + { + "epoch": 0.3720609442598282, + "grad_norm": 3.4176037311553955, + "learning_rate": 1.4466470587508248e-05, + "loss": 1.0484, + "step": 5934 + }, + { + "epoch": 0.37212364411561855, + "grad_norm": 2.7402055263519287, + "learning_rate": 1.44646535496548e-05, + "loss": 1.2775, + "step": 5935 + }, + { + "epoch": 0.3721863439714089, + "grad_norm": 3.0263853073120117, + "learning_rate": 1.446283632767884e-05, + "loss": 1.1377, + "step": 5936 + }, + { + "epoch": 0.3722490438271992, + "grad_norm": 2.9172606468200684, + "learning_rate": 1.4461018921655319e-05, + "loss": 1.2321, + "step": 5937 + }, + { + "epoch": 0.3723117436829895, + "grad_norm": 2.838650941848755, + "learning_rate": 1.4459201331659179e-05, + "loss": 1.2485, + "step": 5938 + }, + { + "epoch": 0.37237444353877985, + "grad_norm": 2.952404022216797, + "learning_rate": 1.4457383557765385e-05, + "loss": 1.1694, + "step": 5939 + }, + { + "epoch": 0.3724371433945702, + "grad_norm": 2.5240161418914795, + "learning_rate": 1.4455565600048896e-05, + "loss": 1.3136, + "step": 5940 + }, + { + "epoch": 0.3724998432503605, + "grad_norm": 2.974658727645874, + "learning_rate": 1.4453747458584687e-05, + "loss": 1.1696, + "step": 5941 + }, + { + "epoch": 0.37256254310615083, + "grad_norm": 2.766706943511963, + "learning_rate": 1.445192913344774e-05, + "loss": 1.1061, + "step": 5942 + }, + { + "epoch": 0.37262524296194116, + "grad_norm": 3.0673534870147705, + "learning_rate": 1.4450110624713038e-05, + "loss": 0.9967, + "step": 5943 + }, + { + "epoch": 0.37268794281773154, + "grad_norm": 2.975179672241211, + "learning_rate": 1.4448291932455582e-05, + "loss": 1.2526, + "step": 5944 + }, + { + "epoch": 0.37275064267352187, + "grad_norm": 3.02907395362854, + "learning_rate": 1.4446473056750371e-05, + "loss": 1.0094, + "step": 5945 + }, + { + "epoch": 0.3728133425293122, + "grad_norm": 3.1930959224700928, + "learning_rate": 1.444465399767242e-05, + "loss": 1.0164, + "step": 5946 + }, + { + "epoch": 0.3728760423851025, + "grad_norm": 2.813929796218872, + "learning_rate": 1.4442834755296741e-05, + "loss": 1.1389, + "step": 5947 + }, + { + "epoch": 0.37293874224089285, + "grad_norm": 3.083801746368408, + "learning_rate": 1.4441015329698365e-05, + "loss": 1.1713, + "step": 5948 + }, + { + "epoch": 
0.3730014420966832, + "grad_norm": 3.108386993408203, + "learning_rate": 1.4439195720952322e-05, + "loss": 1.0164, + "step": 5949 + }, + { + "epoch": 0.3730641419524735, + "grad_norm": 3.3854715824127197, + "learning_rate": 1.4437375929133652e-05, + "loss": 1.1007, + "step": 5950 + }, + { + "epoch": 0.37312684180826383, + "grad_norm": 2.926574468612671, + "learning_rate": 1.4435555954317407e-05, + "loss": 0.9864, + "step": 5951 + }, + { + "epoch": 0.37318954166405416, + "grad_norm": 3.1251893043518066, + "learning_rate": 1.4433735796578638e-05, + "loss": 1.0784, + "step": 5952 + }, + { + "epoch": 0.3732522415198445, + "grad_norm": 3.0474016666412354, + "learning_rate": 1.4431915455992416e-05, + "loss": 1.3086, + "step": 5953 + }, + { + "epoch": 0.3733149413756348, + "grad_norm": 3.162179708480835, + "learning_rate": 1.44300949326338e-05, + "loss": 1.162, + "step": 5954 + }, + { + "epoch": 0.3733776412314252, + "grad_norm": 3.2146310806274414, + "learning_rate": 1.4428274226577882e-05, + "loss": 1.0633, + "step": 5955 + }, + { + "epoch": 0.3734403410872155, + "grad_norm": 3.101501226425171, + "learning_rate": 1.4426453337899736e-05, + "loss": 1.1888, + "step": 5956 + }, + { + "epoch": 0.37350304094300585, + "grad_norm": 3.6321544647216797, + "learning_rate": 1.4424632266674468e-05, + "loss": 1.1608, + "step": 5957 + }, + { + "epoch": 0.3735657407987962, + "grad_norm": 3.318166494369507, + "learning_rate": 1.4422811012977167e-05, + "loss": 1.1199, + "step": 5958 + }, + { + "epoch": 0.3736284406545865, + "grad_norm": 3.1964385509490967, + "learning_rate": 1.4420989576882949e-05, + "loss": 1.0094, + "step": 5959 + }, + { + "epoch": 0.37369114051037683, + "grad_norm": 3.317410945892334, + "learning_rate": 1.4419167958466927e-05, + "loss": 1.0827, + "step": 5960 + }, + { + "epoch": 0.37375384036616716, + "grad_norm": 2.73354172706604, + "learning_rate": 1.4417346157804221e-05, + "loss": 1.2041, + "step": 5961 + }, + { + "epoch": 0.3738165402219575, + "grad_norm": 2.920814275741577, + "learning_rate": 1.4415524174969973e-05, + "loss": 1.2033, + "step": 5962 + }, + { + "epoch": 0.3738792400777478, + "grad_norm": 3.1208364963531494, + "learning_rate": 1.4413702010039312e-05, + "loss": 1.0421, + "step": 5963 + }, + { + "epoch": 0.37394193993353814, + "grad_norm": 3.198225498199463, + "learning_rate": 1.4411879663087389e-05, + "loss": 1.2223, + "step": 5964 + }, + { + "epoch": 0.37400463978932846, + "grad_norm": 3.3785417079925537, + "learning_rate": 1.4410057134189355e-05, + "loss": 1.1606, + "step": 5965 + }, + { + "epoch": 0.3740673396451188, + "grad_norm": 3.006924867630005, + "learning_rate": 1.4408234423420373e-05, + "loss": 1.0087, + "step": 5966 + }, + { + "epoch": 0.3741300395009092, + "grad_norm": 3.0220632553100586, + "learning_rate": 1.440641153085561e-05, + "loss": 1.0285, + "step": 5967 + }, + { + "epoch": 0.3741927393566995, + "grad_norm": 2.98880934715271, + "learning_rate": 1.4404588456570245e-05, + "loss": 1.043, + "step": 5968 + }, + { + "epoch": 0.3742554392124898, + "grad_norm": 2.9234695434570312, + "learning_rate": 1.4402765200639457e-05, + "loss": 1.1252, + "step": 5969 + }, + { + "epoch": 0.37431813906828015, + "grad_norm": 3.018367290496826, + "learning_rate": 1.440094176313844e-05, + "loss": 1.1427, + "step": 5970 + }, + { + "epoch": 0.3743808389240705, + "grad_norm": 2.7218174934387207, + "learning_rate": 1.4399118144142395e-05, + "loss": 1.1722, + "step": 5971 + }, + { + "epoch": 0.3744435387798608, + "grad_norm": 2.799443244934082, + "learning_rate": 
1.4397294343726523e-05, + "loss": 1.1178, + "step": 5972 + }, + { + "epoch": 0.37450623863565113, + "grad_norm": 3.0571205615997314, + "learning_rate": 1.4395470361966041e-05, + "loss": 1.0969, + "step": 5973 + }, + { + "epoch": 0.37456893849144146, + "grad_norm": 3.0436666011810303, + "learning_rate": 1.4393646198936169e-05, + "loss": 1.0704, + "step": 5974 + }, + { + "epoch": 0.3746316383472318, + "grad_norm": 3.3658692836761475, + "learning_rate": 1.4391821854712139e-05, + "loss": 0.8326, + "step": 5975 + }, + { + "epoch": 0.3746943382030221, + "grad_norm": 2.9574661254882812, + "learning_rate": 1.4389997329369178e-05, + "loss": 0.9633, + "step": 5976 + }, + { + "epoch": 0.37475703805881244, + "grad_norm": 2.9456164836883545, + "learning_rate": 1.4388172622982542e-05, + "loss": 1.1554, + "step": 5977 + }, + { + "epoch": 0.3748197379146028, + "grad_norm": 2.881845712661743, + "learning_rate": 1.438634773562747e-05, + "loss": 1.1233, + "step": 5978 + }, + { + "epoch": 0.37488243777039315, + "grad_norm": 3.085757255554199, + "learning_rate": 1.4384522667379229e-05, + "loss": 1.1351, + "step": 5979 + }, + { + "epoch": 0.3749451376261835, + "grad_norm": 3.034748077392578, + "learning_rate": 1.4382697418313081e-05, + "loss": 1.0428, + "step": 5980 + }, + { + "epoch": 0.3750078374819738, + "grad_norm": 3.034205198287964, + "learning_rate": 1.4380871988504299e-05, + "loss": 1.066, + "step": 5981 + }, + { + "epoch": 0.37507053733776413, + "grad_norm": 3.0927951335906982, + "learning_rate": 1.4379046378028165e-05, + "loss": 1.2334, + "step": 5982 + }, + { + "epoch": 0.37513323719355446, + "grad_norm": 3.037562131881714, + "learning_rate": 1.4377220586959968e-05, + "loss": 1.0957, + "step": 5983 + }, + { + "epoch": 0.3751959370493448, + "grad_norm": 2.9637441635131836, + "learning_rate": 1.4375394615375004e-05, + "loss": 1.0714, + "step": 5984 + }, + { + "epoch": 0.3752586369051351, + "grad_norm": 2.7848618030548096, + "learning_rate": 1.4373568463348572e-05, + "loss": 1.1239, + "step": 5985 + }, + { + "epoch": 0.37532133676092544, + "grad_norm": 3.02439284324646, + "learning_rate": 1.4371742130955988e-05, + "loss": 1.041, + "step": 5986 + }, + { + "epoch": 0.37538403661671577, + "grad_norm": 3.1523971557617188, + "learning_rate": 1.4369915618272568e-05, + "loss": 1.1081, + "step": 5987 + }, + { + "epoch": 0.3754467364725061, + "grad_norm": 3.0471138954162598, + "learning_rate": 1.4368088925373635e-05, + "loss": 1.0918, + "step": 5988 + }, + { + "epoch": 0.3755094363282964, + "grad_norm": 2.8927090167999268, + "learning_rate": 1.4366262052334525e-05, + "loss": 1.1618, + "step": 5989 + }, + { + "epoch": 0.3755721361840868, + "grad_norm": 3.1464650630950928, + "learning_rate": 1.4364434999230579e-05, + "loss": 1.1331, + "step": 5990 + }, + { + "epoch": 0.37563483603987713, + "grad_norm": 3.1674561500549316, + "learning_rate": 1.4362607766137138e-05, + "loss": 1.0222, + "step": 5991 + }, + { + "epoch": 0.37569753589566746, + "grad_norm": 3.304659605026245, + "learning_rate": 1.4360780353129564e-05, + "loss": 1.177, + "step": 5992 + }, + { + "epoch": 0.3757602357514578, + "grad_norm": 3.0369486808776855, + "learning_rate": 1.435895276028322e-05, + "loss": 1.0825, + "step": 5993 + }, + { + "epoch": 0.3758229356072481, + "grad_norm": 3.296165943145752, + "learning_rate": 1.4357124987673474e-05, + "loss": 1.046, + "step": 5994 + }, + { + "epoch": 0.37588563546303844, + "grad_norm": 2.81539249420166, + "learning_rate": 1.4355297035375704e-05, + "loss": 1.0697, + "step": 5995 + }, + { + "epoch": 
0.37594833531882876, + "grad_norm": 2.9150257110595703, + "learning_rate": 1.435346890346529e-05, + "loss": 1.1658, + "step": 5996 + }, + { + "epoch": 0.3760110351746191, + "grad_norm": 3.201477289199829, + "learning_rate": 1.4351640592017633e-05, + "loss": 1.1286, + "step": 5997 + }, + { + "epoch": 0.3760737350304094, + "grad_norm": 3.0210657119750977, + "learning_rate": 1.4349812101108125e-05, + "loss": 1.0542, + "step": 5998 + }, + { + "epoch": 0.37613643488619974, + "grad_norm": 3.0022635459899902, + "learning_rate": 1.4347983430812176e-05, + "loss": 1.0246, + "step": 5999 + }, + { + "epoch": 0.37619913474199007, + "grad_norm": 3.1772396564483643, + "learning_rate": 1.43461545812052e-05, + "loss": 1.0631, + "step": 6000 + }, + { + "epoch": 0.37619913474199007, + "eval_loss": 1.1449470520019531, + "eval_runtime": 144.0886, + "eval_samples_per_second": 4.372, + "eval_steps_per_second": 1.097, + "step": 6000 + }, + { + "epoch": 0.3762618345977804, + "grad_norm": 2.7815308570861816, + "learning_rate": 1.4344325552362622e-05, + "loss": 1.2764, + "step": 6001 + }, + { + "epoch": 0.3763245344535708, + "grad_norm": 2.8677139282226562, + "learning_rate": 1.4342496344359867e-05, + "loss": 1.052, + "step": 6002 + }, + { + "epoch": 0.3763872343093611, + "grad_norm": 2.882350444793701, + "learning_rate": 1.4340666957272371e-05, + "loss": 1.0985, + "step": 6003 + }, + { + "epoch": 0.37644993416515143, + "grad_norm": 3.1269216537475586, + "learning_rate": 1.4338837391175582e-05, + "loss": 1.0498, + "step": 6004 + }, + { + "epoch": 0.37651263402094176, + "grad_norm": 3.061488628387451, + "learning_rate": 1.433700764614495e-05, + "loss": 1.1878, + "step": 6005 + }, + { + "epoch": 0.3765753338767321, + "grad_norm": 2.853564739227295, + "learning_rate": 1.4335177722255933e-05, + "loss": 1.2094, + "step": 6006 + }, + { + "epoch": 0.3766380337325224, + "grad_norm": 2.9595723152160645, + "learning_rate": 1.4333347619583997e-05, + "loss": 1.1913, + "step": 6007 + }, + { + "epoch": 0.37670073358831274, + "grad_norm": 3.0217127799987793, + "learning_rate": 1.4331517338204618e-05, + "loss": 1.0936, + "step": 6008 + }, + { + "epoch": 0.37676343344410307, + "grad_norm": 2.965151786804199, + "learning_rate": 1.4329686878193271e-05, + "loss": 1.1029, + "step": 6009 + }, + { + "epoch": 0.3768261332998934, + "grad_norm": 3.156832218170166, + "learning_rate": 1.432785623962545e-05, + "loss": 1.2076, + "step": 6010 + }, + { + "epoch": 0.3768888331556837, + "grad_norm": 2.8896684646606445, + "learning_rate": 1.4326025422576648e-05, + "loss": 1.1863, + "step": 6011 + }, + { + "epoch": 0.37695153301147405, + "grad_norm": 3.0182089805603027, + "learning_rate": 1.4324194427122369e-05, + "loss": 1.1069, + "step": 6012 + }, + { + "epoch": 0.37701423286726443, + "grad_norm": 3.2674400806427, + "learning_rate": 1.4322363253338124e-05, + "loss": 1.1664, + "step": 6013 + }, + { + "epoch": 0.37707693272305476, + "grad_norm": 2.9897053241729736, + "learning_rate": 1.4320531901299429e-05, + "loss": 1.0405, + "step": 6014 + }, + { + "epoch": 0.3771396325788451, + "grad_norm": 3.02126407623291, + "learning_rate": 1.4318700371081811e-05, + "loss": 1.1003, + "step": 6015 + }, + { + "epoch": 0.3772023324346354, + "grad_norm": 3.0554497241973877, + "learning_rate": 1.4316868662760798e-05, + "loss": 0.9597, + "step": 6016 + }, + { + "epoch": 0.37726503229042574, + "grad_norm": 3.1233930587768555, + "learning_rate": 1.4315036776411937e-05, + "loss": 1.1063, + "step": 6017 + }, + { + "epoch": 0.37732773214621607, + "grad_norm": 
2.6100687980651855, + "learning_rate": 1.4313204712110766e-05, + "loss": 1.1673, + "step": 6018 + }, + { + "epoch": 0.3773904320020064, + "grad_norm": 2.786839723587036, + "learning_rate": 1.4311372469932849e-05, + "loss": 1.1108, + "step": 6019 + }, + { + "epoch": 0.3774531318577967, + "grad_norm": 3.1680872440338135, + "learning_rate": 1.4309540049953741e-05, + "loss": 1.2619, + "step": 6020 + }, + { + "epoch": 0.37751583171358705, + "grad_norm": 3.2499380111694336, + "learning_rate": 1.4307707452249013e-05, + "loss": 0.984, + "step": 6021 + }, + { + "epoch": 0.3775785315693774, + "grad_norm": 3.255371570587158, + "learning_rate": 1.430587467689424e-05, + "loss": 1.0553, + "step": 6022 + }, + { + "epoch": 0.3776412314251677, + "grad_norm": 3.0169460773468018, + "learning_rate": 1.4304041723965009e-05, + "loss": 1.0129, + "step": 6023 + }, + { + "epoch": 0.377703931280958, + "grad_norm": 2.9245760440826416, + "learning_rate": 1.430220859353691e-05, + "loss": 1.2115, + "step": 6024 + }, + { + "epoch": 0.3777666311367484, + "grad_norm": 3.6872706413269043, + "learning_rate": 1.4300375285685539e-05, + "loss": 1.2081, + "step": 6025 + }, + { + "epoch": 0.37782933099253874, + "grad_norm": 2.9253110885620117, + "learning_rate": 1.4298541800486506e-05, + "loss": 1.1295, + "step": 6026 + }, + { + "epoch": 0.37789203084832906, + "grad_norm": 3.243767261505127, + "learning_rate": 1.4296708138015418e-05, + "loss": 1.157, + "step": 6027 + }, + { + "epoch": 0.3779547307041194, + "grad_norm": 3.074021339416504, + "learning_rate": 1.4294874298347903e-05, + "loss": 1.1118, + "step": 6028 + }, + { + "epoch": 0.3780174305599097, + "grad_norm": 3.2540407180786133, + "learning_rate": 1.429304028155958e-05, + "loss": 1.312, + "step": 6029 + }, + { + "epoch": 0.37808013041570004, + "grad_norm": 2.8704521656036377, + "learning_rate": 1.429120608772609e-05, + "loss": 1.1055, + "step": 6030 + }, + { + "epoch": 0.37814283027149037, + "grad_norm": 3.610649585723877, + "learning_rate": 1.4289371716923076e-05, + "loss": 1.2273, + "step": 6031 + }, + { + "epoch": 0.3782055301272807, + "grad_norm": 2.9576311111450195, + "learning_rate": 1.4287537169226183e-05, + "loss": 1.0752, + "step": 6032 + }, + { + "epoch": 0.378268229983071, + "grad_norm": 3.132061719894409, + "learning_rate": 1.428570244471107e-05, + "loss": 1.3726, + "step": 6033 + }, + { + "epoch": 0.37833092983886135, + "grad_norm": 3.105586290359497, + "learning_rate": 1.4283867543453398e-05, + "loss": 1.2867, + "step": 6034 + }, + { + "epoch": 0.3783936296946517, + "grad_norm": 3.205437660217285, + "learning_rate": 1.4282032465528846e-05, + "loss": 1.1277, + "step": 6035 + }, + { + "epoch": 0.37845632955044206, + "grad_norm": 3.142745018005371, + "learning_rate": 1.4280197211013084e-05, + "loss": 1.0188, + "step": 6036 + }, + { + "epoch": 0.3785190294062324, + "grad_norm": 2.851987600326538, + "learning_rate": 1.4278361779981806e-05, + "loss": 1.0701, + "step": 6037 + }, + { + "epoch": 0.3785817292620227, + "grad_norm": 3.1956467628479004, + "learning_rate": 1.42765261725107e-05, + "loss": 1.1588, + "step": 6038 + }, + { + "epoch": 0.37864442911781304, + "grad_norm": 2.7920994758605957, + "learning_rate": 1.4274690388675469e-05, + "loss": 1.1933, + "step": 6039 + }, + { + "epoch": 0.37870712897360337, + "grad_norm": 2.975864887237549, + "learning_rate": 1.4272854428551816e-05, + "loss": 1.1954, + "step": 6040 + }, + { + "epoch": 0.3787698288293937, + "grad_norm": 2.729619264602661, + "learning_rate": 1.4271018292215464e-05, + "loss": 1.1851, + "step": 6041 
+ }, + { + "epoch": 0.378832528685184, + "grad_norm": 2.7484047412872314, + "learning_rate": 1.4269181979742128e-05, + "loss": 1.1091, + "step": 6042 + }, + { + "epoch": 0.37889522854097435, + "grad_norm": 2.7608225345611572, + "learning_rate": 1.4267345491207543e-05, + "loss": 1.0672, + "step": 6043 + }, + { + "epoch": 0.3789579283967647, + "grad_norm": 2.9515626430511475, + "learning_rate": 1.4265508826687442e-05, + "loss": 1.0386, + "step": 6044 + }, + { + "epoch": 0.379020628252555, + "grad_norm": 2.851997137069702, + "learning_rate": 1.4263671986257572e-05, + "loss": 1.0467, + "step": 6045 + }, + { + "epoch": 0.37908332810834533, + "grad_norm": 2.9263956546783447, + "learning_rate": 1.4261834969993684e-05, + "loss": 1.0967, + "step": 6046 + }, + { + "epoch": 0.37914602796413566, + "grad_norm": 2.801574468612671, + "learning_rate": 1.4259997777971533e-05, + "loss": 0.9302, + "step": 6047 + }, + { + "epoch": 0.37920872781992604, + "grad_norm": 3.198699474334717, + "learning_rate": 1.425816041026689e-05, + "loss": 1.123, + "step": 6048 + }, + { + "epoch": 0.37927142767571637, + "grad_norm": 2.749572515487671, + "learning_rate": 1.4256322866955523e-05, + "loss": 1.2643, + "step": 6049 + }, + { + "epoch": 0.3793341275315067, + "grad_norm": 3.0259246826171875, + "learning_rate": 1.4254485148113217e-05, + "loss": 1.0388, + "step": 6050 + }, + { + "epoch": 0.379396827387297, + "grad_norm": 3.1263184547424316, + "learning_rate": 1.4252647253815757e-05, + "loss": 1.1666, + "step": 6051 + }, + { + "epoch": 0.37945952724308735, + "grad_norm": 3.1268413066864014, + "learning_rate": 1.4250809184138939e-05, + "loss": 1.108, + "step": 6052 + }, + { + "epoch": 0.3795222270988777, + "grad_norm": 3.2396111488342285, + "learning_rate": 1.4248970939158565e-05, + "loss": 1.1202, + "step": 6053 + }, + { + "epoch": 0.379584926954668, + "grad_norm": 3.24041748046875, + "learning_rate": 1.4247132518950441e-05, + "loss": 1.0745, + "step": 6054 + }, + { + "epoch": 0.3796476268104583, + "grad_norm": 3.0984280109405518, + "learning_rate": 1.424529392359039e-05, + "loss": 1.2425, + "step": 6055 + }, + { + "epoch": 0.37971032666624865, + "grad_norm": 2.9036548137664795, + "learning_rate": 1.4243455153154231e-05, + "loss": 1.1685, + "step": 6056 + }, + { + "epoch": 0.379773026522039, + "grad_norm": 3.0334746837615967, + "learning_rate": 1.4241616207717796e-05, + "loss": 1.1444, + "step": 6057 + }, + { + "epoch": 0.3798357263778293, + "grad_norm": 3.0442488193511963, + "learning_rate": 1.4239777087356923e-05, + "loss": 1.0552, + "step": 6058 + }, + { + "epoch": 0.3798984262336197, + "grad_norm": 2.773279905319214, + "learning_rate": 1.4237937792147461e-05, + "loss": 1.0236, + "step": 6059 + }, + { + "epoch": 0.37996112608941, + "grad_norm": 2.981313467025757, + "learning_rate": 1.4236098322165255e-05, + "loss": 1.0926, + "step": 6060 + }, + { + "epoch": 0.38002382594520034, + "grad_norm": 2.7710678577423096, + "learning_rate": 1.423425867748617e-05, + "loss": 1.2414, + "step": 6061 + }, + { + "epoch": 0.38008652580099067, + "grad_norm": 3.061830997467041, + "learning_rate": 1.4232418858186074e-05, + "loss": 1.1266, + "step": 6062 + }, + { + "epoch": 0.380149225656781, + "grad_norm": 2.81013822555542, + "learning_rate": 1.4230578864340838e-05, + "loss": 1.1482, + "step": 6063 + }, + { + "epoch": 0.3802119255125713, + "grad_norm": 2.8407046794891357, + "learning_rate": 1.4228738696026345e-05, + "loss": 1.0947, + "step": 6064 + }, + { + "epoch": 0.38027462536836165, + "grad_norm": 3.2979540824890137, + "learning_rate": 
1.4226898353318483e-05, + "loss": 1.1432, + "step": 6065 + }, + { + "epoch": 0.380337325224152, + "grad_norm": 2.8850924968719482, + "learning_rate": 1.422505783629315e-05, + "loss": 1.079, + "step": 6066 + }, + { + "epoch": 0.3804000250799423, + "grad_norm": 2.7418429851531982, + "learning_rate": 1.4223217145026246e-05, + "loss": 1.0767, + "step": 6067 + }, + { + "epoch": 0.38046272493573263, + "grad_norm": 3.2393147945404053, + "learning_rate": 1.4221376279593684e-05, + "loss": 1.1051, + "step": 6068 + }, + { + "epoch": 0.38052542479152296, + "grad_norm": 3.29653000831604, + "learning_rate": 1.4219535240071378e-05, + "loss": 1.0185, + "step": 6069 + }, + { + "epoch": 0.3805881246473133, + "grad_norm": 2.950712203979492, + "learning_rate": 1.4217694026535258e-05, + "loss": 1.0534, + "step": 6070 + }, + { + "epoch": 0.38065082450310367, + "grad_norm": 3.090512752532959, + "learning_rate": 1.4215852639061247e-05, + "loss": 1.2309, + "step": 6071 + }, + { + "epoch": 0.380713524358894, + "grad_norm": 2.8810372352600098, + "learning_rate": 1.4214011077725293e-05, + "loss": 1.0873, + "step": 6072 + }, + { + "epoch": 0.3807762242146843, + "grad_norm": 2.798112392425537, + "learning_rate": 1.4212169342603337e-05, + "loss": 1.0939, + "step": 6073 + }, + { + "epoch": 0.38083892407047465, + "grad_norm": 2.729841709136963, + "learning_rate": 1.4210327433771331e-05, + "loss": 1.1538, + "step": 6074 + }, + { + "epoch": 0.380901623926265, + "grad_norm": 2.867126941680908, + "learning_rate": 1.4208485351305241e-05, + "loss": 1.1767, + "step": 6075 + }, + { + "epoch": 0.3809643237820553, + "grad_norm": 2.895918607711792, + "learning_rate": 1.420664309528103e-05, + "loss": 1.0756, + "step": 6076 + }, + { + "epoch": 0.38102702363784563, + "grad_norm": 2.9611451625823975, + "learning_rate": 1.4204800665774676e-05, + "loss": 1.2409, + "step": 6077 + }, + { + "epoch": 0.38108972349363596, + "grad_norm": 2.7855215072631836, + "learning_rate": 1.4202958062862155e-05, + "loss": 1.1395, + "step": 6078 + }, + { + "epoch": 0.3811524233494263, + "grad_norm": 3.4959397315979004, + "learning_rate": 1.4201115286619464e-05, + "loss": 1.0269, + "step": 6079 + }, + { + "epoch": 0.3812151232052166, + "grad_norm": 2.907796621322632, + "learning_rate": 1.419927233712259e-05, + "loss": 1.2583, + "step": 6080 + }, + { + "epoch": 0.38127782306100694, + "grad_norm": 3.3270342350006104, + "learning_rate": 1.4197429214447546e-05, + "loss": 1.1466, + "step": 6081 + }, + { + "epoch": 0.38134052291679726, + "grad_norm": 3.0389950275421143, + "learning_rate": 1.4195585918670338e-05, + "loss": 1.01, + "step": 6082 + }, + { + "epoch": 0.38140322277258765, + "grad_norm": 3.03822660446167, + "learning_rate": 1.4193742449866983e-05, + "loss": 1.0722, + "step": 6083 + }, + { + "epoch": 0.381465922628378, + "grad_norm": 3.0738179683685303, + "learning_rate": 1.4191898808113504e-05, + "loss": 1.1759, + "step": 6084 + }, + { + "epoch": 0.3815286224841683, + "grad_norm": 3.3581652641296387, + "learning_rate": 1.4190054993485936e-05, + "loss": 1.0888, + "step": 6085 + }, + { + "epoch": 0.3815913223399586, + "grad_norm": 3.166076183319092, + "learning_rate": 1.418821100606032e-05, + "loss": 0.9713, + "step": 6086 + }, + { + "epoch": 0.38165402219574895, + "grad_norm": 2.8993887901306152, + "learning_rate": 1.4186366845912694e-05, + "loss": 1.1268, + "step": 6087 + }, + { + "epoch": 0.3817167220515393, + "grad_norm": 3.1000053882598877, + "learning_rate": 1.4184522513119122e-05, + "loss": 1.1993, + "step": 6088 + }, + { + "epoch": 
0.3817794219073296, + "grad_norm": 3.2617063522338867, + "learning_rate": 1.4182678007755653e-05, + "loss": 1.0813, + "step": 6089 + }, + { + "epoch": 0.38184212176311993, + "grad_norm": 3.0124106407165527, + "learning_rate": 1.4180833329898364e-05, + "loss": 0.9581, + "step": 6090 + }, + { + "epoch": 0.38190482161891026, + "grad_norm": 3.6102774143218994, + "learning_rate": 1.4178988479623326e-05, + "loss": 1.1976, + "step": 6091 + }, + { + "epoch": 0.3819675214747006, + "grad_norm": 3.289231777191162, + "learning_rate": 1.417714345700662e-05, + "loss": 1.2845, + "step": 6092 + }, + { + "epoch": 0.3820302213304909, + "grad_norm": 3.060948133468628, + "learning_rate": 1.4175298262124333e-05, + "loss": 1.1935, + "step": 6093 + }, + { + "epoch": 0.3820929211862813, + "grad_norm": 3.1295392513275146, + "learning_rate": 1.4173452895052566e-05, + "loss": 1.0484, + "step": 6094 + }, + { + "epoch": 0.3821556210420716, + "grad_norm": 3.029076337814331, + "learning_rate": 1.4171607355867419e-05, + "loss": 1.2456, + "step": 6095 + }, + { + "epoch": 0.38221832089786195, + "grad_norm": 2.883063554763794, + "learning_rate": 1.4169761644645002e-05, + "loss": 1.094, + "step": 6096 + }, + { + "epoch": 0.3822810207536523, + "grad_norm": 2.908907651901245, + "learning_rate": 1.4167915761461433e-05, + "loss": 1.1372, + "step": 6097 + }, + { + "epoch": 0.3823437206094426, + "grad_norm": 2.9466323852539062, + "learning_rate": 1.4166069706392835e-05, + "loss": 0.9652, + "step": 6098 + }, + { + "epoch": 0.38240642046523293, + "grad_norm": 2.7557010650634766, + "learning_rate": 1.4164223479515342e-05, + "loss": 1.0773, + "step": 6099 + }, + { + "epoch": 0.38246912032102326, + "grad_norm": 3.1250128746032715, + "learning_rate": 1.4162377080905093e-05, + "loss": 1.1691, + "step": 6100 + }, + { + "epoch": 0.3825318201768136, + "grad_norm": 2.531214475631714, + "learning_rate": 1.4160530510638228e-05, + "loss": 1.1509, + "step": 6101 + }, + { + "epoch": 0.3825945200326039, + "grad_norm": 3.487077474594116, + "learning_rate": 1.4158683768790902e-05, + "loss": 1.065, + "step": 6102 + }, + { + "epoch": 0.38265721988839424, + "grad_norm": 3.535898208618164, + "learning_rate": 1.415683685543928e-05, + "loss": 1.0699, + "step": 6103 + }, + { + "epoch": 0.38271991974418457, + "grad_norm": 2.8073818683624268, + "learning_rate": 1.415498977065952e-05, + "loss": 1.0831, + "step": 6104 + }, + { + "epoch": 0.3827826195999749, + "grad_norm": 3.213898181915283, + "learning_rate": 1.41531425145278e-05, + "loss": 1.0947, + "step": 6105 + }, + { + "epoch": 0.3828453194557653, + "grad_norm": 3.035919427871704, + "learning_rate": 1.4151295087120307e-05, + "loss": 1.108, + "step": 6106 + }, + { + "epoch": 0.3829080193115556, + "grad_norm": 2.685299873352051, + "learning_rate": 1.4149447488513217e-05, + "loss": 1.1602, + "step": 6107 + }, + { + "epoch": 0.38297071916734593, + "grad_norm": 3.311209201812744, + "learning_rate": 1.4147599718782737e-05, + "loss": 1.1829, + "step": 6108 + }, + { + "epoch": 0.38303341902313626, + "grad_norm": 3.321866989135742, + "learning_rate": 1.4145751778005061e-05, + "loss": 1.1236, + "step": 6109 + }, + { + "epoch": 0.3830961188789266, + "grad_norm": 3.3304383754730225, + "learning_rate": 1.41439036662564e-05, + "loss": 1.1458, + "step": 6110 + }, + { + "epoch": 0.3831588187347169, + "grad_norm": 2.96628737449646, + "learning_rate": 1.4142055383612972e-05, + "loss": 1.1348, + "step": 6111 + }, + { + "epoch": 0.38322151859050724, + "grad_norm": 3.3460500240325928, + "learning_rate": 1.4140206930151e-05, + 
"loss": 1.2128, + "step": 6112 + }, + { + "epoch": 0.38328421844629756, + "grad_norm": 2.9003407955169678, + "learning_rate": 1.4138358305946708e-05, + "loss": 1.1585, + "step": 6113 + }, + { + "epoch": 0.3833469183020879, + "grad_norm": 3.0186870098114014, + "learning_rate": 1.4136509511076347e-05, + "loss": 0.9997, + "step": 6114 + }, + { + "epoch": 0.3834096181578782, + "grad_norm": 2.812185287475586, + "learning_rate": 1.4134660545616145e-05, + "loss": 1.1194, + "step": 6115 + }, + { + "epoch": 0.38347231801366854, + "grad_norm": 2.7545766830444336, + "learning_rate": 1.4132811409642363e-05, + "loss": 1.0611, + "step": 6116 + }, + { + "epoch": 0.3835350178694589, + "grad_norm": 3.156301259994507, + "learning_rate": 1.4130962103231265e-05, + "loss": 1.1007, + "step": 6117 + }, + { + "epoch": 0.38359771772524925, + "grad_norm": 3.0383355617523193, + "learning_rate": 1.4129112626459103e-05, + "loss": 1.2184, + "step": 6118 + }, + { + "epoch": 0.3836604175810396, + "grad_norm": 2.89613676071167, + "learning_rate": 1.412726297940216e-05, + "loss": 1.2685, + "step": 6119 + }, + { + "epoch": 0.3837231174368299, + "grad_norm": 2.9666123390197754, + "learning_rate": 1.4125413162136705e-05, + "loss": 1.1136, + "step": 6120 + }, + { + "epoch": 0.38378581729262023, + "grad_norm": 3.3635733127593994, + "learning_rate": 1.4123563174739036e-05, + "loss": 1.0829, + "step": 6121 + }, + { + "epoch": 0.38384851714841056, + "grad_norm": 2.9642114639282227, + "learning_rate": 1.4121713017285442e-05, + "loss": 1.1742, + "step": 6122 + }, + { + "epoch": 0.3839112170042009, + "grad_norm": 2.9000489711761475, + "learning_rate": 1.4119862689852224e-05, + "loss": 1.0838, + "step": 6123 + }, + { + "epoch": 0.3839739168599912, + "grad_norm": 3.450667381286621, + "learning_rate": 1.4118012192515685e-05, + "loss": 1.0957, + "step": 6124 + }, + { + "epoch": 0.38403661671578154, + "grad_norm": 3.0322656631469727, + "learning_rate": 1.4116161525352143e-05, + "loss": 1.1481, + "step": 6125 + }, + { + "epoch": 0.38409931657157187, + "grad_norm": 3.2745821475982666, + "learning_rate": 1.4114310688437924e-05, + "loss": 1.0024, + "step": 6126 + }, + { + "epoch": 0.3841620164273622, + "grad_norm": 2.647148370742798, + "learning_rate": 1.4112459681849351e-05, + "loss": 1.0933, + "step": 6127 + }, + { + "epoch": 0.3842247162831525, + "grad_norm": 3.1777944564819336, + "learning_rate": 1.4110608505662759e-05, + "loss": 1.1595, + "step": 6128 + }, + { + "epoch": 0.3842874161389429, + "grad_norm": 2.9416236877441406, + "learning_rate": 1.4108757159954496e-05, + "loss": 1.1299, + "step": 6129 + }, + { + "epoch": 0.38435011599473323, + "grad_norm": 2.715505599975586, + "learning_rate": 1.4106905644800906e-05, + "loss": 1.1431, + "step": 6130 + }, + { + "epoch": 0.38441281585052356, + "grad_norm": 3.2964603900909424, + "learning_rate": 1.4105053960278348e-05, + "loss": 1.1074, + "step": 6131 + }, + { + "epoch": 0.3844755157063139, + "grad_norm": 2.8364431858062744, + "learning_rate": 1.4103202106463188e-05, + "loss": 1.0111, + "step": 6132 + }, + { + "epoch": 0.3845382155621042, + "grad_norm": 3.074537992477417, + "learning_rate": 1.410135008343179e-05, + "loss": 1.1435, + "step": 6133 + }, + { + "epoch": 0.38460091541789454, + "grad_norm": 3.1004483699798584, + "learning_rate": 1.4099497891260538e-05, + "loss": 1.2534, + "step": 6134 + }, + { + "epoch": 0.38466361527368487, + "grad_norm": 3.3373281955718994, + "learning_rate": 1.4097645530025812e-05, + "loss": 1.0097, + "step": 6135 + }, + { + "epoch": 0.3847263151294752, + 
"grad_norm": 3.0374643802642822, + "learning_rate": 1.4095792999804006e-05, + "loss": 1.1802, + "step": 6136 + }, + { + "epoch": 0.3847890149852655, + "grad_norm": 3.361532211303711, + "learning_rate": 1.4093940300671521e-05, + "loss": 1.0315, + "step": 6137 + }, + { + "epoch": 0.38485171484105585, + "grad_norm": 3.1321582794189453, + "learning_rate": 1.4092087432704755e-05, + "loss": 0.9937, + "step": 6138 + }, + { + "epoch": 0.3849144146968462, + "grad_norm": 3.2201287746429443, + "learning_rate": 1.4090234395980126e-05, + "loss": 1.1554, + "step": 6139 + }, + { + "epoch": 0.3849771145526365, + "grad_norm": 3.1305580139160156, + "learning_rate": 1.4088381190574051e-05, + "loss": 1.0716, + "step": 6140 + }, + { + "epoch": 0.3850398144084269, + "grad_norm": 2.8323862552642822, + "learning_rate": 1.4086527816562963e-05, + "loss": 1.1697, + "step": 6141 + }, + { + "epoch": 0.3851025142642172, + "grad_norm": 3.317112445831299, + "learning_rate": 1.4084674274023283e-05, + "loss": 1.1534, + "step": 6142 + }, + { + "epoch": 0.38516521412000754, + "grad_norm": 3.3243227005004883, + "learning_rate": 1.408282056303146e-05, + "loss": 1.1535, + "step": 6143 + }, + { + "epoch": 0.38522791397579786, + "grad_norm": 2.839904546737671, + "learning_rate": 1.4080966683663937e-05, + "loss": 1.184, + "step": 6144 + }, + { + "epoch": 0.3852906138315882, + "grad_norm": 2.9350266456604004, + "learning_rate": 1.4079112635997172e-05, + "loss": 1.2522, + "step": 6145 + }, + { + "epoch": 0.3853533136873785, + "grad_norm": 2.712820053100586, + "learning_rate": 1.4077258420107622e-05, + "loss": 1.0486, + "step": 6146 + }, + { + "epoch": 0.38541601354316884, + "grad_norm": 2.7050724029541016, + "learning_rate": 1.4075404036071755e-05, + "loss": 1.1482, + "step": 6147 + }, + { + "epoch": 0.38547871339895917, + "grad_norm": 2.9200360774993896, + "learning_rate": 1.4073549483966051e-05, + "loss": 1.0013, + "step": 6148 + }, + { + "epoch": 0.3855414132547495, + "grad_norm": 3.1547656059265137, + "learning_rate": 1.4071694763866988e-05, + "loss": 1.1076, + "step": 6149 + }, + { + "epoch": 0.3856041131105398, + "grad_norm": 3.1241395473480225, + "learning_rate": 1.4069839875851055e-05, + "loss": 1.0301, + "step": 6150 + }, + { + "epoch": 0.38566681296633015, + "grad_norm": 2.7469165325164795, + "learning_rate": 1.4067984819994745e-05, + "loss": 1.0354, + "step": 6151 + }, + { + "epoch": 0.38572951282212053, + "grad_norm": 2.728076934814453, + "learning_rate": 1.4066129596374567e-05, + "loss": 1.2214, + "step": 6152 + }, + { + "epoch": 0.38579221267791086, + "grad_norm": 3.201411485671997, + "learning_rate": 1.4064274205067023e-05, + "loss": 1.1357, + "step": 6153 + }, + { + "epoch": 0.3858549125337012, + "grad_norm": 3.0932459831237793, + "learning_rate": 1.4062418646148639e-05, + "loss": 1.1351, + "step": 6154 + }, + { + "epoch": 0.3859176123894915, + "grad_norm": 2.7511346340179443, + "learning_rate": 1.4060562919695926e-05, + "loss": 1.0662, + "step": 6155 + }, + { + "epoch": 0.38598031224528184, + "grad_norm": 2.734532356262207, + "learning_rate": 1.4058707025785423e-05, + "loss": 1.2577, + "step": 6156 + }, + { + "epoch": 0.38604301210107217, + "grad_norm": 3.008314609527588, + "learning_rate": 1.4056850964493668e-05, + "loss": 1.1677, + "step": 6157 + }, + { + "epoch": 0.3861057119568625, + "grad_norm": 2.735424757003784, + "learning_rate": 1.40549947358972e-05, + "loss": 0.9836, + "step": 6158 + }, + { + "epoch": 0.3861684118126528, + "grad_norm": 2.7973697185516357, + "learning_rate": 1.4053138340072571e-05, + 
"loss": 1.1368, + "step": 6159 + }, + { + "epoch": 0.38623111166844315, + "grad_norm": 2.9091479778289795, + "learning_rate": 1.4051281777096339e-05, + "loss": 1.197, + "step": 6160 + }, + { + "epoch": 0.3862938115242335, + "grad_norm": 2.9114930629730225, + "learning_rate": 1.4049425047045074e-05, + "loss": 1.067, + "step": 6161 + }, + { + "epoch": 0.3863565113800238, + "grad_norm": 2.916375160217285, + "learning_rate": 1.404756814999534e-05, + "loss": 1.1337, + "step": 6162 + }, + { + "epoch": 0.38641921123581413, + "grad_norm": 3.0290639400482178, + "learning_rate": 1.4045711086023721e-05, + "loss": 1.1782, + "step": 6163 + }, + { + "epoch": 0.3864819110916045, + "grad_norm": 2.9567530155181885, + "learning_rate": 1.4043853855206798e-05, + "loss": 1.0745, + "step": 6164 + }, + { + "epoch": 0.38654461094739484, + "grad_norm": 3.3047616481781006, + "learning_rate": 1.4041996457621165e-05, + "loss": 1.1306, + "step": 6165 + }, + { + "epoch": 0.38660731080318517, + "grad_norm": 3.0084946155548096, + "learning_rate": 1.4040138893343422e-05, + "loss": 1.2703, + "step": 6166 + }, + { + "epoch": 0.3866700106589755, + "grad_norm": 2.929187059402466, + "learning_rate": 1.4038281162450178e-05, + "loss": 1.153, + "step": 6167 + }, + { + "epoch": 0.3867327105147658, + "grad_norm": 3.3263418674468994, + "learning_rate": 1.403642326501804e-05, + "loss": 1.0832, + "step": 6168 + }, + { + "epoch": 0.38679541037055615, + "grad_norm": 3.0558037757873535, + "learning_rate": 1.4034565201123628e-05, + "loss": 1.1774, + "step": 6169 + }, + { + "epoch": 0.3868581102263465, + "grad_norm": 2.962153196334839, + "learning_rate": 1.4032706970843575e-05, + "loss": 1.0636, + "step": 6170 + }, + { + "epoch": 0.3869208100821368, + "grad_norm": 2.9738759994506836, + "learning_rate": 1.4030848574254507e-05, + "loss": 1.1994, + "step": 6171 + }, + { + "epoch": 0.38698350993792713, + "grad_norm": 2.9098949432373047, + "learning_rate": 1.402899001143307e-05, + "loss": 1.1774, + "step": 6172 + }, + { + "epoch": 0.38704620979371746, + "grad_norm": 2.985398054122925, + "learning_rate": 1.4027131282455908e-05, + "loss": 1.0738, + "step": 6173 + }, + { + "epoch": 0.3871089096495078, + "grad_norm": 3.211078405380249, + "learning_rate": 1.4025272387399676e-05, + "loss": 1.1231, + "step": 6174 + }, + { + "epoch": 0.38717160950529816, + "grad_norm": 3.3492982387542725, + "learning_rate": 1.4023413326341035e-05, + "loss": 1.0321, + "step": 6175 + }, + { + "epoch": 0.3872343093610885, + "grad_norm": 3.0472569465637207, + "learning_rate": 1.4021554099356654e-05, + "loss": 1.1206, + "step": 6176 + }, + { + "epoch": 0.3872970092168788, + "grad_norm": 2.959176778793335, + "learning_rate": 1.4019694706523203e-05, + "loss": 0.9296, + "step": 6177 + }, + { + "epoch": 0.38735970907266914, + "grad_norm": 2.9672317504882812, + "learning_rate": 1.4017835147917371e-05, + "loss": 1.0946, + "step": 6178 + }, + { + "epoch": 0.38742240892845947, + "grad_norm": 2.980614423751831, + "learning_rate": 1.401597542361584e-05, + "loss": 0.9024, + "step": 6179 + }, + { + "epoch": 0.3874851087842498, + "grad_norm": 2.795483350753784, + "learning_rate": 1.4014115533695307e-05, + "loss": 1.1917, + "step": 6180 + }, + { + "epoch": 0.3875478086400401, + "grad_norm": 3.0531039237976074, + "learning_rate": 1.4012255478232474e-05, + "loss": 1.0778, + "step": 6181 + }, + { + "epoch": 0.38761050849583045, + "grad_norm": 3.3689801692962646, + "learning_rate": 1.401039525730405e-05, + "loss": 1.2168, + "step": 6182 + }, + { + "epoch": 0.3876732083516208, + "grad_norm": 
3.018070697784424, + "learning_rate": 1.4008534870986752e-05, + "loss": 0.9872, + "step": 6183 + }, + { + "epoch": 0.3877359082074111, + "grad_norm": 3.1134843826293945, + "learning_rate": 1.4006674319357298e-05, + "loss": 1.081, + "step": 6184 + }, + { + "epoch": 0.38779860806320143, + "grad_norm": 2.864600658416748, + "learning_rate": 1.4004813602492422e-05, + "loss": 1.1375, + "step": 6185 + }, + { + "epoch": 0.38786130791899176, + "grad_norm": 2.9332008361816406, + "learning_rate": 1.4002952720468856e-05, + "loss": 1.1557, + "step": 6186 + }, + { + "epoch": 0.38792400777478214, + "grad_norm": 3.4049370288848877, + "learning_rate": 1.400109167336335e-05, + "loss": 1.0427, + "step": 6187 + }, + { + "epoch": 0.38798670763057247, + "grad_norm": 3.1200830936431885, + "learning_rate": 1.3999230461252644e-05, + "loss": 1.2328, + "step": 6188 + }, + { + "epoch": 0.3880494074863628, + "grad_norm": 3.014726161956787, + "learning_rate": 1.39973690842135e-05, + "loss": 1.2295, + "step": 6189 + }, + { + "epoch": 0.3881121073421531, + "grad_norm": 3.0161166191101074, + "learning_rate": 1.3995507542322681e-05, + "loss": 1.1702, + "step": 6190 + }, + { + "epoch": 0.38817480719794345, + "grad_norm": 2.6983020305633545, + "learning_rate": 1.3993645835656955e-05, + "loss": 1.2616, + "step": 6191 + }, + { + "epoch": 0.3882375070537338, + "grad_norm": 2.860487699508667, + "learning_rate": 1.3991783964293104e-05, + "loss": 1.0507, + "step": 6192 + }, + { + "epoch": 0.3883002069095241, + "grad_norm": 2.761091470718384, + "learning_rate": 1.3989921928307904e-05, + "loss": 1.076, + "step": 6193 + }, + { + "epoch": 0.38836290676531443, + "grad_norm": 2.633751153945923, + "learning_rate": 1.3988059727778152e-05, + "loss": 1.1186, + "step": 6194 + }, + { + "epoch": 0.38842560662110476, + "grad_norm": 3.2507498264312744, + "learning_rate": 1.398619736278064e-05, + "loss": 1.0562, + "step": 6195 + }, + { + "epoch": 0.3884883064768951, + "grad_norm": 3.125580072402954, + "learning_rate": 1.3984334833392179e-05, + "loss": 1.2163, + "step": 6196 + }, + { + "epoch": 0.3885510063326854, + "grad_norm": 2.721212387084961, + "learning_rate": 1.398247213968957e-05, + "loss": 1.168, + "step": 6197 + }, + { + "epoch": 0.3886137061884758, + "grad_norm": 3.116546392440796, + "learning_rate": 1.3980609281749642e-05, + "loss": 1.0916, + "step": 6198 + }, + { + "epoch": 0.3886764060442661, + "grad_norm": 3.0724010467529297, + "learning_rate": 1.397874625964921e-05, + "loss": 1.1939, + "step": 6199 + }, + { + "epoch": 0.38873910590005645, + "grad_norm": 3.0280792713165283, + "learning_rate": 1.397688307346511e-05, + "loss": 1.1911, + "step": 6200 + }, + { + "epoch": 0.3888018057558468, + "grad_norm": 3.145737886428833, + "learning_rate": 1.3975019723274174e-05, + "loss": 1.1571, + "step": 6201 + }, + { + "epoch": 0.3888645056116371, + "grad_norm": 3.0186166763305664, + "learning_rate": 1.3973156209153256e-05, + "loss": 1.1858, + "step": 6202 + }, + { + "epoch": 0.38892720546742743, + "grad_norm": 2.68937611579895, + "learning_rate": 1.3971292531179202e-05, + "loss": 1.1115, + "step": 6203 + }, + { + "epoch": 0.38898990532321776, + "grad_norm": 2.892777442932129, + "learning_rate": 1.3969428689428868e-05, + "loss": 1.2791, + "step": 6204 + }, + { + "epoch": 0.3890526051790081, + "grad_norm": 3.268151044845581, + "learning_rate": 1.3967564683979125e-05, + "loss": 1.1657, + "step": 6205 + }, + { + "epoch": 0.3891153050347984, + "grad_norm": 2.8357508182525635, + "learning_rate": 1.3965700514906837e-05, + "loss": 1.0591, + "step": 6206 
+ }, + { + "epoch": 0.38917800489058874, + "grad_norm": 3.0757837295532227, + "learning_rate": 1.3963836182288888e-05, + "loss": 1.2613, + "step": 6207 + }, + { + "epoch": 0.38924070474637906, + "grad_norm": 3.027238130569458, + "learning_rate": 1.3961971686202163e-05, + "loss": 1.0811, + "step": 6208 + }, + { + "epoch": 0.3893034046021694, + "grad_norm": 2.9448883533477783, + "learning_rate": 1.3960107026723554e-05, + "loss": 1.2861, + "step": 6209 + }, + { + "epoch": 0.38936610445795977, + "grad_norm": 2.64239239692688, + "learning_rate": 1.3958242203929954e-05, + "loss": 1.1312, + "step": 6210 + }, + { + "epoch": 0.3894288043137501, + "grad_norm": 3.010343074798584, + "learning_rate": 1.3956377217898276e-05, + "loss": 1.1889, + "step": 6211 + }, + { + "epoch": 0.3894915041695404, + "grad_norm": 2.9527223110198975, + "learning_rate": 1.3954512068705425e-05, + "loss": 1.228, + "step": 6212 + }, + { + "epoch": 0.38955420402533075, + "grad_norm": 3.032623291015625, + "learning_rate": 1.3952646756428328e-05, + "loss": 0.9624, + "step": 6213 + }, + { + "epoch": 0.3896169038811211, + "grad_norm": 2.8606314659118652, + "learning_rate": 1.3950781281143906e-05, + "loss": 1.0861, + "step": 6214 + }, + { + "epoch": 0.3896796037369114, + "grad_norm": 2.8262274265289307, + "learning_rate": 1.3948915642929089e-05, + "loss": 1.0107, + "step": 6215 + }, + { + "epoch": 0.38974230359270173, + "grad_norm": 2.7801358699798584, + "learning_rate": 1.3947049841860818e-05, + "loss": 1.0406, + "step": 6216 + }, + { + "epoch": 0.38980500344849206, + "grad_norm": 3.1156091690063477, + "learning_rate": 1.394518387801604e-05, + "loss": 1.0319, + "step": 6217 + }, + { + "epoch": 0.3898677033042824, + "grad_norm": 2.876948118209839, + "learning_rate": 1.3943317751471707e-05, + "loss": 1.203, + "step": 6218 + }, + { + "epoch": 0.3899304031600727, + "grad_norm": 3.0957181453704834, + "learning_rate": 1.3941451462304778e-05, + "loss": 1.0233, + "step": 6219 + }, + { + "epoch": 0.38999310301586304, + "grad_norm": 3.050384521484375, + "learning_rate": 1.393958501059222e-05, + "loss": 0.9568, + "step": 6220 + }, + { + "epoch": 0.39005580287165337, + "grad_norm": 3.2419180870056152, + "learning_rate": 1.3937718396411002e-05, + "loss": 1.0313, + "step": 6221 + }, + { + "epoch": 0.39011850272744375, + "grad_norm": 2.9798195362091064, + "learning_rate": 1.3935851619838103e-05, + "loss": 1.3157, + "step": 6222 + }, + { + "epoch": 0.3901812025832341, + "grad_norm": 2.850456953048706, + "learning_rate": 1.3933984680950517e-05, + "loss": 1.1533, + "step": 6223 + }, + { + "epoch": 0.3902439024390244, + "grad_norm": 3.357036590576172, + "learning_rate": 1.3932117579825226e-05, + "loss": 1.11, + "step": 6224 + }, + { + "epoch": 0.39030660229481473, + "grad_norm": 3.069500684738159, + "learning_rate": 1.3930250316539237e-05, + "loss": 1.1159, + "step": 6225 + }, + { + "epoch": 0.39036930215060506, + "grad_norm": 3.293148994445801, + "learning_rate": 1.3928382891169552e-05, + "loss": 1.234, + "step": 6226 + }, + { + "epoch": 0.3904320020063954, + "grad_norm": 3.0187299251556396, + "learning_rate": 1.3926515303793187e-05, + "loss": 1.0112, + "step": 6227 + }, + { + "epoch": 0.3904947018621857, + "grad_norm": 3.385287284851074, + "learning_rate": 1.3924647554487157e-05, + "loss": 1.1872, + "step": 6228 + }, + { + "epoch": 0.39055740171797604, + "grad_norm": 2.8118765354156494, + "learning_rate": 1.3922779643328492e-05, + "loss": 1.0739, + "step": 6229 + }, + { + "epoch": 0.39062010157376637, + "grad_norm": 3.342360496520996, + 
"learning_rate": 1.3920911570394222e-05, + "loss": 1.2057, + "step": 6230 + }, + { + "epoch": 0.3906828014295567, + "grad_norm": 3.2713494300842285, + "learning_rate": 1.3919043335761391e-05, + "loss": 1.1069, + "step": 6231 + }, + { + "epoch": 0.390745501285347, + "grad_norm": 3.2020621299743652, + "learning_rate": 1.391717493950704e-05, + "loss": 1.0583, + "step": 6232 + }, + { + "epoch": 0.3908082011411374, + "grad_norm": 3.2680599689483643, + "learning_rate": 1.391530638170822e-05, + "loss": 1.0, + "step": 6233 + }, + { + "epoch": 0.39087090099692773, + "grad_norm": 3.14633846282959, + "learning_rate": 1.3913437662441997e-05, + "loss": 1.0933, + "step": 6234 + }, + { + "epoch": 0.39093360085271806, + "grad_norm": 2.875112533569336, + "learning_rate": 1.3911568781785432e-05, + "loss": 1.3411, + "step": 6235 + }, + { + "epoch": 0.3909963007085084, + "grad_norm": 2.665435791015625, + "learning_rate": 1.39096997398156e-05, + "loss": 1.125, + "step": 6236 + }, + { + "epoch": 0.3910590005642987, + "grad_norm": 2.790193796157837, + "learning_rate": 1.3907830536609582e-05, + "loss": 1.1155, + "step": 6237 + }, + { + "epoch": 0.39112170042008904, + "grad_norm": 3.326441764831543, + "learning_rate": 1.3905961172244462e-05, + "loss": 1.1726, + "step": 6238 + }, + { + "epoch": 0.39118440027587936, + "grad_norm": 2.9670934677124023, + "learning_rate": 1.3904091646797332e-05, + "loss": 1.2533, + "step": 6239 + }, + { + "epoch": 0.3912471001316697, + "grad_norm": 3.165081262588501, + "learning_rate": 1.3902221960345291e-05, + "loss": 0.9844, + "step": 6240 + }, + { + "epoch": 0.39130979998746, + "grad_norm": 3.2649848461151123, + "learning_rate": 1.3900352112965445e-05, + "loss": 1.2226, + "step": 6241 + }, + { + "epoch": 0.39137249984325034, + "grad_norm": 3.4078383445739746, + "learning_rate": 1.3898482104734909e-05, + "loss": 1.0926, + "step": 6242 + }, + { + "epoch": 0.39143519969904067, + "grad_norm": 2.867283821105957, + "learning_rate": 1.3896611935730802e-05, + "loss": 1.0078, + "step": 6243 + }, + { + "epoch": 0.391497899554831, + "grad_norm": 2.888291120529175, + "learning_rate": 1.3894741606030247e-05, + "loss": 1.1228, + "step": 6244 + }, + { + "epoch": 0.3915605994106214, + "grad_norm": 2.737161159515381, + "learning_rate": 1.3892871115710379e-05, + "loss": 1.2522, + "step": 6245 + }, + { + "epoch": 0.3916232992664117, + "grad_norm": 2.9915997982025146, + "learning_rate": 1.3891000464848334e-05, + "loss": 1.2177, + "step": 6246 + }, + { + "epoch": 0.39168599912220203, + "grad_norm": 2.965712547302246, + "learning_rate": 1.3889129653521262e-05, + "loss": 1.0942, + "step": 6247 + }, + { + "epoch": 0.39174869897799236, + "grad_norm": 2.8896427154541016, + "learning_rate": 1.3887258681806313e-05, + "loss": 1.2178, + "step": 6248 + }, + { + "epoch": 0.3918113988337827, + "grad_norm": 3.020298480987549, + "learning_rate": 1.3885387549780646e-05, + "loss": 1.2567, + "step": 6249 + }, + { + "epoch": 0.391874098689573, + "grad_norm": 2.7711598873138428, + "learning_rate": 1.3883516257521429e-05, + "loss": 1.0799, + "step": 6250 + }, + { + "epoch": 0.39193679854536334, + "grad_norm": 2.926136016845703, + "learning_rate": 1.3881644805105827e-05, + "loss": 1.1429, + "step": 6251 + }, + { + "epoch": 0.39199949840115367, + "grad_norm": 2.9122581481933594, + "learning_rate": 1.3879773192611028e-05, + "loss": 1.2306, + "step": 6252 + }, + { + "epoch": 0.392062198256944, + "grad_norm": 2.566502571105957, + "learning_rate": 1.3877901420114207e-05, + "loss": 1.0157, + "step": 6253 + }, + { + "epoch": 
0.3921248981127343, + "grad_norm": 2.7351608276367188, + "learning_rate": 1.387602948769257e-05, + "loss": 1.2162, + "step": 6254 + }, + { + "epoch": 0.39218759796852465, + "grad_norm": 3.0208046436309814, + "learning_rate": 1.3874157395423304e-05, + "loss": 1.1132, + "step": 6255 + }, + { + "epoch": 0.39225029782431503, + "grad_norm": 2.8470494747161865, + "learning_rate": 1.387228514338362e-05, + "loss": 1.2005, + "step": 6256 + }, + { + "epoch": 0.39231299768010536, + "grad_norm": 2.893247365951538, + "learning_rate": 1.3870412731650724e-05, + "loss": 1.1303, + "step": 6257 + }, + { + "epoch": 0.3923756975358957, + "grad_norm": 2.90915584564209, + "learning_rate": 1.386854016030184e-05, + "loss": 1.1054, + "step": 6258 + }, + { + "epoch": 0.392438397391686, + "grad_norm": 2.9920010566711426, + "learning_rate": 1.3866667429414188e-05, + "loss": 1.2774, + "step": 6259 + }, + { + "epoch": 0.39250109724747634, + "grad_norm": 2.8770811557769775, + "learning_rate": 1.3864794539065007e-05, + "loss": 1.2446, + "step": 6260 + }, + { + "epoch": 0.39256379710326667, + "grad_norm": 3.05368971824646, + "learning_rate": 1.3862921489331526e-05, + "loss": 1.1238, + "step": 6261 + }, + { + "epoch": 0.392626496959057, + "grad_norm": 2.672194004058838, + "learning_rate": 1.3861048280291001e-05, + "loss": 1.2292, + "step": 6262 + }, + { + "epoch": 0.3926891968148473, + "grad_norm": 3.159641742706299, + "learning_rate": 1.3859174912020669e-05, + "loss": 1.0053, + "step": 6263 + }, + { + "epoch": 0.39275189667063765, + "grad_norm": 3.589210271835327, + "learning_rate": 1.3857301384597796e-05, + "loss": 1.1931, + "step": 6264 + }, + { + "epoch": 0.392814596526428, + "grad_norm": 3.1645700931549072, + "learning_rate": 1.3855427698099649e-05, + "loss": 1.1461, + "step": 6265 + }, + { + "epoch": 0.3928772963822183, + "grad_norm": 3.0030534267425537, + "learning_rate": 1.3853553852603492e-05, + "loss": 0.9822, + "step": 6266 + }, + { + "epoch": 0.3929399962380086, + "grad_norm": 2.7133264541625977, + "learning_rate": 1.385167984818661e-05, + "loss": 1.0309, + "step": 6267 + }, + { + "epoch": 0.393002696093799, + "grad_norm": 2.9005823135375977, + "learning_rate": 1.3849805684926279e-05, + "loss": 1.0535, + "step": 6268 + }, + { + "epoch": 0.39306539594958934, + "grad_norm": 3.1808178424835205, + "learning_rate": 1.3847931362899796e-05, + "loss": 0.9864, + "step": 6269 + }, + { + "epoch": 0.39312809580537966, + "grad_norm": 2.9003403186798096, + "learning_rate": 1.3846056882184453e-05, + "loss": 1.043, + "step": 6270 + }, + { + "epoch": 0.39319079566117, + "grad_norm": 3.2600643634796143, + "learning_rate": 1.3844182242857557e-05, + "loss": 1.0455, + "step": 6271 + }, + { + "epoch": 0.3932534955169603, + "grad_norm": 2.8344666957855225, + "learning_rate": 1.384230744499642e-05, + "loss": 1.2725, + "step": 6272 + }, + { + "epoch": 0.39331619537275064, + "grad_norm": 3.0888073444366455, + "learning_rate": 1.384043248867835e-05, + "loss": 1.1996, + "step": 6273 + }, + { + "epoch": 0.39337889522854097, + "grad_norm": 2.8058784008026123, + "learning_rate": 1.3838557373980683e-05, + "loss": 1.1368, + "step": 6274 + }, + { + "epoch": 0.3934415950843313, + "grad_norm": 3.082052230834961, + "learning_rate": 1.3836682100980739e-05, + "loss": 1.1212, + "step": 6275 + }, + { + "epoch": 0.3935042949401216, + "grad_norm": 2.8323822021484375, + "learning_rate": 1.383480666975586e-05, + "loss": 1.2719, + "step": 6276 + }, + { + "epoch": 0.39356699479591195, + "grad_norm": 3.178225040435791, + "learning_rate": 
1.3832931080383383e-05, + "loss": 0.9758, + "step": 6277 + }, + { + "epoch": 0.3936296946517023, + "grad_norm": 3.0595204830169678, + "learning_rate": 1.3831055332940665e-05, + "loss": 1.1294, + "step": 6278 + }, + { + "epoch": 0.39369239450749266, + "grad_norm": 3.086498737335205, + "learning_rate": 1.3829179427505052e-05, + "loss": 1.0412, + "step": 6279 + }, + { + "epoch": 0.393755094363283, + "grad_norm": 3.1617648601531982, + "learning_rate": 1.3827303364153918e-05, + "loss": 1.0228, + "step": 6280 + }, + { + "epoch": 0.3938177942190733, + "grad_norm": 2.980397939682007, + "learning_rate": 1.3825427142964625e-05, + "loss": 1.131, + "step": 6281 + }, + { + "epoch": 0.39388049407486364, + "grad_norm": 2.905668020248413, + "learning_rate": 1.3823550764014548e-05, + "loss": 1.2327, + "step": 6282 + }, + { + "epoch": 0.39394319393065397, + "grad_norm": 2.7706241607666016, + "learning_rate": 1.382167422738107e-05, + "loss": 1.2154, + "step": 6283 + }, + { + "epoch": 0.3940058937864443, + "grad_norm": 2.940549612045288, + "learning_rate": 1.3819797533141581e-05, + "loss": 1.1481, + "step": 6284 + }, + { + "epoch": 0.3940685936422346, + "grad_norm": 2.738442897796631, + "learning_rate": 1.3817920681373474e-05, + "loss": 1.1219, + "step": 6285 + }, + { + "epoch": 0.39413129349802495, + "grad_norm": 3.174576759338379, + "learning_rate": 1.3816043672154151e-05, + "loss": 1.0084, + "step": 6286 + }, + { + "epoch": 0.3941939933538153, + "grad_norm": 2.747013807296753, + "learning_rate": 1.3814166505561024e-05, + "loss": 1.221, + "step": 6287 + }, + { + "epoch": 0.3942566932096056, + "grad_norm": 2.9633190631866455, + "learning_rate": 1.3812289181671501e-05, + "loss": 1.1224, + "step": 6288 + }, + { + "epoch": 0.39431939306539593, + "grad_norm": 3.134289264678955, + "learning_rate": 1.3810411700563005e-05, + "loss": 1.0545, + "step": 6289 + }, + { + "epoch": 0.39438209292118626, + "grad_norm": 3.0034120082855225, + "learning_rate": 1.3808534062312964e-05, + "loss": 1.1048, + "step": 6290 + }, + { + "epoch": 0.39444479277697664, + "grad_norm": 3.072474241256714, + "learning_rate": 1.3806656266998814e-05, + "loss": 1.2482, + "step": 6291 + }, + { + "epoch": 0.39450749263276697, + "grad_norm": 3.0794003009796143, + "learning_rate": 1.3804778314697994e-05, + "loss": 1.0724, + "step": 6292 + }, + { + "epoch": 0.3945701924885573, + "grad_norm": 2.9599721431732178, + "learning_rate": 1.3802900205487948e-05, + "loss": 1.1011, + "step": 6293 + }, + { + "epoch": 0.3946328923443476, + "grad_norm": 2.9528586864471436, + "learning_rate": 1.380102193944613e-05, + "loss": 1.256, + "step": 6294 + }, + { + "epoch": 0.39469559220013795, + "grad_norm": 3.1045761108398438, + "learning_rate": 1.3799143516650002e-05, + "loss": 0.9561, + "step": 6295 + }, + { + "epoch": 0.3947582920559283, + "grad_norm": 3.393165349960327, + "learning_rate": 1.3797264937177033e-05, + "loss": 0.9822, + "step": 6296 + }, + { + "epoch": 0.3948209919117186, + "grad_norm": 3.1690614223480225, + "learning_rate": 1.3795386201104689e-05, + "loss": 1.0813, + "step": 6297 + }, + { + "epoch": 0.3948836917675089, + "grad_norm": 3.1179535388946533, + "learning_rate": 1.3793507308510452e-05, + "loss": 1.057, + "step": 6298 + }, + { + "epoch": 0.39494639162329925, + "grad_norm": 2.9506735801696777, + "learning_rate": 1.379162825947181e-05, + "loss": 1.2637, + "step": 6299 + }, + { + "epoch": 0.3950090914790896, + "grad_norm": 3.234065055847168, + "learning_rate": 1.3789749054066252e-05, + "loss": 1.0762, + "step": 6300 + }, + { + "epoch": 
0.3950717913348799, + "grad_norm": 2.9587349891662598, + "learning_rate": 1.3787869692371276e-05, + "loss": 1.2329, + "step": 6301 + }, + { + "epoch": 0.39513449119067023, + "grad_norm": 2.7152109146118164, + "learning_rate": 1.3785990174464392e-05, + "loss": 1.2037, + "step": 6302 + }, + { + "epoch": 0.3951971910464606, + "grad_norm": 3.215794801712036, + "learning_rate": 1.3784110500423104e-05, + "loss": 1.1862, + "step": 6303 + }, + { + "epoch": 0.39525989090225094, + "grad_norm": 3.2124831676483154, + "learning_rate": 1.3782230670324934e-05, + "loss": 1.0317, + "step": 6304 + }, + { + "epoch": 0.39532259075804127, + "grad_norm": 2.9000356197357178, + "learning_rate": 1.378035068424741e-05, + "loss": 1.1667, + "step": 6305 + }, + { + "epoch": 0.3953852906138316, + "grad_norm": 3.0786526203155518, + "learning_rate": 1.3778470542268055e-05, + "loss": 1.0302, + "step": 6306 + }, + { + "epoch": 0.3954479904696219, + "grad_norm": 2.992799997329712, + "learning_rate": 1.3776590244464412e-05, + "loss": 0.9142, + "step": 6307 + }, + { + "epoch": 0.39551069032541225, + "grad_norm": 2.9734745025634766, + "learning_rate": 1.3774709790914021e-05, + "loss": 1.332, + "step": 6308 + }, + { + "epoch": 0.3955733901812026, + "grad_norm": 2.789263963699341, + "learning_rate": 1.3772829181694434e-05, + "loss": 1.2056, + "step": 6309 + }, + { + "epoch": 0.3956360900369929, + "grad_norm": 2.857518196105957, + "learning_rate": 1.3770948416883205e-05, + "loss": 1.3362, + "step": 6310 + }, + { + "epoch": 0.39569878989278323, + "grad_norm": 3.078099250793457, + "learning_rate": 1.3769067496557901e-05, + "loss": 1.0707, + "step": 6311 + }, + { + "epoch": 0.39576148974857356, + "grad_norm": 2.8971197605133057, + "learning_rate": 1.3767186420796088e-05, + "loss": 1.122, + "step": 6312 + }, + { + "epoch": 0.3958241896043639, + "grad_norm": 3.0012717247009277, + "learning_rate": 1.3765305189675345e-05, + "loss": 1.0048, + "step": 6313 + }, + { + "epoch": 0.39588688946015427, + "grad_norm": 3.2734334468841553, + "learning_rate": 1.3763423803273245e-05, + "loss": 1.1248, + "step": 6314 + }, + { + "epoch": 0.3959495893159446, + "grad_norm": 3.0637993812561035, + "learning_rate": 1.3761542261667388e-05, + "loss": 1.1735, + "step": 6315 + }, + { + "epoch": 0.3960122891717349, + "grad_norm": 3.419674873352051, + "learning_rate": 1.3759660564935363e-05, + "loss": 0.927, + "step": 6316 + }, + { + "epoch": 0.39607498902752525, + "grad_norm": 3.493014097213745, + "learning_rate": 1.3757778713154772e-05, + "loss": 1.1298, + "step": 6317 + }, + { + "epoch": 0.3961376888833156, + "grad_norm": 3.374001979827881, + "learning_rate": 1.3755896706403224e-05, + "loss": 1.137, + "step": 6318 + }, + { + "epoch": 0.3962003887391059, + "grad_norm": 2.958890199661255, + "learning_rate": 1.375401454475833e-05, + "loss": 1.2441, + "step": 6319 + }, + { + "epoch": 0.39626308859489623, + "grad_norm": 2.878674268722534, + "learning_rate": 1.3752132228297715e-05, + "loss": 1.0099, + "step": 6320 + }, + { + "epoch": 0.39632578845068656, + "grad_norm": 2.7147269248962402, + "learning_rate": 1.3750249757098999e-05, + "loss": 1.1288, + "step": 6321 + }, + { + "epoch": 0.3963884883064769, + "grad_norm": 3.079490900039673, + "learning_rate": 1.3748367131239822e-05, + "loss": 1.1728, + "step": 6322 + }, + { + "epoch": 0.3964511881622672, + "grad_norm": 2.934997081756592, + "learning_rate": 1.374648435079782e-05, + "loss": 1.1962, + "step": 6323 + }, + { + "epoch": 0.39651388801805754, + "grad_norm": 3.077303171157837, + "learning_rate": 
1.3744601415850637e-05, + "loss": 0.9609, + "step": 6324 + }, + { + "epoch": 0.39657658787384786, + "grad_norm": 2.872638463973999, + "learning_rate": 1.3742718326475933e-05, + "loss": 1.0819, + "step": 6325 + }, + { + "epoch": 0.39663928772963825, + "grad_norm": 3.1493382453918457, + "learning_rate": 1.3740835082751358e-05, + "loss": 1.138, + "step": 6326 + }, + { + "epoch": 0.3967019875854286, + "grad_norm": 2.7364325523376465, + "learning_rate": 1.3738951684754585e-05, + "loss": 1.1319, + "step": 6327 + }, + { + "epoch": 0.3967646874412189, + "grad_norm": 2.8105239868164062, + "learning_rate": 1.3737068132563275e-05, + "loss": 1.1985, + "step": 6328 + }, + { + "epoch": 0.3968273872970092, + "grad_norm": 2.666271448135376, + "learning_rate": 1.3735184426255117e-05, + "loss": 1.1874, + "step": 6329 + }, + { + "epoch": 0.39689008715279955, + "grad_norm": 2.8550307750701904, + "learning_rate": 1.373330056590779e-05, + "loss": 1.0765, + "step": 6330 + }, + { + "epoch": 0.3969527870085899, + "grad_norm": 3.0387322902679443, + "learning_rate": 1.3731416551598984e-05, + "loss": 1.0656, + "step": 6331 + }, + { + "epoch": 0.3970154868643802, + "grad_norm": 3.1054136753082275, + "learning_rate": 1.3729532383406394e-05, + "loss": 1.0325, + "step": 6332 + }, + { + "epoch": 0.39707818672017053, + "grad_norm": 3.003427505493164, + "learning_rate": 1.372764806140773e-05, + "loss": 1.0903, + "step": 6333 + }, + { + "epoch": 0.39714088657596086, + "grad_norm": 3.0389397144317627, + "learning_rate": 1.3725763585680693e-05, + "loss": 1.2812, + "step": 6334 + }, + { + "epoch": 0.3972035864317512, + "grad_norm": 3.110966444015503, + "learning_rate": 1.3723878956303005e-05, + "loss": 1.1491, + "step": 6335 + }, + { + "epoch": 0.3972662862875415, + "grad_norm": 2.8763504028320312, + "learning_rate": 1.3721994173352386e-05, + "loss": 1.0566, + "step": 6336 + }, + { + "epoch": 0.3973289861433319, + "grad_norm": 2.988402843475342, + "learning_rate": 1.3720109236906564e-05, + "loss": 1.0533, + "step": 6337 + }, + { + "epoch": 0.3973916859991222, + "grad_norm": 2.9723002910614014, + "learning_rate": 1.3718224147043276e-05, + "loss": 1.2459, + "step": 6338 + }, + { + "epoch": 0.39745438585491255, + "grad_norm": 3.0251636505126953, + "learning_rate": 1.371633890384026e-05, + "loss": 1.1264, + "step": 6339 + }, + { + "epoch": 0.3975170857107029, + "grad_norm": 2.9011876583099365, + "learning_rate": 1.3714453507375267e-05, + "loss": 1.0363, + "step": 6340 + }, + { + "epoch": 0.3975797855664932, + "grad_norm": 2.833113431930542, + "learning_rate": 1.3712567957726048e-05, + "loss": 1.1277, + "step": 6341 + }, + { + "epoch": 0.39764248542228353, + "grad_norm": 2.933448314666748, + "learning_rate": 1.3710682254970366e-05, + "loss": 1.0733, + "step": 6342 + }, + { + "epoch": 0.39770518527807386, + "grad_norm": 3.160203218460083, + "learning_rate": 1.3708796399185985e-05, + "loss": 1.3011, + "step": 6343 + }, + { + "epoch": 0.3977678851338642, + "grad_norm": 3.3332736492156982, + "learning_rate": 1.3706910390450679e-05, + "loss": 1.0217, + "step": 6344 + }, + { + "epoch": 0.3978305849896545, + "grad_norm": 3.263803720474243, + "learning_rate": 1.3705024228842223e-05, + "loss": 1.2873, + "step": 6345 + }, + { + "epoch": 0.39789328484544484, + "grad_norm": 3.158966302871704, + "learning_rate": 1.3703137914438408e-05, + "loss": 1.0163, + "step": 6346 + }, + { + "epoch": 0.39795598470123517, + "grad_norm": 3.0893449783325195, + "learning_rate": 1.3701251447317025e-05, + "loss": 0.9841, + "step": 6347 + }, + { + "epoch": 
0.3980186845570255, + "grad_norm": 3.040210723876953, + "learning_rate": 1.369936482755587e-05, + "loss": 1.1726, + "step": 6348 + }, + { + "epoch": 0.3980813844128159, + "grad_norm": 3.0407209396362305, + "learning_rate": 1.3697478055232749e-05, + "loss": 0.9361, + "step": 6349 + }, + { + "epoch": 0.3981440842686062, + "grad_norm": 2.9187872409820557, + "learning_rate": 1.369559113042547e-05, + "loss": 1.1821, + "step": 6350 + }, + { + "epoch": 0.39820678412439653, + "grad_norm": 2.638563871383667, + "learning_rate": 1.3693704053211853e-05, + "loss": 1.1549, + "step": 6351 + }, + { + "epoch": 0.39826948398018686, + "grad_norm": 2.8796608448028564, + "learning_rate": 1.3691816823669717e-05, + "loss": 1.0889, + "step": 6352 + }, + { + "epoch": 0.3983321838359772, + "grad_norm": 3.037411689758301, + "learning_rate": 1.3689929441876899e-05, + "loss": 1.0711, + "step": 6353 + }, + { + "epoch": 0.3983948836917675, + "grad_norm": 3.140369415283203, + "learning_rate": 1.3688041907911225e-05, + "loss": 1.1991, + "step": 6354 + }, + { + "epoch": 0.39845758354755784, + "grad_norm": 3.009690284729004, + "learning_rate": 1.368615422185054e-05, + "loss": 1.2425, + "step": 6355 + }, + { + "epoch": 0.39852028340334816, + "grad_norm": 2.970268964767456, + "learning_rate": 1.3684266383772697e-05, + "loss": 1.0777, + "step": 6356 + }, + { + "epoch": 0.3985829832591385, + "grad_norm": 3.2784905433654785, + "learning_rate": 1.3682378393755549e-05, + "loss": 1.1815, + "step": 6357 + }, + { + "epoch": 0.3986456831149288, + "grad_norm": 3.3843700885772705, + "learning_rate": 1.3680490251876954e-05, + "loss": 0.9926, + "step": 6358 + }, + { + "epoch": 0.39870838297071914, + "grad_norm": 2.990938425064087, + "learning_rate": 1.3678601958214779e-05, + "loss": 0.9453, + "step": 6359 + }, + { + "epoch": 0.3987710828265095, + "grad_norm": 2.8628597259521484, + "learning_rate": 1.36767135128469e-05, + "loss": 1.0441, + "step": 6360 + }, + { + "epoch": 0.39883378268229985, + "grad_norm": 3.1199698448181152, + "learning_rate": 1.3674824915851193e-05, + "loss": 1.2958, + "step": 6361 + }, + { + "epoch": 0.3988964825380902, + "grad_norm": 2.8933258056640625, + "learning_rate": 1.3672936167305549e-05, + "loss": 1.1635, + "step": 6362 + }, + { + "epoch": 0.3989591823938805, + "grad_norm": 2.7906851768493652, + "learning_rate": 1.3671047267287855e-05, + "loss": 1.113, + "step": 6363 + }, + { + "epoch": 0.39902188224967083, + "grad_norm": 3.3131885528564453, + "learning_rate": 1.3669158215876013e-05, + "loss": 1.3102, + "step": 6364 + }, + { + "epoch": 0.39908458210546116, + "grad_norm": 3.0494918823242188, + "learning_rate": 1.3667269013147924e-05, + "loss": 1.0861, + "step": 6365 + }, + { + "epoch": 0.3991472819612515, + "grad_norm": 2.8851301670074463, + "learning_rate": 1.3665379659181501e-05, + "loss": 1.2687, + "step": 6366 + }, + { + "epoch": 0.3992099818170418, + "grad_norm": 3.2645866870880127, + "learning_rate": 1.3663490154054661e-05, + "loss": 1.1815, + "step": 6367 + }, + { + "epoch": 0.39927268167283214, + "grad_norm": 3.1476707458496094, + "learning_rate": 1.3661600497845329e-05, + "loss": 1.2572, + "step": 6368 + }, + { + "epoch": 0.39933538152862247, + "grad_norm": 2.949082136154175, + "learning_rate": 1.3659710690631432e-05, + "loss": 1.1686, + "step": 6369 + }, + { + "epoch": 0.3993980813844128, + "grad_norm": 3.1134583950042725, + "learning_rate": 1.3657820732490902e-05, + "loss": 0.9342, + "step": 6370 + }, + { + "epoch": 0.3994607812402031, + "grad_norm": 2.7938590049743652, + "learning_rate": 
1.3655930623501692e-05, + "loss": 1.0362, + "step": 6371 + }, + { + "epoch": 0.3995234810959935, + "grad_norm": 2.6742260456085205, + "learning_rate": 1.365404036374174e-05, + "loss": 1.258, + "step": 6372 + }, + { + "epoch": 0.39958618095178383, + "grad_norm": 2.87787127494812, + "learning_rate": 1.3652149953289002e-05, + "loss": 1.0754, + "step": 6373 + }, + { + "epoch": 0.39964888080757416, + "grad_norm": 2.969392776489258, + "learning_rate": 1.3650259392221444e-05, + "loss": 1.0657, + "step": 6374 + }, + { + "epoch": 0.3997115806633645, + "grad_norm": 3.047607421875, + "learning_rate": 1.3648368680617025e-05, + "loss": 1.1713, + "step": 6375 + }, + { + "epoch": 0.3997742805191548, + "grad_norm": 3.1055805683135986, + "learning_rate": 1.3646477818553725e-05, + "loss": 1.175, + "step": 6376 + }, + { + "epoch": 0.39983698037494514, + "grad_norm": 2.859954357147217, + "learning_rate": 1.3644586806109521e-05, + "loss": 1.0023, + "step": 6377 + }, + { + "epoch": 0.39989968023073547, + "grad_norm": 3.088850259780884, + "learning_rate": 1.3642695643362398e-05, + "loss": 1.1724, + "step": 6378 + }, + { + "epoch": 0.3999623800865258, + "grad_norm": 3.302955150604248, + "learning_rate": 1.3640804330390345e-05, + "loss": 1.0623, + "step": 6379 + }, + { + "epoch": 0.4000250799423161, + "grad_norm": 3.0662379264831543, + "learning_rate": 1.3638912867271365e-05, + "loss": 1.269, + "step": 6380 + }, + { + "epoch": 0.40008777979810645, + "grad_norm": 3.325117349624634, + "learning_rate": 1.363702125408346e-05, + "loss": 0.9267, + "step": 6381 + }, + { + "epoch": 0.4001504796538968, + "grad_norm": 3.2098278999328613, + "learning_rate": 1.3635129490904636e-05, + "loss": 1.1374, + "step": 6382 + }, + { + "epoch": 0.4002131795096871, + "grad_norm": 3.3330421447753906, + "learning_rate": 1.3633237577812917e-05, + "loss": 1.1457, + "step": 6383 + }, + { + "epoch": 0.4002758793654775, + "grad_norm": 3.157597303390503, + "learning_rate": 1.363134551488632e-05, + "loss": 1.1786, + "step": 6384 + }, + { + "epoch": 0.4003385792212678, + "grad_norm": 2.986295461654663, + "learning_rate": 1.3629453302202875e-05, + "loss": 1.1419, + "step": 6385 + }, + { + "epoch": 0.40040127907705814, + "grad_norm": 3.072444438934326, + "learning_rate": 1.3627560939840616e-05, + "loss": 1.0473, + "step": 6386 + }, + { + "epoch": 0.40046397893284846, + "grad_norm": 3.1980648040771484, + "learning_rate": 1.362566842787759e-05, + "loss": 1.0702, + "step": 6387 + }, + { + "epoch": 0.4005266787886388, + "grad_norm": 3.0219056606292725, + "learning_rate": 1.3623775766391836e-05, + "loss": 1.3547, + "step": 6388 + }, + { + "epoch": 0.4005893786444291, + "grad_norm": 2.9702649116516113, + "learning_rate": 1.3621882955461411e-05, + "loss": 1.1416, + "step": 6389 + }, + { + "epoch": 0.40065207850021944, + "grad_norm": 2.929595947265625, + "learning_rate": 1.3619989995164378e-05, + "loss": 1.203, + "step": 6390 + }, + { + "epoch": 0.40071477835600977, + "grad_norm": 2.9952285289764404, + "learning_rate": 1.3618096885578798e-05, + "loss": 1.0944, + "step": 6391 + }, + { + "epoch": 0.4007774782118001, + "grad_norm": 2.982046127319336, + "learning_rate": 1.3616203626782743e-05, + "loss": 1.1704, + "step": 6392 + }, + { + "epoch": 0.4008401780675904, + "grad_norm": 3.0384223461151123, + "learning_rate": 1.3614310218854294e-05, + "loss": 1.1268, + "step": 6393 + }, + { + "epoch": 0.40090287792338075, + "grad_norm": 2.708839178085327, + "learning_rate": 1.3612416661871532e-05, + "loss": 1.1758, + "step": 6394 + }, + { + "epoch": 
0.40096557777917113, + "grad_norm": 2.7870731353759766, + "learning_rate": 1.3610522955912551e-05, + "loss": 1.1661, + "step": 6395 + }, + { + "epoch": 0.40102827763496146, + "grad_norm": 2.893267869949341, + "learning_rate": 1.3608629101055443e-05, + "loss": 1.0637, + "step": 6396 + }, + { + "epoch": 0.4010909774907518, + "grad_norm": 2.9702186584472656, + "learning_rate": 1.3606735097378315e-05, + "loss": 1.2216, + "step": 6397 + }, + { + "epoch": 0.4011536773465421, + "grad_norm": 3.2990365028381348, + "learning_rate": 1.3604840944959275e-05, + "loss": 1.0214, + "step": 6398 + }, + { + "epoch": 0.40121637720233244, + "grad_norm": 3.063006639480591, + "learning_rate": 1.3602946643876436e-05, + "loss": 1.1455, + "step": 6399 + }, + { + "epoch": 0.40127907705812277, + "grad_norm": 2.987504243850708, + "learning_rate": 1.360105219420792e-05, + "loss": 1.2395, + "step": 6400 + }, + { + "epoch": 0.4013417769139131, + "grad_norm": 2.6879830360412598, + "learning_rate": 1.3599157596031853e-05, + "loss": 1.101, + "step": 6401 + }, + { + "epoch": 0.4014044767697034, + "grad_norm": 3.0964324474334717, + "learning_rate": 1.3597262849426371e-05, + "loss": 1.0146, + "step": 6402 + }, + { + "epoch": 0.40146717662549375, + "grad_norm": 2.981854200363159, + "learning_rate": 1.3595367954469612e-05, + "loss": 1.2352, + "step": 6403 + }, + { + "epoch": 0.4015298764812841, + "grad_norm": 3.042703628540039, + "learning_rate": 1.3593472911239723e-05, + "loss": 1.0856, + "step": 6404 + }, + { + "epoch": 0.4015925763370744, + "grad_norm": 2.755108118057251, + "learning_rate": 1.359157771981485e-05, + "loss": 1.2107, + "step": 6405 + }, + { + "epoch": 0.40165527619286473, + "grad_norm": 3.2460503578186035, + "learning_rate": 1.3589682380273159e-05, + "loss": 1.0556, + "step": 6406 + }, + { + "epoch": 0.4017179760486551, + "grad_norm": 3.1467885971069336, + "learning_rate": 1.358778689269281e-05, + "loss": 1.1686, + "step": 6407 + }, + { + "epoch": 0.40178067590444544, + "grad_norm": 3.0374791622161865, + "learning_rate": 1.3585891257151971e-05, + "loss": 1.0651, + "step": 6408 + }, + { + "epoch": 0.40184337576023577, + "grad_norm": 3.124704599380493, + "learning_rate": 1.358399547372882e-05, + "loss": 1.2547, + "step": 6409 + }, + { + "epoch": 0.4019060756160261, + "grad_norm": 3.036905527114868, + "learning_rate": 1.3582099542501542e-05, + "loss": 1.1744, + "step": 6410 + }, + { + "epoch": 0.4019687754718164, + "grad_norm": 3.005028486251831, + "learning_rate": 1.3580203463548319e-05, + "loss": 1.0123, + "step": 6411 + }, + { + "epoch": 0.40203147532760675, + "grad_norm": 3.0352261066436768, + "learning_rate": 1.3578307236947348e-05, + "loss": 1.0122, + "step": 6412 + }, + { + "epoch": 0.4020941751833971, + "grad_norm": 2.4588236808776855, + "learning_rate": 1.3576410862776832e-05, + "loss": 1.2657, + "step": 6413 + }, + { + "epoch": 0.4021568750391874, + "grad_norm": 3.1490955352783203, + "learning_rate": 1.3574514341114974e-05, + "loss": 1.0204, + "step": 6414 + }, + { + "epoch": 0.4022195748949777, + "grad_norm": 3.1615586280822754, + "learning_rate": 1.3572617672039994e-05, + "loss": 1.0151, + "step": 6415 + }, + { + "epoch": 0.40228227475076805, + "grad_norm": 2.782874822616577, + "learning_rate": 1.35707208556301e-05, + "loss": 1.182, + "step": 6416 + }, + { + "epoch": 0.4023449746065584, + "grad_norm": 3.1044840812683105, + "learning_rate": 1.3568823891963523e-05, + "loss": 1.0671, + "step": 6417 + }, + { + "epoch": 0.40240767446234876, + "grad_norm": 2.7562246322631836, + "learning_rate": 
1.3566926781118495e-05, + "loss": 1.2055, + "step": 6418 + }, + { + "epoch": 0.4024703743181391, + "grad_norm": 2.782029390335083, + "learning_rate": 1.356502952317325e-05, + "loss": 1.0844, + "step": 6419 + }, + { + "epoch": 0.4025330741739294, + "grad_norm": 3.1186649799346924, + "learning_rate": 1.3563132118206031e-05, + "loss": 1.3132, + "step": 6420 + }, + { + "epoch": 0.40259577402971974, + "grad_norm": 3.1656436920166016, + "learning_rate": 1.3561234566295084e-05, + "loss": 1.138, + "step": 6421 + }, + { + "epoch": 0.40265847388551007, + "grad_norm": 2.954268455505371, + "learning_rate": 1.3559336867518674e-05, + "loss": 1.1322, + "step": 6422 + }, + { + "epoch": 0.4027211737413004, + "grad_norm": 2.9218637943267822, + "learning_rate": 1.3557439021955055e-05, + "loss": 1.1956, + "step": 6423 + }, + { + "epoch": 0.4027838735970907, + "grad_norm": 2.88554048538208, + "learning_rate": 1.3555541029682492e-05, + "loss": 1.1387, + "step": 6424 + }, + { + "epoch": 0.40284657345288105, + "grad_norm": 2.8823349475860596, + "learning_rate": 1.3553642890779263e-05, + "loss": 1.1138, + "step": 6425 + }, + { + "epoch": 0.4029092733086714, + "grad_norm": 3.0901401042938232, + "learning_rate": 1.3551744605323649e-05, + "loss": 1.021, + "step": 6426 + }, + { + "epoch": 0.4029719731644617, + "grad_norm": 2.825695037841797, + "learning_rate": 1.354984617339393e-05, + "loss": 1.2793, + "step": 6427 + }, + { + "epoch": 0.40303467302025203, + "grad_norm": 3.584963798522949, + "learning_rate": 1.3547947595068402e-05, + "loss": 1.0538, + "step": 6428 + }, + { + "epoch": 0.40309737287604236, + "grad_norm": 2.9545700550079346, + "learning_rate": 1.3546048870425356e-05, + "loss": 1.129, + "step": 6429 + }, + { + "epoch": 0.40316007273183274, + "grad_norm": 3.0731451511383057, + "learning_rate": 1.3544149999543103e-05, + "loss": 1.2083, + "step": 6430 + }, + { + "epoch": 0.40322277258762307, + "grad_norm": 2.9824671745300293, + "learning_rate": 1.3542250982499949e-05, + "loss": 1.1041, + "step": 6431 + }, + { + "epoch": 0.4032854724434134, + "grad_norm": 3.0283994674682617, + "learning_rate": 1.3540351819374209e-05, + "loss": 1.1075, + "step": 6432 + }, + { + "epoch": 0.4033481722992037, + "grad_norm": 3.00024151802063, + "learning_rate": 1.3538452510244207e-05, + "loss": 1.1401, + "step": 6433 + }, + { + "epoch": 0.40341087215499405, + "grad_norm": 3.0369508266448975, + "learning_rate": 1.3536553055188266e-05, + "loss": 1.1698, + "step": 6434 + }, + { + "epoch": 0.4034735720107844, + "grad_norm": 2.7318809032440186, + "learning_rate": 1.3534653454284726e-05, + "loss": 1.0819, + "step": 6435 + }, + { + "epoch": 0.4035362718665747, + "grad_norm": 3.144602060317993, + "learning_rate": 1.3532753707611919e-05, + "loss": 1.1583, + "step": 6436 + }, + { + "epoch": 0.40359897172236503, + "grad_norm": 3.1740450859069824, + "learning_rate": 1.3530853815248198e-05, + "loss": 0.981, + "step": 6437 + }, + { + "epoch": 0.40366167157815536, + "grad_norm": 3.2256112098693848, + "learning_rate": 1.3528953777271909e-05, + "loss": 0.9346, + "step": 6438 + }, + { + "epoch": 0.4037243714339457, + "grad_norm": 3.364980459213257, + "learning_rate": 1.3527053593761415e-05, + "loss": 1.2557, + "step": 6439 + }, + { + "epoch": 0.403787071289736, + "grad_norm": 2.9345178604125977, + "learning_rate": 1.3525153264795075e-05, + "loss": 1.1363, + "step": 6440 + }, + { + "epoch": 0.4038497711455264, + "grad_norm": 2.9170234203338623, + "learning_rate": 1.352325279045126e-05, + "loss": 1.0946, + "step": 6441 + }, + { + "epoch": 
0.4039124710013167, + "grad_norm": 3.2472586631774902, + "learning_rate": 1.3521352170808349e-05, + "loss": 1.0174, + "step": 6442 + }, + { + "epoch": 0.40397517085710705, + "grad_norm": 2.8384249210357666, + "learning_rate": 1.3519451405944717e-05, + "loss": 1.0844, + "step": 6443 + }, + { + "epoch": 0.4040378707128974, + "grad_norm": 3.0031538009643555, + "learning_rate": 1.3517550495938755e-05, + "loss": 1.0493, + "step": 6444 + }, + { + "epoch": 0.4041005705686877, + "grad_norm": 2.860435962677002, + "learning_rate": 1.3515649440868859e-05, + "loss": 1.0246, + "step": 6445 + }, + { + "epoch": 0.404163270424478, + "grad_norm": 2.8767306804656982, + "learning_rate": 1.3513748240813429e-05, + "loss": 1.1795, + "step": 6446 + }, + { + "epoch": 0.40422597028026835, + "grad_norm": 3.089536428451538, + "learning_rate": 1.3511846895850864e-05, + "loss": 1.2032, + "step": 6447 + }, + { + "epoch": 0.4042886701360587, + "grad_norm": 3.1729209423065186, + "learning_rate": 1.3509945406059584e-05, + "loss": 1.2728, + "step": 6448 + }, + { + "epoch": 0.404351369991849, + "grad_norm": 2.8809516429901123, + "learning_rate": 1.3508043771517998e-05, + "loss": 1.1753, + "step": 6449 + }, + { + "epoch": 0.40441406984763933, + "grad_norm": 3.2222390174865723, + "learning_rate": 1.3506141992304535e-05, + "loss": 1.0338, + "step": 6450 + }, + { + "epoch": 0.40447676970342966, + "grad_norm": 3.3667850494384766, + "learning_rate": 1.3504240068497623e-05, + "loss": 1.1914, + "step": 6451 + }, + { + "epoch": 0.40453946955922, + "grad_norm": 2.9360098838806152, + "learning_rate": 1.3502338000175695e-05, + "loss": 1.1861, + "step": 6452 + }, + { + "epoch": 0.40460216941501037, + "grad_norm": 3.049827814102173, + "learning_rate": 1.35004357874172e-05, + "loss": 1.0878, + "step": 6453 + }, + { + "epoch": 0.4046648692708007, + "grad_norm": 3.252020835876465, + "learning_rate": 1.3498533430300576e-05, + "loss": 1.1133, + "step": 6454 + }, + { + "epoch": 0.404727569126591, + "grad_norm": 3.3454811573028564, + "learning_rate": 1.3496630928904285e-05, + "loss": 1.1791, + "step": 6455 + }, + { + "epoch": 0.40479026898238135, + "grad_norm": 3.1780664920806885, + "learning_rate": 1.3494728283306777e-05, + "loss": 1.1288, + "step": 6456 + }, + { + "epoch": 0.4048529688381717, + "grad_norm": 3.0114662647247314, + "learning_rate": 1.3492825493586526e-05, + "loss": 1.2593, + "step": 6457 + }, + { + "epoch": 0.404915668693962, + "grad_norm": 3.6332366466522217, + "learning_rate": 1.3490922559821995e-05, + "loss": 1.2272, + "step": 6458 + }, + { + "epoch": 0.40497836854975233, + "grad_norm": 3.0656182765960693, + "learning_rate": 1.348901948209167e-05, + "loss": 0.9977, + "step": 6459 + }, + { + "epoch": 0.40504106840554266, + "grad_norm": 3.231945753097534, + "learning_rate": 1.3487116260474024e-05, + "loss": 1.1798, + "step": 6460 + }, + { + "epoch": 0.405103768261333, + "grad_norm": 3.0354015827178955, + "learning_rate": 1.3485212895047557e-05, + "loss": 1.1247, + "step": 6461 + }, + { + "epoch": 0.4051664681171233, + "grad_norm": 3.0222182273864746, + "learning_rate": 1.3483309385890753e-05, + "loss": 1.1287, + "step": 6462 + }, + { + "epoch": 0.40522916797291364, + "grad_norm": 3.1497838497161865, + "learning_rate": 1.3481405733082118e-05, + "loss": 1.1648, + "step": 6463 + }, + { + "epoch": 0.40529186782870397, + "grad_norm": 3.3243179321289062, + "learning_rate": 1.3479501936700161e-05, + "loss": 1.2921, + "step": 6464 + }, + { + "epoch": 0.40535456768449435, + "grad_norm": 2.5952329635620117, + "learning_rate": 
1.347759799682339e-05, + "loss": 1.1723, + "step": 6465 + }, + { + "epoch": 0.4054172675402847, + "grad_norm": 2.8401198387145996, + "learning_rate": 1.3475693913530327e-05, + "loss": 1.2707, + "step": 6466 + }, + { + "epoch": 0.405479967396075, + "grad_norm": 3.00252366065979, + "learning_rate": 1.3473789686899496e-05, + "loss": 1.0605, + "step": 6467 + }, + { + "epoch": 0.40554266725186533, + "grad_norm": 2.936014413833618, + "learning_rate": 1.3471885317009427e-05, + "loss": 1.1094, + "step": 6468 + }, + { + "epoch": 0.40560536710765566, + "grad_norm": 2.982726812362671, + "learning_rate": 1.3469980803938654e-05, + "loss": 1.1382, + "step": 6469 + }, + { + "epoch": 0.405668066963446, + "grad_norm": 3.1444196701049805, + "learning_rate": 1.3468076147765723e-05, + "loss": 1.1028, + "step": 6470 + }, + { + "epoch": 0.4057307668192363, + "grad_norm": 2.8096985816955566, + "learning_rate": 1.346617134856918e-05, + "loss": 1.1941, + "step": 6471 + }, + { + "epoch": 0.40579346667502664, + "grad_norm": 3.09637713432312, + "learning_rate": 1.3464266406427575e-05, + "loss": 1.168, + "step": 6472 + }, + { + "epoch": 0.40585616653081696, + "grad_norm": 2.8308544158935547, + "learning_rate": 1.3462361321419478e-05, + "loss": 1.151, + "step": 6473 + }, + { + "epoch": 0.4059188663866073, + "grad_norm": 2.8136751651763916, + "learning_rate": 1.3460456093623443e-05, + "loss": 1.1369, + "step": 6474 + }, + { + "epoch": 0.4059815662423976, + "grad_norm": 2.7368850708007812, + "learning_rate": 1.3458550723118053e-05, + "loss": 1.1069, + "step": 6475 + }, + { + "epoch": 0.406044266098188, + "grad_norm": 3.2828025817871094, + "learning_rate": 1.3456645209981877e-05, + "loss": 1.2209, + "step": 6476 + }, + { + "epoch": 0.4061069659539783, + "grad_norm": 2.9211411476135254, + "learning_rate": 1.3454739554293506e-05, + "loss": 1.0285, + "step": 6477 + }, + { + "epoch": 0.40616966580976865, + "grad_norm": 2.7296414375305176, + "learning_rate": 1.345283375613152e-05, + "loss": 1.178, + "step": 6478 + }, + { + "epoch": 0.406232365665559, + "grad_norm": 3.3680973052978516, + "learning_rate": 1.3450927815574522e-05, + "loss": 1.0226, + "step": 6479 + }, + { + "epoch": 0.4062950655213493, + "grad_norm": 3.206752061843872, + "learning_rate": 1.3449021732701106e-05, + "loss": 1.0356, + "step": 6480 + }, + { + "epoch": 0.40635776537713963, + "grad_norm": 2.8700308799743652, + "learning_rate": 1.3447115507589889e-05, + "loss": 1.2562, + "step": 6481 + }, + { + "epoch": 0.40642046523292996, + "grad_norm": 3.2696139812469482, + "learning_rate": 1.3445209140319472e-05, + "loss": 1.3262, + "step": 6482 + }, + { + "epoch": 0.4064831650887203, + "grad_norm": 3.099283218383789, + "learning_rate": 1.3443302630968482e-05, + "loss": 1.17, + "step": 6483 + }, + { + "epoch": 0.4065458649445106, + "grad_norm": 2.7348175048828125, + "learning_rate": 1.3441395979615543e-05, + "loss": 1.2373, + "step": 6484 + }, + { + "epoch": 0.40660856480030094, + "grad_norm": 3.3146207332611084, + "learning_rate": 1.3439489186339283e-05, + "loss": 1.0877, + "step": 6485 + }, + { + "epoch": 0.40667126465609127, + "grad_norm": 2.919504404067993, + "learning_rate": 1.3437582251218338e-05, + "loss": 1.1571, + "step": 6486 + }, + { + "epoch": 0.4067339645118816, + "grad_norm": 2.8979177474975586, + "learning_rate": 1.3435675174331348e-05, + "loss": 1.1462, + "step": 6487 + }, + { + "epoch": 0.406796664367672, + "grad_norm": 3.081197500228882, + "learning_rate": 1.343376795575697e-05, + "loss": 1.025, + "step": 6488 + }, + { + "epoch": 0.4068593642234623, 
+ "grad_norm": 3.0159153938293457, + "learning_rate": 1.3431860595573847e-05, + "loss": 1.0389, + "step": 6489 + }, + { + "epoch": 0.40692206407925263, + "grad_norm": 2.8391335010528564, + "learning_rate": 1.3429953093860646e-05, + "loss": 1.0792, + "step": 6490 + }, + { + "epoch": 0.40698476393504296, + "grad_norm": 3.2010467052459717, + "learning_rate": 1.3428045450696025e-05, + "loss": 1.1161, + "step": 6491 + }, + { + "epoch": 0.4070474637908333, + "grad_norm": 3.159762382507324, + "learning_rate": 1.3426137666158665e-05, + "loss": 1.2645, + "step": 6492 + }, + { + "epoch": 0.4071101636466236, + "grad_norm": 3.1846845149993896, + "learning_rate": 1.3424229740327235e-05, + "loss": 1.2196, + "step": 6493 + }, + { + "epoch": 0.40717286350241394, + "grad_norm": 3.094289541244507, + "learning_rate": 1.342232167328042e-05, + "loss": 1.0433, + "step": 6494 + }, + { + "epoch": 0.40723556335820427, + "grad_norm": 3.174675703048706, + "learning_rate": 1.3420413465096913e-05, + "loss": 1.0531, + "step": 6495 + }, + { + "epoch": 0.4072982632139946, + "grad_norm": 3.008413553237915, + "learning_rate": 1.3418505115855402e-05, + "loss": 0.9482, + "step": 6496 + }, + { + "epoch": 0.4073609630697849, + "grad_norm": 2.929790735244751, + "learning_rate": 1.3416596625634595e-05, + "loss": 1.1724, + "step": 6497 + }, + { + "epoch": 0.40742366292557525, + "grad_norm": 2.7385265827178955, + "learning_rate": 1.3414687994513191e-05, + "loss": 1.0937, + "step": 6498 + }, + { + "epoch": 0.40748636278136563, + "grad_norm": 3.1430318355560303, + "learning_rate": 1.3412779222569907e-05, + "loss": 1.158, + "step": 6499 + }, + { + "epoch": 0.40754906263715596, + "grad_norm": 3.1998746395111084, + "learning_rate": 1.3410870309883457e-05, + "loss": 0.8929, + "step": 6500 + }, + { + "epoch": 0.4076117624929463, + "grad_norm": 3.0693535804748535, + "learning_rate": 1.3408961256532566e-05, + "loss": 1.1034, + "step": 6501 + }, + { + "epoch": 0.4076744623487366, + "grad_norm": 2.9727275371551514, + "learning_rate": 1.3407052062595965e-05, + "loss": 1.2685, + "step": 6502 + }, + { + "epoch": 0.40773716220452694, + "grad_norm": 2.7804884910583496, + "learning_rate": 1.3405142728152388e-05, + "loss": 1.2462, + "step": 6503 + }, + { + "epoch": 0.40779986206031726, + "grad_norm": 2.8125433921813965, + "learning_rate": 1.3403233253280577e-05, + "loss": 1.24, + "step": 6504 + }, + { + "epoch": 0.4078625619161076, + "grad_norm": 3.317736864089966, + "learning_rate": 1.3401323638059273e-05, + "loss": 1.1098, + "step": 6505 + }, + { + "epoch": 0.4079252617718979, + "grad_norm": 3.0640437602996826, + "learning_rate": 1.3399413882567241e-05, + "loss": 1.1404, + "step": 6506 + }, + { + "epoch": 0.40798796162768824, + "grad_norm": 2.8002519607543945, + "learning_rate": 1.3397503986883227e-05, + "loss": 1.1737, + "step": 6507 + }, + { + "epoch": 0.40805066148347857, + "grad_norm": 2.7187187671661377, + "learning_rate": 1.3395593951086003e-05, + "loss": 1.299, + "step": 6508 + }, + { + "epoch": 0.4081133613392689, + "grad_norm": 2.9840102195739746, + "learning_rate": 1.3393683775254336e-05, + "loss": 1.0585, + "step": 6509 + }, + { + "epoch": 0.4081760611950592, + "grad_norm": 2.988757371902466, + "learning_rate": 1.3391773459467e-05, + "loss": 1.1933, + "step": 6510 + }, + { + "epoch": 0.4082387610508496, + "grad_norm": 2.8919296264648438, + "learning_rate": 1.3389863003802779e-05, + "loss": 1.0547, + "step": 6511 + }, + { + "epoch": 0.40830146090663993, + "grad_norm": 3.009077548980713, + "learning_rate": 1.338795240834046e-05, + 
"loss": 1.1257, + "step": 6512 + }, + { + "epoch": 0.40836416076243026, + "grad_norm": 2.961228370666504, + "learning_rate": 1.3386041673158836e-05, + "loss": 1.0724, + "step": 6513 + }, + { + "epoch": 0.4084268606182206, + "grad_norm": 3.078569173812866, + "learning_rate": 1.3384130798336705e-05, + "loss": 0.9971, + "step": 6514 + }, + { + "epoch": 0.4084895604740109, + "grad_norm": 3.1867265701293945, + "learning_rate": 1.3382219783952875e-05, + "loss": 1.0919, + "step": 6515 + }, + { + "epoch": 0.40855226032980124, + "grad_norm": 2.875943660736084, + "learning_rate": 1.3380308630086147e-05, + "loss": 1.1402, + "step": 6516 + }, + { + "epoch": 0.40861496018559157, + "grad_norm": 2.9540014266967773, + "learning_rate": 1.3378397336815349e-05, + "loss": 1.1066, + "step": 6517 + }, + { + "epoch": 0.4086776600413819, + "grad_norm": 3.2711427211761475, + "learning_rate": 1.3376485904219296e-05, + "loss": 0.9375, + "step": 6518 + }, + { + "epoch": 0.4087403598971722, + "grad_norm": 3.2572875022888184, + "learning_rate": 1.3374574332376817e-05, + "loss": 1.1141, + "step": 6519 + }, + { + "epoch": 0.40880305975296255, + "grad_norm": 2.8532028198242188, + "learning_rate": 1.3372662621366744e-05, + "loss": 1.0852, + "step": 6520 + }, + { + "epoch": 0.4088657596087529, + "grad_norm": 2.8463294506073, + "learning_rate": 1.337075077126792e-05, + "loss": 1.1876, + "step": 6521 + }, + { + "epoch": 0.40892845946454326, + "grad_norm": 3.098344326019287, + "learning_rate": 1.3368838782159184e-05, + "loss": 1.153, + "step": 6522 + }, + { + "epoch": 0.4089911593203336, + "grad_norm": 3.196949005126953, + "learning_rate": 1.3366926654119388e-05, + "loss": 1.2271, + "step": 6523 + }, + { + "epoch": 0.4090538591761239, + "grad_norm": 3.0952272415161133, + "learning_rate": 1.3365014387227393e-05, + "loss": 1.1008, + "step": 6524 + }, + { + "epoch": 0.40911655903191424, + "grad_norm": 3.3432791233062744, + "learning_rate": 1.3363101981562056e-05, + "loss": 1.0096, + "step": 6525 + }, + { + "epoch": 0.40917925888770457, + "grad_norm": 3.1374611854553223, + "learning_rate": 1.336118943720225e-05, + "loss": 1.1588, + "step": 6526 + }, + { + "epoch": 0.4092419587434949, + "grad_norm": 3.217696189880371, + "learning_rate": 1.3359276754226839e-05, + "loss": 1.2413, + "step": 6527 + }, + { + "epoch": 0.4093046585992852, + "grad_norm": 2.998763084411621, + "learning_rate": 1.3357363932714713e-05, + "loss": 1.1318, + "step": 6528 + }, + { + "epoch": 0.40936735845507555, + "grad_norm": 2.512836217880249, + "learning_rate": 1.335545097274475e-05, + "loss": 1.0426, + "step": 6529 + }, + { + "epoch": 0.4094300583108659, + "grad_norm": 3.2916250228881836, + "learning_rate": 1.3353537874395843e-05, + "loss": 1.0783, + "step": 6530 + }, + { + "epoch": 0.4094927581666562, + "grad_norm": 2.911348819732666, + "learning_rate": 1.3351624637746885e-05, + "loss": 1.1825, + "step": 6531 + }, + { + "epoch": 0.40955545802244653, + "grad_norm": 3.3513898849487305, + "learning_rate": 1.3349711262876786e-05, + "loss": 1.2924, + "step": 6532 + }, + { + "epoch": 0.40961815787823685, + "grad_norm": 3.0907506942749023, + "learning_rate": 1.3347797749864443e-05, + "loss": 1.2407, + "step": 6533 + }, + { + "epoch": 0.40968085773402724, + "grad_norm": 2.9146227836608887, + "learning_rate": 1.3345884098788775e-05, + "loss": 1.0964, + "step": 6534 + }, + { + "epoch": 0.40974355758981756, + "grad_norm": 2.8671231269836426, + "learning_rate": 1.3343970309728703e-05, + "loss": 1.3154, + "step": 6535 + }, + { + "epoch": 0.4098062574456079, + "grad_norm": 
2.701657772064209, + "learning_rate": 1.3342056382763147e-05, + "loss": 1.0655, + "step": 6536 + }, + { + "epoch": 0.4098689573013982, + "grad_norm": 3.2027454376220703, + "learning_rate": 1.3340142317971042e-05, + "loss": 1.2246, + "step": 6537 + }, + { + "epoch": 0.40993165715718854, + "grad_norm": 2.9883012771606445, + "learning_rate": 1.333822811543132e-05, + "loss": 1.1082, + "step": 6538 + }, + { + "epoch": 0.40999435701297887, + "grad_norm": 3.0916128158569336, + "learning_rate": 1.3336313775222927e-05, + "loss": 1.238, + "step": 6539 + }, + { + "epoch": 0.4100570568687692, + "grad_norm": 3.096111536026001, + "learning_rate": 1.3334399297424807e-05, + "loss": 1.0465, + "step": 6540 + }, + { + "epoch": 0.4101197567245595, + "grad_norm": 3.3790018558502197, + "learning_rate": 1.3332484682115916e-05, + "loss": 0.9508, + "step": 6541 + }, + { + "epoch": 0.41018245658034985, + "grad_norm": 2.773409366607666, + "learning_rate": 1.3330569929375208e-05, + "loss": 1.1385, + "step": 6542 + }, + { + "epoch": 0.4102451564361402, + "grad_norm": 2.901301383972168, + "learning_rate": 1.3328655039281653e-05, + "loss": 1.1459, + "step": 6543 + }, + { + "epoch": 0.4103078562919305, + "grad_norm": 2.9079227447509766, + "learning_rate": 1.3326740011914216e-05, + "loss": 1.0832, + "step": 6544 + }, + { + "epoch": 0.41037055614772083, + "grad_norm": 3.337364912033081, + "learning_rate": 1.3324824847351878e-05, + "loss": 0.9896, + "step": 6545 + }, + { + "epoch": 0.4104332560035112, + "grad_norm": 3.206822156906128, + "learning_rate": 1.332290954567362e-05, + "loss": 1.2656, + "step": 6546 + }, + { + "epoch": 0.41049595585930154, + "grad_norm": 2.991605520248413, + "learning_rate": 1.3320994106958423e-05, + "loss": 1.2077, + "step": 6547 + }, + { + "epoch": 0.41055865571509187, + "grad_norm": 2.5521042346954346, + "learning_rate": 1.3319078531285286e-05, + "loss": 1.3067, + "step": 6548 + }, + { + "epoch": 0.4106213555708822, + "grad_norm": 3.3746142387390137, + "learning_rate": 1.3317162818733205e-05, + "loss": 0.9088, + "step": 6549 + }, + { + "epoch": 0.4106840554266725, + "grad_norm": 3.0315942764282227, + "learning_rate": 1.3315246969381186e-05, + "loss": 1.1553, + "step": 6550 + }, + { + "epoch": 0.41074675528246285, + "grad_norm": 3.159179210662842, + "learning_rate": 1.3313330983308235e-05, + "loss": 1.1203, + "step": 6551 + }, + { + "epoch": 0.4108094551382532, + "grad_norm": 3.128523111343384, + "learning_rate": 1.3311414860593373e-05, + "loss": 1.1098, + "step": 6552 + }, + { + "epoch": 0.4108721549940435, + "grad_norm": 3.0323755741119385, + "learning_rate": 1.3309498601315614e-05, + "loss": 1.1495, + "step": 6553 + }, + { + "epoch": 0.41093485484983383, + "grad_norm": 3.1089231967926025, + "learning_rate": 1.3307582205553988e-05, + "loss": 1.1324, + "step": 6554 + }, + { + "epoch": 0.41099755470562416, + "grad_norm": 3.140498399734497, + "learning_rate": 1.3305665673387528e-05, + "loss": 1.1161, + "step": 6555 + }, + { + "epoch": 0.4110602545614145, + "grad_norm": 3.462533473968506, + "learning_rate": 1.3303749004895273e-05, + "loss": 1.0828, + "step": 6556 + }, + { + "epoch": 0.41112295441720487, + "grad_norm": 3.283485174179077, + "learning_rate": 1.3301832200156263e-05, + "loss": 1.0431, + "step": 6557 + }, + { + "epoch": 0.4111856542729952, + "grad_norm": 3.250140905380249, + "learning_rate": 1.329991525924955e-05, + "loss": 1.0649, + "step": 6558 + }, + { + "epoch": 0.4112483541287855, + "grad_norm": 3.4732894897460938, + "learning_rate": 1.3297998182254189e-05, + "loss": 1.1975, + 
"step": 6559 + }, + { + "epoch": 0.41131105398457585, + "grad_norm": 3.1449570655822754, + "learning_rate": 1.3296080969249237e-05, + "loss": 1.0743, + "step": 6560 + }, + { + "epoch": 0.4113737538403662, + "grad_norm": 2.738337993621826, + "learning_rate": 1.3294163620313763e-05, + "loss": 1.1166, + "step": 6561 + }, + { + "epoch": 0.4114364536961565, + "grad_norm": 3.3542017936706543, + "learning_rate": 1.3292246135526834e-05, + "loss": 1.3008, + "step": 6562 + }, + { + "epoch": 0.41149915355194683, + "grad_norm": 2.8400466442108154, + "learning_rate": 1.3290328514967535e-05, + "loss": 1.039, + "step": 6563 + }, + { + "epoch": 0.41156185340773715, + "grad_norm": 3.243938446044922, + "learning_rate": 1.3288410758714943e-05, + "loss": 1.1445, + "step": 6564 + }, + { + "epoch": 0.4116245532635275, + "grad_norm": 3.0612218379974365, + "learning_rate": 1.3286492866848143e-05, + "loss": 1.0954, + "step": 6565 + }, + { + "epoch": 0.4116872531193178, + "grad_norm": 3.4620206356048584, + "learning_rate": 1.328457483944624e-05, + "loss": 1.1217, + "step": 6566 + }, + { + "epoch": 0.41174995297510814, + "grad_norm": 3.153318405151367, + "learning_rate": 1.3282656676588324e-05, + "loss": 1.0309, + "step": 6567 + }, + { + "epoch": 0.41181265283089846, + "grad_norm": 2.6319503784179688, + "learning_rate": 1.3280738378353506e-05, + "loss": 1.2782, + "step": 6568 + }, + { + "epoch": 0.41187535268668884, + "grad_norm": 2.9276883602142334, + "learning_rate": 1.3278819944820893e-05, + "loss": 1.1481, + "step": 6569 + }, + { + "epoch": 0.41193805254247917, + "grad_norm": 3.0660593509674072, + "learning_rate": 1.3276901376069602e-05, + "loss": 1.0881, + "step": 6570 + }, + { + "epoch": 0.4120007523982695, + "grad_norm": 3.2263262271881104, + "learning_rate": 1.3274982672178753e-05, + "loss": 1.1039, + "step": 6571 + }, + { + "epoch": 0.4120634522540598, + "grad_norm": 2.8700084686279297, + "learning_rate": 1.327306383322748e-05, + "loss": 1.1997, + "step": 6572 + }, + { + "epoch": 0.41212615210985015, + "grad_norm": 3.6623528003692627, + "learning_rate": 1.3271144859294907e-05, + "loss": 1.2124, + "step": 6573 + }, + { + "epoch": 0.4121888519656405, + "grad_norm": 3.0268518924713135, + "learning_rate": 1.326922575046018e-05, + "loss": 1.1772, + "step": 6574 + }, + { + "epoch": 0.4122515518214308, + "grad_norm": 3.082772970199585, + "learning_rate": 1.326730650680244e-05, + "loss": 1.2159, + "step": 6575 + }, + { + "epoch": 0.41231425167722113, + "grad_norm": 2.875910758972168, + "learning_rate": 1.3265387128400833e-05, + "loss": 1.0916, + "step": 6576 + }, + { + "epoch": 0.41237695153301146, + "grad_norm": 3.382721185684204, + "learning_rate": 1.3263467615334524e-05, + "loss": 1.1016, + "step": 6577 + }, + { + "epoch": 0.4124396513888018, + "grad_norm": 2.8237361907958984, + "learning_rate": 1.3261547967682664e-05, + "loss": 1.1856, + "step": 6578 + }, + { + "epoch": 0.4125023512445921, + "grad_norm": 2.7922229766845703, + "learning_rate": 1.3259628185524426e-05, + "loss": 0.9844, + "step": 6579 + }, + { + "epoch": 0.4125650511003825, + "grad_norm": 2.4797542095184326, + "learning_rate": 1.3257708268938978e-05, + "loss": 1.2218, + "step": 6580 + }, + { + "epoch": 0.4126277509561728, + "grad_norm": 2.978212356567383, + "learning_rate": 1.3255788218005501e-05, + "loss": 1.1765, + "step": 6581 + }, + { + "epoch": 0.41269045081196315, + "grad_norm": 3.137542963027954, + "learning_rate": 1.3253868032803171e-05, + "loss": 1.1905, + "step": 6582 + }, + { + "epoch": 0.4127531506677535, + "grad_norm": 
3.14620041847229, + "learning_rate": 1.3251947713411187e-05, + "loss": 1.1211, + "step": 6583 + }, + { + "epoch": 0.4128158505235438, + "grad_norm": 3.2188608646392822, + "learning_rate": 1.3250027259908733e-05, + "loss": 1.2599, + "step": 6584 + }, + { + "epoch": 0.41287855037933413, + "grad_norm": 3.360032796859741, + "learning_rate": 1.3248106672375012e-05, + "loss": 1.2554, + "step": 6585 + }, + { + "epoch": 0.41294125023512446, + "grad_norm": 2.978922128677368, + "learning_rate": 1.3246185950889232e-05, + "loss": 1.1717, + "step": 6586 + }, + { + "epoch": 0.4130039500909148, + "grad_norm": 2.9541592597961426, + "learning_rate": 1.3244265095530602e-05, + "loss": 1.084, + "step": 6587 + }, + { + "epoch": 0.4130666499467051, + "grad_norm": 3.107982635498047, + "learning_rate": 1.3242344106378338e-05, + "loss": 1.2223, + "step": 6588 + }, + { + "epoch": 0.41312934980249544, + "grad_norm": 2.9445714950561523, + "learning_rate": 1.324042298351166e-05, + "loss": 1.1505, + "step": 6589 + }, + { + "epoch": 0.41319204965828576, + "grad_norm": 2.8940277099609375, + "learning_rate": 1.32385017270098e-05, + "loss": 1.2493, + "step": 6590 + }, + { + "epoch": 0.4132547495140761, + "grad_norm": 2.9841842651367188, + "learning_rate": 1.3236580336951983e-05, + "loss": 1.0097, + "step": 6591 + }, + { + "epoch": 0.4133174493698665, + "grad_norm": 3.1441738605499268, + "learning_rate": 1.3234658813417456e-05, + "loss": 1.0405, + "step": 6592 + }, + { + "epoch": 0.4133801492256568, + "grad_norm": 3.0928537845611572, + "learning_rate": 1.3232737156485453e-05, + "loss": 1.1257, + "step": 6593 + }, + { + "epoch": 0.41344284908144713, + "grad_norm": 3.041438102722168, + "learning_rate": 1.3230815366235232e-05, + "loss": 1.2051, + "step": 6594 + }, + { + "epoch": 0.41350554893723745, + "grad_norm": 3.2165701389312744, + "learning_rate": 1.3228893442746045e-05, + "loss": 1.1931, + "step": 6595 + }, + { + "epoch": 0.4135682487930278, + "grad_norm": 3.136676788330078, + "learning_rate": 1.3226971386097148e-05, + "loss": 0.9383, + "step": 6596 + }, + { + "epoch": 0.4136309486488181, + "grad_norm": 3.1508867740631104, + "learning_rate": 1.3225049196367814e-05, + "loss": 1.039, + "step": 6597 + }, + { + "epoch": 0.41369364850460844, + "grad_norm": 2.98850679397583, + "learning_rate": 1.3223126873637307e-05, + "loss": 1.0262, + "step": 6598 + }, + { + "epoch": 0.41375634836039876, + "grad_norm": 3.229318380355835, + "learning_rate": 1.3221204417984907e-05, + "loss": 1.2327, + "step": 6599 + }, + { + "epoch": 0.4138190482161891, + "grad_norm": 2.885890007019043, + "learning_rate": 1.3219281829489899e-05, + "loss": 1.1077, + "step": 6600 + }, + { + "epoch": 0.4138817480719794, + "grad_norm": 3.192030191421509, + "learning_rate": 1.3217359108231567e-05, + "loss": 1.1373, + "step": 6601 + }, + { + "epoch": 0.41394444792776974, + "grad_norm": 2.7372355461120605, + "learning_rate": 1.3215436254289202e-05, + "loss": 1.0935, + "step": 6602 + }, + { + "epoch": 0.4140071477835601, + "grad_norm": 2.8747692108154297, + "learning_rate": 1.3213513267742109e-05, + "loss": 1.1027, + "step": 6603 + }, + { + "epoch": 0.41406984763935045, + "grad_norm": 3.1437034606933594, + "learning_rate": 1.3211590148669586e-05, + "loss": 1.1622, + "step": 6604 + }, + { + "epoch": 0.4141325474951408, + "grad_norm": 3.0369527339935303, + "learning_rate": 1.3209666897150945e-05, + "loss": 1.0047, + "step": 6605 + }, + { + "epoch": 0.4141952473509311, + "grad_norm": 2.878842353820801, + "learning_rate": 1.3207743513265503e-05, + "loss": 1.1605, + 
"step": 6606 + }, + { + "epoch": 0.41425794720672143, + "grad_norm": 3.185363531112671, + "learning_rate": 1.3205819997092578e-05, + "loss": 1.2177, + "step": 6607 + }, + { + "epoch": 0.41432064706251176, + "grad_norm": 3.0038068294525146, + "learning_rate": 1.3203896348711496e-05, + "loss": 1.2219, + "step": 6608 + }, + { + "epoch": 0.4143833469183021, + "grad_norm": 3.5799126625061035, + "learning_rate": 1.3201972568201588e-05, + "loss": 1.1764, + "step": 6609 + }, + { + "epoch": 0.4144460467740924, + "grad_norm": 3.049349784851074, + "learning_rate": 1.3200048655642192e-05, + "loss": 1.0189, + "step": 6610 + }, + { + "epoch": 0.41450874662988274, + "grad_norm": 2.6322147846221924, + "learning_rate": 1.319812461111265e-05, + "loss": 1.2291, + "step": 6611 + }, + { + "epoch": 0.41457144648567307, + "grad_norm": 3.064544200897217, + "learning_rate": 1.319620043469231e-05, + "loss": 1.2354, + "step": 6612 + }, + { + "epoch": 0.4146341463414634, + "grad_norm": 2.8242719173431396, + "learning_rate": 1.319427612646052e-05, + "loss": 1.1204, + "step": 6613 + }, + { + "epoch": 0.4146968461972537, + "grad_norm": 2.840441942214966, + "learning_rate": 1.319235168649665e-05, + "loss": 1.1875, + "step": 6614 + }, + { + "epoch": 0.4147595460530441, + "grad_norm": 3.058563709259033, + "learning_rate": 1.3190427114880052e-05, + "loss": 1.1514, + "step": 6615 + }, + { + "epoch": 0.41482224590883443, + "grad_norm": 2.9967989921569824, + "learning_rate": 1.3188502411690101e-05, + "loss": 1.0461, + "step": 6616 + }, + { + "epoch": 0.41488494576462476, + "grad_norm": 3.4142916202545166, + "learning_rate": 1.3186577577006174e-05, + "loss": 1.1133, + "step": 6617 + }, + { + "epoch": 0.4149476456204151, + "grad_norm": 2.9031143188476562, + "learning_rate": 1.3184652610907644e-05, + "loss": 1.0917, + "step": 6618 + }, + { + "epoch": 0.4150103454762054, + "grad_norm": 3.1529481410980225, + "learning_rate": 1.3182727513473907e-05, + "loss": 1.0678, + "step": 6619 + }, + { + "epoch": 0.41507304533199574, + "grad_norm": 3.353006601333618, + "learning_rate": 1.3180802284784344e-05, + "loss": 1.1256, + "step": 6620 + }, + { + "epoch": 0.41513574518778606, + "grad_norm": 2.918092966079712, + "learning_rate": 1.3178876924918358e-05, + "loss": 1.0979, + "step": 6621 + }, + { + "epoch": 0.4151984450435764, + "grad_norm": 3.2282798290252686, + "learning_rate": 1.3176951433955347e-05, + "loss": 1.0394, + "step": 6622 + }, + { + "epoch": 0.4152611448993667, + "grad_norm": 3.3155531883239746, + "learning_rate": 1.3175025811974725e-05, + "loss": 1.179, + "step": 6623 + }, + { + "epoch": 0.41532384475515705, + "grad_norm": 2.9166886806488037, + "learning_rate": 1.3173100059055894e-05, + "loss": 1.046, + "step": 6624 + }, + { + "epoch": 0.4153865446109474, + "grad_norm": 2.8432624340057373, + "learning_rate": 1.317117417527828e-05, + "loss": 1.2519, + "step": 6625 + }, + { + "epoch": 0.4154492444667377, + "grad_norm": 3.2980031967163086, + "learning_rate": 1.3169248160721304e-05, + "loss": 1.0315, + "step": 6626 + }, + { + "epoch": 0.4155119443225281, + "grad_norm": 3.5027031898498535, + "learning_rate": 1.3167322015464396e-05, + "loss": 1.2309, + "step": 6627 + }, + { + "epoch": 0.4155746441783184, + "grad_norm": 3.5821094512939453, + "learning_rate": 1.316539573958699e-05, + "loss": 0.9309, + "step": 6628 + }, + { + "epoch": 0.41563734403410874, + "grad_norm": 3.1975433826446533, + "learning_rate": 1.3163469333168526e-05, + "loss": 1.1781, + "step": 6629 + }, + { + "epoch": 0.41570004388989906, + "grad_norm": 
3.180835247039795, + "learning_rate": 1.316154279628845e-05, + "loss": 1.1067, + "step": 6630 + }, + { + "epoch": 0.4157627437456894, + "grad_norm": 3.1230015754699707, + "learning_rate": 1.3159616129026207e-05, + "loss": 1.0845, + "step": 6631 + }, + { + "epoch": 0.4158254436014797, + "grad_norm": 3.029829263687134, + "learning_rate": 1.315768933146126e-05, + "loss": 1.198, + "step": 6632 + }, + { + "epoch": 0.41588814345727004, + "grad_norm": 3.0678064823150635, + "learning_rate": 1.3155762403673065e-05, + "loss": 1.0713, + "step": 6633 + }, + { + "epoch": 0.41595084331306037, + "grad_norm": 2.600766658782959, + "learning_rate": 1.3153835345741092e-05, + "loss": 1.1391, + "step": 6634 + }, + { + "epoch": 0.4160135431688507, + "grad_norm": 3.151118755340576, + "learning_rate": 1.315190815774481e-05, + "loss": 0.9585, + "step": 6635 + }, + { + "epoch": 0.416076243024641, + "grad_norm": 3.241779088973999, + "learning_rate": 1.3149980839763698e-05, + "loss": 1.0451, + "step": 6636 + }, + { + "epoch": 0.41613894288043135, + "grad_norm": 2.751880168914795, + "learning_rate": 1.314805339187724e-05, + "loss": 1.2406, + "step": 6637 + }, + { + "epoch": 0.41620164273622173, + "grad_norm": 3.1544535160064697, + "learning_rate": 1.3146125814164922e-05, + "loss": 1.1465, + "step": 6638 + }, + { + "epoch": 0.41626434259201206, + "grad_norm": 2.8286800384521484, + "learning_rate": 1.314419810670624e-05, + "loss": 1.1351, + "step": 6639 + }, + { + "epoch": 0.4163270424478024, + "grad_norm": 2.908759355545044, + "learning_rate": 1.3142270269580688e-05, + "loss": 1.0819, + "step": 6640 + }, + { + "epoch": 0.4163897423035927, + "grad_norm": 3.030313491821289, + "learning_rate": 1.3140342302867774e-05, + "loss": 1.0667, + "step": 6641 + }, + { + "epoch": 0.41645244215938304, + "grad_norm": 3.0257515907287598, + "learning_rate": 1.3138414206647006e-05, + "loss": 1.0758, + "step": 6642 + }, + { + "epoch": 0.41651514201517337, + "grad_norm": 3.1970956325531006, + "learning_rate": 1.3136485980997899e-05, + "loss": 1.0317, + "step": 6643 + }, + { + "epoch": 0.4165778418709637, + "grad_norm": 2.9147443771362305, + "learning_rate": 1.3134557625999973e-05, + "loss": 1.3278, + "step": 6644 + }, + { + "epoch": 0.416640541726754, + "grad_norm": 2.983607292175293, + "learning_rate": 1.3132629141732754e-05, + "loss": 1.0087, + "step": 6645 + }, + { + "epoch": 0.41670324158254435, + "grad_norm": 2.9893815517425537, + "learning_rate": 1.3130700528275773e-05, + "loss": 0.9777, + "step": 6646 + }, + { + "epoch": 0.4167659414383347, + "grad_norm": 2.8242733478546143, + "learning_rate": 1.3128771785708564e-05, + "loss": 1.1779, + "step": 6647 + }, + { + "epoch": 0.416828641294125, + "grad_norm": 2.786221981048584, + "learning_rate": 1.3126842914110671e-05, + "loss": 1.0237, + "step": 6648 + }, + { + "epoch": 0.41689134114991533, + "grad_norm": 3.043030261993408, + "learning_rate": 1.3124913913561642e-05, + "loss": 1.1319, + "step": 6649 + }, + { + "epoch": 0.4169540410057057, + "grad_norm": 2.9496798515319824, + "learning_rate": 1.3122984784141021e-05, + "loss": 1.1376, + "step": 6650 + }, + { + "epoch": 0.41701674086149604, + "grad_norm": 3.074320077896118, + "learning_rate": 1.3121055525928375e-05, + "loss": 0.9999, + "step": 6651 + }, + { + "epoch": 0.41707944071728636, + "grad_norm": 2.8961968421936035, + "learning_rate": 1.3119126139003262e-05, + "loss": 1.2762, + "step": 6652 + }, + { + "epoch": 0.4171421405730767, + "grad_norm": 3.4671425819396973, + "learning_rate": 1.3117196623445253e-05, + "loss": 1.0812, + "step": 
6653 + }, + { + "epoch": 0.417204840428867, + "grad_norm": 3.373074769973755, + "learning_rate": 1.3115266979333917e-05, + "loss": 0.9887, + "step": 6654 + }, + { + "epoch": 0.41726754028465735, + "grad_norm": 3.1914682388305664, + "learning_rate": 1.3113337206748833e-05, + "loss": 1.0337, + "step": 6655 + }, + { + "epoch": 0.4173302401404477, + "grad_norm": 2.92805552482605, + "learning_rate": 1.311140730576959e-05, + "loss": 1.3107, + "step": 6656 + }, + { + "epoch": 0.417392939996238, + "grad_norm": 3.0885064601898193, + "learning_rate": 1.310947727647577e-05, + "loss": 0.9991, + "step": 6657 + }, + { + "epoch": 0.4174556398520283, + "grad_norm": 2.8712868690490723, + "learning_rate": 1.3107547118946976e-05, + "loss": 1.0764, + "step": 6658 + }, + { + "epoch": 0.41751833970781865, + "grad_norm": 3.0234875679016113, + "learning_rate": 1.31056168332628e-05, + "loss": 1.0109, + "step": 6659 + }, + { + "epoch": 0.417581039563609, + "grad_norm": 3.0637519359588623, + "learning_rate": 1.3103686419502852e-05, + "loss": 1.1369, + "step": 6660 + }, + { + "epoch": 0.41764373941939936, + "grad_norm": 3.2393436431884766, + "learning_rate": 1.310175587774674e-05, + "loss": 1.166, + "step": 6661 + }, + { + "epoch": 0.4177064392751897, + "grad_norm": 2.7906312942504883, + "learning_rate": 1.3099825208074077e-05, + "loss": 1.0949, + "step": 6662 + }, + { + "epoch": 0.41776913913098, + "grad_norm": 3.598829984664917, + "learning_rate": 1.3097894410564492e-05, + "loss": 1.085, + "step": 6663 + }, + { + "epoch": 0.41783183898677034, + "grad_norm": 2.8236753940582275, + "learning_rate": 1.3095963485297602e-05, + "loss": 1.1587, + "step": 6664 + }, + { + "epoch": 0.41789453884256067, + "grad_norm": 2.6978728771209717, + "learning_rate": 1.3094032432353046e-05, + "loss": 1.0812, + "step": 6665 + }, + { + "epoch": 0.417957238698351, + "grad_norm": 2.967733383178711, + "learning_rate": 1.3092101251810455e-05, + "loss": 1.0912, + "step": 6666 + }, + { + "epoch": 0.4180199385541413, + "grad_norm": 3.0134470462799072, + "learning_rate": 1.3090169943749475e-05, + "loss": 1.0718, + "step": 6667 + }, + { + "epoch": 0.41808263840993165, + "grad_norm": 2.9222609996795654, + "learning_rate": 1.3088238508249752e-05, + "loss": 1.2669, + "step": 6668 + }, + { + "epoch": 0.418145338265722, + "grad_norm": 3.277831792831421, + "learning_rate": 1.3086306945390937e-05, + "loss": 0.9476, + "step": 6669 + }, + { + "epoch": 0.4182080381215123, + "grad_norm": 2.8616793155670166, + "learning_rate": 1.3084375255252689e-05, + "loss": 1.2042, + "step": 6670 + }, + { + "epoch": 0.41827073797730263, + "grad_norm": 3.0110206604003906, + "learning_rate": 1.3082443437914666e-05, + "loss": 1.1522, + "step": 6671 + }, + { + "epoch": 0.41833343783309296, + "grad_norm": 2.911752939224243, + "learning_rate": 1.308051149345655e-05, + "loss": 1.1096, + "step": 6672 + }, + { + "epoch": 0.41839613768888334, + "grad_norm": 3.2128212451934814, + "learning_rate": 1.3078579421958e-05, + "loss": 0.9326, + "step": 6673 + }, + { + "epoch": 0.41845883754467367, + "grad_norm": 2.8353731632232666, + "learning_rate": 1.3076647223498703e-05, + "loss": 1.0994, + "step": 6674 + }, + { + "epoch": 0.418521537400464, + "grad_norm": 3.2078685760498047, + "learning_rate": 1.3074714898158337e-05, + "loss": 1.0922, + "step": 6675 + }, + { + "epoch": 0.4185842372562543, + "grad_norm": 2.9631881713867188, + "learning_rate": 1.3072782446016596e-05, + "loss": 1.1802, + "step": 6676 + }, + { + "epoch": 0.41864693711204465, + "grad_norm": 3.351189613342285, + 
"learning_rate": 1.307084986715317e-05, + "loss": 1.1086, + "step": 6677 + }, + { + "epoch": 0.418709636967835, + "grad_norm": 3.356567859649658, + "learning_rate": 1.3068917161647767e-05, + "loss": 1.0274, + "step": 6678 + }, + { + "epoch": 0.4187723368236253, + "grad_norm": 2.917234182357788, + "learning_rate": 1.3066984329580081e-05, + "loss": 1.0902, + "step": 6679 + }, + { + "epoch": 0.41883503667941563, + "grad_norm": 3.154897928237915, + "learning_rate": 1.306505137102983e-05, + "loss": 1.0971, + "step": 6680 + }, + { + "epoch": 0.41889773653520596, + "grad_norm": 3.0302796363830566, + "learning_rate": 1.3063118286076726e-05, + "loss": 1.0553, + "step": 6681 + }, + { + "epoch": 0.4189604363909963, + "grad_norm": 2.837998390197754, + "learning_rate": 1.3061185074800489e-05, + "loss": 1.1552, + "step": 6682 + }, + { + "epoch": 0.4190231362467866, + "grad_norm": 3.2490994930267334, + "learning_rate": 1.3059251737280847e-05, + "loss": 1.0168, + "step": 6683 + }, + { + "epoch": 0.419085836102577, + "grad_norm": 2.772465229034424, + "learning_rate": 1.3057318273597531e-05, + "loss": 1.11, + "step": 6684 + }, + { + "epoch": 0.4191485359583673, + "grad_norm": 3.040339946746826, + "learning_rate": 1.3055384683830274e-05, + "loss": 1.1594, + "step": 6685 + }, + { + "epoch": 0.41921123581415765, + "grad_norm": 3.0504848957061768, + "learning_rate": 1.3053450968058818e-05, + "loss": 1.1315, + "step": 6686 + }, + { + "epoch": 0.419273935669948, + "grad_norm": 2.659184455871582, + "learning_rate": 1.3051517126362913e-05, + "loss": 1.0762, + "step": 6687 + }, + { + "epoch": 0.4193366355257383, + "grad_norm": 3.4579415321350098, + "learning_rate": 1.3049583158822308e-05, + "loss": 1.1107, + "step": 6688 + }, + { + "epoch": 0.4193993353815286, + "grad_norm": 2.8861441612243652, + "learning_rate": 1.3047649065516761e-05, + "loss": 1.1515, + "step": 6689 + }, + { + "epoch": 0.41946203523731895, + "grad_norm": 3.104598045349121, + "learning_rate": 1.3045714846526031e-05, + "loss": 1.1088, + "step": 6690 + }, + { + "epoch": 0.4195247350931093, + "grad_norm": 3.063014507293701, + "learning_rate": 1.3043780501929892e-05, + "loss": 1.2151, + "step": 6691 + }, + { + "epoch": 0.4195874349488996, + "grad_norm": 3.257495403289795, + "learning_rate": 1.304184603180811e-05, + "loss": 1.028, + "step": 6692 + }, + { + "epoch": 0.41965013480468993, + "grad_norm": 3.1647229194641113, + "learning_rate": 1.3039911436240462e-05, + "loss": 1.0567, + "step": 6693 + }, + { + "epoch": 0.41971283466048026, + "grad_norm": 3.123331069946289, + "learning_rate": 1.3037976715306738e-05, + "loss": 1.0602, + "step": 6694 + }, + { + "epoch": 0.4197755345162706, + "grad_norm": 2.9913439750671387, + "learning_rate": 1.3036041869086718e-05, + "loss": 1.1199, + "step": 6695 + }, + { + "epoch": 0.41983823437206097, + "grad_norm": 2.946660280227661, + "learning_rate": 1.3034106897660202e-05, + "loss": 1.0905, + "step": 6696 + }, + { + "epoch": 0.4199009342278513, + "grad_norm": 3.038954734802246, + "learning_rate": 1.3032171801106983e-05, + "loss": 1.0557, + "step": 6697 + }, + { + "epoch": 0.4199636340836416, + "grad_norm": 3.3412740230560303, + "learning_rate": 1.3030236579506867e-05, + "loss": 1.1124, + "step": 6698 + }, + { + "epoch": 0.42002633393943195, + "grad_norm": 3.094290256500244, + "learning_rate": 1.3028301232939662e-05, + "loss": 1.0829, + "step": 6699 + }, + { + "epoch": 0.4200890337952223, + "grad_norm": 3.0499725341796875, + "learning_rate": 1.3026365761485183e-05, + "loss": 1.3724, + "step": 6700 + }, + { + "epoch": 
0.4201517336510126, + "grad_norm": 3.3143270015716553, + "learning_rate": 1.3024430165223245e-05, + "loss": 1.3101, + "step": 6701 + }, + { + "epoch": 0.42021443350680293, + "grad_norm": 3.117030620574951, + "learning_rate": 1.3022494444233678e-05, + "loss": 1.1192, + "step": 6702 + }, + { + "epoch": 0.42027713336259326, + "grad_norm": 3.0011701583862305, + "learning_rate": 1.3020558598596307e-05, + "loss": 1.289, + "step": 6703 + }, + { + "epoch": 0.4203398332183836, + "grad_norm": 3.0278468132019043, + "learning_rate": 1.3018622628390966e-05, + "loss": 1.1407, + "step": 6704 + }, + { + "epoch": 0.4204025330741739, + "grad_norm": 3.1136226654052734, + "learning_rate": 1.3016686533697498e-05, + "loss": 1.1859, + "step": 6705 + }, + { + "epoch": 0.42046523292996424, + "grad_norm": 3.1727898120880127, + "learning_rate": 1.3014750314595744e-05, + "loss": 1.1307, + "step": 6706 + }, + { + "epoch": 0.42052793278575457, + "grad_norm": 2.929440498352051, + "learning_rate": 1.3012813971165557e-05, + "loss": 1.2298, + "step": 6707 + }, + { + "epoch": 0.42059063264154495, + "grad_norm": 2.774919271469116, + "learning_rate": 1.301087750348679e-05, + "loss": 1.1521, + "step": 6708 + }, + { + "epoch": 0.4206533324973353, + "grad_norm": 2.8912439346313477, + "learning_rate": 1.3008940911639302e-05, + "loss": 1.1012, + "step": 6709 + }, + { + "epoch": 0.4207160323531256, + "grad_norm": 2.7832446098327637, + "learning_rate": 1.300700419570296e-05, + "loss": 1.2203, + "step": 6710 + }, + { + "epoch": 0.42077873220891593, + "grad_norm": 2.9853551387786865, + "learning_rate": 1.3005067355757634e-05, + "loss": 1.112, + "step": 6711 + }, + { + "epoch": 0.42084143206470626, + "grad_norm": 3.2052624225616455, + "learning_rate": 1.3003130391883198e-05, + "loss": 1.0824, + "step": 6712 + }, + { + "epoch": 0.4209041319204966, + "grad_norm": 2.818079710006714, + "learning_rate": 1.3001193304159534e-05, + "loss": 1.1892, + "step": 6713 + }, + { + "epoch": 0.4209668317762869, + "grad_norm": 2.7415647506713867, + "learning_rate": 1.299925609266653e-05, + "loss": 1.199, + "step": 6714 + }, + { + "epoch": 0.42102953163207724, + "grad_norm": 3.206249713897705, + "learning_rate": 1.299731875748407e-05, + "loss": 1.097, + "step": 6715 + }, + { + "epoch": 0.42109223148786756, + "grad_norm": 3.118673801422119, + "learning_rate": 1.2995381298692056e-05, + "loss": 1.0372, + "step": 6716 + }, + { + "epoch": 0.4211549313436579, + "grad_norm": 2.7611167430877686, + "learning_rate": 1.2993443716370384e-05, + "loss": 1.1706, + "step": 6717 + }, + { + "epoch": 0.4212176311994482, + "grad_norm": 3.0903127193450928, + "learning_rate": 1.2991506010598965e-05, + "loss": 1.0428, + "step": 6718 + }, + { + "epoch": 0.4212803310552386, + "grad_norm": 3.0625922679901123, + "learning_rate": 1.2989568181457704e-05, + "loss": 1.0368, + "step": 6719 + }, + { + "epoch": 0.4213430309110289, + "grad_norm": 3.0975210666656494, + "learning_rate": 1.2987630229026524e-05, + "loss": 1.2338, + "step": 6720 + }, + { + "epoch": 0.42140573076681925, + "grad_norm": 2.861532688140869, + "learning_rate": 1.298569215338534e-05, + "loss": 1.1933, + "step": 6721 + }, + { + "epoch": 0.4214684306226096, + "grad_norm": 2.77420711517334, + "learning_rate": 1.2983753954614084e-05, + "loss": 1.1085, + "step": 6722 + }, + { + "epoch": 0.4215311304783999, + "grad_norm": 2.945796489715576, + "learning_rate": 1.2981815632792683e-05, + "loss": 1.1406, + "step": 6723 + }, + { + "epoch": 0.42159383033419023, + "grad_norm": 2.869182825088501, + "learning_rate": 
1.2979877188001075e-05, + "loss": 1.0223, + "step": 6724 + }, + { + "epoch": 0.42165653018998056, + "grad_norm": 2.8834168910980225, + "learning_rate": 1.2977938620319204e-05, + "loss": 1.0382, + "step": 6725 + }, + { + "epoch": 0.4217192300457709, + "grad_norm": 3.0478549003601074, + "learning_rate": 1.297599992982701e-05, + "loss": 1.2446, + "step": 6726 + }, + { + "epoch": 0.4217819299015612, + "grad_norm": 2.696366786956787, + "learning_rate": 1.2974061116604453e-05, + "loss": 1.1589, + "step": 6727 + }, + { + "epoch": 0.42184462975735154, + "grad_norm": 3.3301570415496826, + "learning_rate": 1.2972122180731484e-05, + "loss": 1.2502, + "step": 6728 + }, + { + "epoch": 0.42190732961314187, + "grad_norm": 2.997023820877075, + "learning_rate": 1.2970183122288068e-05, + "loss": 1.0384, + "step": 6729 + }, + { + "epoch": 0.4219700294689322, + "grad_norm": 2.838188409805298, + "learning_rate": 1.2968243941354169e-05, + "loss": 1.2133, + "step": 6730 + }, + { + "epoch": 0.4220327293247226, + "grad_norm": 2.835247278213501, + "learning_rate": 1.2966304638009759e-05, + "loss": 1.133, + "step": 6731 + }, + { + "epoch": 0.4220954291805129, + "grad_norm": 2.752713441848755, + "learning_rate": 1.2964365212334817e-05, + "loss": 1.1218, + "step": 6732 + }, + { + "epoch": 0.42215812903630323, + "grad_norm": 2.6556124687194824, + "learning_rate": 1.2962425664409323e-05, + "loss": 1.4393, + "step": 6733 + }, + { + "epoch": 0.42222082889209356, + "grad_norm": 2.647592306137085, + "learning_rate": 1.2960485994313267e-05, + "loss": 0.9928, + "step": 6734 + }, + { + "epoch": 0.4222835287478839, + "grad_norm": 2.8161110877990723, + "learning_rate": 1.2958546202126638e-05, + "loss": 1.1668, + "step": 6735 + }, + { + "epoch": 0.4223462286036742, + "grad_norm": 3.006899118423462, + "learning_rate": 1.2956606287929438e-05, + "loss": 0.9666, + "step": 6736 + }, + { + "epoch": 0.42240892845946454, + "grad_norm": 2.635474681854248, + "learning_rate": 1.2954666251801662e-05, + "loss": 1.3137, + "step": 6737 + }, + { + "epoch": 0.42247162831525487, + "grad_norm": 3.2504806518554688, + "learning_rate": 1.2952726093823322e-05, + "loss": 0.9994, + "step": 6738 + }, + { + "epoch": 0.4225343281710452, + "grad_norm": 3.0767722129821777, + "learning_rate": 1.295078581407443e-05, + "loss": 1.0171, + "step": 6739 + }, + { + "epoch": 0.4225970280268355, + "grad_norm": 2.983442783355713, + "learning_rate": 1.2948845412635e-05, + "loss": 1.2395, + "step": 6740 + }, + { + "epoch": 0.42265972788262585, + "grad_norm": 3.0520358085632324, + "learning_rate": 1.2946904889585057e-05, + "loss": 1.066, + "step": 6741 + }, + { + "epoch": 0.42272242773841623, + "grad_norm": 2.9939615726470947, + "learning_rate": 1.294496424500463e-05, + "loss": 1.1314, + "step": 6742 + }, + { + "epoch": 0.42278512759420656, + "grad_norm": 3.1255452632904053, + "learning_rate": 1.2943023478973744e-05, + "loss": 1.0367, + "step": 6743 + }, + { + "epoch": 0.4228478274499969, + "grad_norm": 3.1394202709198, + "learning_rate": 1.2941082591572443e-05, + "loss": 1.1673, + "step": 6744 + }, + { + "epoch": 0.4229105273057872, + "grad_norm": 3.3798608779907227, + "learning_rate": 1.293914158288077e-05, + "loss": 1.1371, + "step": 6745 + }, + { + "epoch": 0.42297322716157754, + "grad_norm": 3.003701686859131, + "learning_rate": 1.2937200452978767e-05, + "loss": 1.1755, + "step": 6746 + }, + { + "epoch": 0.42303592701736786, + "grad_norm": 3.0717034339904785, + "learning_rate": 1.2935259201946493e-05, + "loss": 1.375, + "step": 6747 + }, + { + "epoch": 
0.4230986268731582, + "grad_norm": 3.165642738342285, + "learning_rate": 1.2933317829863996e-05, + "loss": 0.9237, + "step": 6748 + }, + { + "epoch": 0.4231613267289485, + "grad_norm": 2.7770681381225586, + "learning_rate": 1.2931376336811348e-05, + "loss": 1.1076, + "step": 6749 + }, + { + "epoch": 0.42322402658473884, + "grad_norm": 3.2638375759124756, + "learning_rate": 1.292943472286861e-05, + "loss": 1.2941, + "step": 6750 + }, + { + "epoch": 0.42328672644052917, + "grad_norm": 2.987943649291992, + "learning_rate": 1.2927492988115857e-05, + "loss": 1.2991, + "step": 6751 + }, + { + "epoch": 0.4233494262963195, + "grad_norm": 2.9281904697418213, + "learning_rate": 1.2925551132633164e-05, + "loss": 1.0776, + "step": 6752 + }, + { + "epoch": 0.4234121261521098, + "grad_norm": 3.0203752517700195, + "learning_rate": 1.2923609156500613e-05, + "loss": 1.2238, + "step": 6753 + }, + { + "epoch": 0.4234748260079002, + "grad_norm": 2.607971668243408, + "learning_rate": 1.2921667059798297e-05, + "loss": 1.0284, + "step": 6754 + }, + { + "epoch": 0.42353752586369053, + "grad_norm": 2.897588014602661, + "learning_rate": 1.29197248426063e-05, + "loss": 0.9631, + "step": 6755 + }, + { + "epoch": 0.42360022571948086, + "grad_norm": 3.1764557361602783, + "learning_rate": 1.2917782505004725e-05, + "loss": 1.0953, + "step": 6756 + }, + { + "epoch": 0.4236629255752712, + "grad_norm": 2.8382368087768555, + "learning_rate": 1.2915840047073672e-05, + "loss": 1.0903, + "step": 6757 + }, + { + "epoch": 0.4237256254310615, + "grad_norm": 3.572089672088623, + "learning_rate": 1.2913897468893249e-05, + "loss": 1.1453, + "step": 6758 + }, + { + "epoch": 0.42378832528685184, + "grad_norm": 2.9242658615112305, + "learning_rate": 1.2911954770543565e-05, + "loss": 0.98, + "step": 6759 + }, + { + "epoch": 0.42385102514264217, + "grad_norm": 2.6763548851013184, + "learning_rate": 1.2910011952104742e-05, + "loss": 1.0832, + "step": 6760 + }, + { + "epoch": 0.4239137249984325, + "grad_norm": 2.790308952331543, + "learning_rate": 1.2908069013656895e-05, + "loss": 1.0913, + "step": 6761 + }, + { + "epoch": 0.4239764248542228, + "grad_norm": 3.116678476333618, + "learning_rate": 1.2906125955280158e-05, + "loss": 1.2561, + "step": 6762 + }, + { + "epoch": 0.42403912471001315, + "grad_norm": 3.0152549743652344, + "learning_rate": 1.2904182777054656e-05, + "loss": 1.1611, + "step": 6763 + }, + { + "epoch": 0.4241018245658035, + "grad_norm": 3.087740898132324, + "learning_rate": 1.2902239479060532e-05, + "loss": 1.2575, + "step": 6764 + }, + { + "epoch": 0.4241645244215938, + "grad_norm": 2.7440590858459473, + "learning_rate": 1.2900296061377927e-05, + "loss": 1.0723, + "step": 6765 + }, + { + "epoch": 0.4242272242773842, + "grad_norm": 3.0673415660858154, + "learning_rate": 1.2898352524086983e-05, + "loss": 1.2731, + "step": 6766 + }, + { + "epoch": 0.4242899241331745, + "grad_norm": 2.8208162784576416, + "learning_rate": 1.2896408867267857e-05, + "loss": 1.0901, + "step": 6767 + }, + { + "epoch": 0.42435262398896484, + "grad_norm": 3.116319179534912, + "learning_rate": 1.2894465091000701e-05, + "loss": 1.1195, + "step": 6768 + }, + { + "epoch": 0.42441532384475517, + "grad_norm": 2.94429874420166, + "learning_rate": 1.2892521195365679e-05, + "loss": 1.1076, + "step": 6769 + }, + { + "epoch": 0.4244780237005455, + "grad_norm": 3.1529958248138428, + "learning_rate": 1.2890577180442953e-05, + "loss": 1.0077, + "step": 6770 + }, + { + "epoch": 0.4245407235563358, + "grad_norm": 3.1094934940338135, + "learning_rate": 
1.2888633046312705e-05, + "loss": 1.226, + "step": 6771 + }, + { + "epoch": 0.42460342341212615, + "grad_norm": 3.1235413551330566, + "learning_rate": 1.2886688793055097e-05, + "loss": 1.0189, + "step": 6772 + }, + { + "epoch": 0.4246661232679165, + "grad_norm": 3.127775192260742, + "learning_rate": 1.288474442075032e-05, + "loss": 1.195, + "step": 6773 + }, + { + "epoch": 0.4247288231237068, + "grad_norm": 2.935081720352173, + "learning_rate": 1.2882799929478557e-05, + "loss": 0.9915, + "step": 6774 + }, + { + "epoch": 0.4247915229794971, + "grad_norm": 3.213676929473877, + "learning_rate": 1.288085531932e-05, + "loss": 1.2343, + "step": 6775 + }, + { + "epoch": 0.42485422283528745, + "grad_norm": 3.097310781478882, + "learning_rate": 1.2878910590354843e-05, + "loss": 1.1405, + "step": 6776 + }, + { + "epoch": 0.42491692269107784, + "grad_norm": 2.794008731842041, + "learning_rate": 1.2876965742663287e-05, + "loss": 1.0128, + "step": 6777 + }, + { + "epoch": 0.42497962254686816, + "grad_norm": 3.0193288326263428, + "learning_rate": 1.287502077632554e-05, + "loss": 1.1106, + "step": 6778 + }, + { + "epoch": 0.4250423224026585, + "grad_norm": 3.029144287109375, + "learning_rate": 1.2873075691421808e-05, + "loss": 1.0945, + "step": 6779 + }, + { + "epoch": 0.4251050222584488, + "grad_norm": 3.0266714096069336, + "learning_rate": 1.287113048803231e-05, + "loss": 1.2466, + "step": 6780 + }, + { + "epoch": 0.42516772211423914, + "grad_norm": 3.0894694328308105, + "learning_rate": 1.2869185166237266e-05, + "loss": 1.1221, + "step": 6781 + }, + { + "epoch": 0.42523042197002947, + "grad_norm": 3.113748550415039, + "learning_rate": 1.28672397261169e-05, + "loss": 1.0843, + "step": 6782 + }, + { + "epoch": 0.4252931218258198, + "grad_norm": 2.842684745788574, + "learning_rate": 1.2865294167751444e-05, + "loss": 0.9327, + "step": 6783 + }, + { + "epoch": 0.4253558216816101, + "grad_norm": 2.9851133823394775, + "learning_rate": 1.2863348491221129e-05, + "loss": 0.9829, + "step": 6784 + }, + { + "epoch": 0.42541852153740045, + "grad_norm": 3.0748982429504395, + "learning_rate": 1.2861402696606198e-05, + "loss": 0.9789, + "step": 6785 + }, + { + "epoch": 0.4254812213931908, + "grad_norm": 3.0357441902160645, + "learning_rate": 1.2859456783986892e-05, + "loss": 1.2117, + "step": 6786 + }, + { + "epoch": 0.4255439212489811, + "grad_norm": 3.126687526702881, + "learning_rate": 1.285751075344347e-05, + "loss": 1.0147, + "step": 6787 + }, + { + "epoch": 0.42560662110477143, + "grad_norm": 2.851289987564087, + "learning_rate": 1.2855564605056174e-05, + "loss": 1.0595, + "step": 6788 + }, + { + "epoch": 0.4256693209605618, + "grad_norm": 2.847919225692749, + "learning_rate": 1.2853618338905271e-05, + "loss": 1.2635, + "step": 6789 + }, + { + "epoch": 0.42573202081635214, + "grad_norm": 3.0255346298217773, + "learning_rate": 1.2851671955071024e-05, + "loss": 1.2225, + "step": 6790 + }, + { + "epoch": 0.42579472067214247, + "grad_norm": 2.884411573410034, + "learning_rate": 1.2849725453633699e-05, + "loss": 1.111, + "step": 6791 + }, + { + "epoch": 0.4258574205279328, + "grad_norm": 3.263030529022217, + "learning_rate": 1.2847778834673572e-05, + "loss": 1.0195, + "step": 6792 + }, + { + "epoch": 0.4259201203837231, + "grad_norm": 3.2139694690704346, + "learning_rate": 1.2845832098270925e-05, + "loss": 1.0603, + "step": 6793 + }, + { + "epoch": 0.42598282023951345, + "grad_norm": 2.8423192501068115, + "learning_rate": 1.2843885244506033e-05, + "loss": 1.3414, + "step": 6794 + }, + { + "epoch": 0.4260455200953038, 
+ "grad_norm": 3.303861379623413, + "learning_rate": 1.2841938273459192e-05, + "loss": 1.0408, + "step": 6795 + }, + { + "epoch": 0.4261082199510941, + "grad_norm": 3.0996198654174805, + "learning_rate": 1.2839991185210694e-05, + "loss": 1.0917, + "step": 6796 + }, + { + "epoch": 0.42617091980688443, + "grad_norm": 3.0438146591186523, + "learning_rate": 1.2838043979840833e-05, + "loss": 1.1522, + "step": 6797 + }, + { + "epoch": 0.42623361966267476, + "grad_norm": 2.7780232429504395, + "learning_rate": 1.2836096657429916e-05, + "loss": 1.2628, + "step": 6798 + }, + { + "epoch": 0.4262963195184651, + "grad_norm": 3.13669490814209, + "learning_rate": 1.2834149218058248e-05, + "loss": 1.0239, + "step": 6799 + }, + { + "epoch": 0.42635901937425547, + "grad_norm": 3.1088693141937256, + "learning_rate": 1.2832201661806142e-05, + "loss": 1.0665, + "step": 6800 + }, + { + "epoch": 0.4264217192300458, + "grad_norm": 2.5787951946258545, + "learning_rate": 1.2830253988753915e-05, + "loss": 1.1643, + "step": 6801 + }, + { + "epoch": 0.4264844190858361, + "grad_norm": 3.0724191665649414, + "learning_rate": 1.2828306198981895e-05, + "loss": 1.1304, + "step": 6802 + }, + { + "epoch": 0.42654711894162645, + "grad_norm": 3.09965181350708, + "learning_rate": 1.2826358292570398e-05, + "loss": 0.9906, + "step": 6803 + }, + { + "epoch": 0.4266098187974168, + "grad_norm": 3.136080741882324, + "learning_rate": 1.2824410269599765e-05, + "loss": 0.9676, + "step": 6804 + }, + { + "epoch": 0.4266725186532071, + "grad_norm": 3.216989755630493, + "learning_rate": 1.2822462130150326e-05, + "loss": 1.0885, + "step": 6805 + }, + { + "epoch": 0.4267352185089974, + "grad_norm": 3.299123764038086, + "learning_rate": 1.2820513874302427e-05, + "loss": 1.0925, + "step": 6806 + }, + { + "epoch": 0.42679791836478775, + "grad_norm": 3.0159435272216797, + "learning_rate": 1.2818565502136414e-05, + "loss": 1.1577, + "step": 6807 + }, + { + "epoch": 0.4268606182205781, + "grad_norm": 3.1394875049591064, + "learning_rate": 1.2816617013732634e-05, + "loss": 1.0712, + "step": 6808 + }, + { + "epoch": 0.4269233180763684, + "grad_norm": 2.818347215652466, + "learning_rate": 1.281466840917145e-05, + "loss": 1.2423, + "step": 6809 + }, + { + "epoch": 0.42698601793215873, + "grad_norm": 2.8620569705963135, + "learning_rate": 1.2812719688533214e-05, + "loss": 1.266, + "step": 6810 + }, + { + "epoch": 0.42704871778794906, + "grad_norm": 2.74960994720459, + "learning_rate": 1.2810770851898295e-05, + "loss": 1.1447, + "step": 6811 + }, + { + "epoch": 0.42711141764373944, + "grad_norm": 2.8930485248565674, + "learning_rate": 1.2808821899347063e-05, + "loss": 1.1396, + "step": 6812 + }, + { + "epoch": 0.42717411749952977, + "grad_norm": 3.126687526702881, + "learning_rate": 1.2806872830959898e-05, + "loss": 1.0891, + "step": 6813 + }, + { + "epoch": 0.4272368173553201, + "grad_norm": 3.0552847385406494, + "learning_rate": 1.2804923646817169e-05, + "loss": 1.0769, + "step": 6814 + }, + { + "epoch": 0.4272995172111104, + "grad_norm": 3.278766393661499, + "learning_rate": 1.2802974346999268e-05, + "loss": 1.1638, + "step": 6815 + }, + { + "epoch": 0.42736221706690075, + "grad_norm": 3.4686598777770996, + "learning_rate": 1.2801024931586584e-05, + "loss": 1.0709, + "step": 6816 + }, + { + "epoch": 0.4274249169226911, + "grad_norm": 3.005122661590576, + "learning_rate": 1.2799075400659509e-05, + "loss": 1.0613, + "step": 6817 + }, + { + "epoch": 0.4274876167784814, + "grad_norm": 2.968047618865967, + "learning_rate": 1.2797125754298443e-05, + "loss": 
1.092, + "step": 6818 + }, + { + "epoch": 0.42755031663427173, + "grad_norm": 2.8828516006469727, + "learning_rate": 1.2795175992583786e-05, + "loss": 1.23, + "step": 6819 + }, + { + "epoch": 0.42761301649006206, + "grad_norm": 3.179996967315674, + "learning_rate": 1.2793226115595951e-05, + "loss": 1.0352, + "step": 6820 + }, + { + "epoch": 0.4276757163458524, + "grad_norm": 2.738358974456787, + "learning_rate": 1.2791276123415348e-05, + "loss": 1.1915, + "step": 6821 + }, + { + "epoch": 0.4277384162016427, + "grad_norm": 2.7325518131256104, + "learning_rate": 1.2789326016122399e-05, + "loss": 0.9507, + "step": 6822 + }, + { + "epoch": 0.4278011160574331, + "grad_norm": 2.705857276916504, + "learning_rate": 1.2787375793797518e-05, + "loss": 1.0193, + "step": 6823 + }, + { + "epoch": 0.4278638159132234, + "grad_norm": 3.2140419483184814, + "learning_rate": 1.2785425456521143e-05, + "loss": 1.0818, + "step": 6824 + }, + { + "epoch": 0.42792651576901375, + "grad_norm": 3.117537260055542, + "learning_rate": 1.2783475004373696e-05, + "loss": 0.9437, + "step": 6825 + }, + { + "epoch": 0.4279892156248041, + "grad_norm": 2.714980363845825, + "learning_rate": 1.2781524437435619e-05, + "loss": 1.3929, + "step": 6826 + }, + { + "epoch": 0.4280519154805944, + "grad_norm": 3.004643678665161, + "learning_rate": 1.2779573755787356e-05, + "loss": 1.1182, + "step": 6827 + }, + { + "epoch": 0.42811461533638473, + "grad_norm": 2.8913564682006836, + "learning_rate": 1.2777622959509346e-05, + "loss": 1.1041, + "step": 6828 + }, + { + "epoch": 0.42817731519217506, + "grad_norm": 3.0983331203460693, + "learning_rate": 1.2775672048682047e-05, + "loss": 1.1275, + "step": 6829 + }, + { + "epoch": 0.4282400150479654, + "grad_norm": 3.2117719650268555, + "learning_rate": 1.277372102338591e-05, + "loss": 1.038, + "step": 6830 + }, + { + "epoch": 0.4283027149037557, + "grad_norm": 3.701078414916992, + "learning_rate": 1.2771769883701397e-05, + "loss": 1.0087, + "step": 6831 + }, + { + "epoch": 0.42836541475954604, + "grad_norm": 3.0539519786834717, + "learning_rate": 1.2769818629708972e-05, + "loss": 1.2571, + "step": 6832 + }, + { + "epoch": 0.42842811461533636, + "grad_norm": 2.8451592922210693, + "learning_rate": 1.2767867261489108e-05, + "loss": 1.0343, + "step": 6833 + }, + { + "epoch": 0.4284908144711267, + "grad_norm": 2.7153191566467285, + "learning_rate": 1.2765915779122276e-05, + "loss": 1.166, + "step": 6834 + }, + { + "epoch": 0.4285535143269171, + "grad_norm": 2.9894254207611084, + "learning_rate": 1.2763964182688955e-05, + "loss": 1.0562, + "step": 6835 + }, + { + "epoch": 0.4286162141827074, + "grad_norm": 2.90433406829834, + "learning_rate": 1.2762012472269634e-05, + "loss": 1.0462, + "step": 6836 + }, + { + "epoch": 0.4286789140384977, + "grad_norm": 3.168940305709839, + "learning_rate": 1.2760060647944794e-05, + "loss": 1.068, + "step": 6837 + }, + { + "epoch": 0.42874161389428805, + "grad_norm": 3.322687864303589, + "learning_rate": 1.2758108709794937e-05, + "loss": 0.9783, + "step": 6838 + }, + { + "epoch": 0.4288043137500784, + "grad_norm": 2.8359196186065674, + "learning_rate": 1.275615665790055e-05, + "loss": 1.0507, + "step": 6839 + }, + { + "epoch": 0.4288670136058687, + "grad_norm": 2.9148032665252686, + "learning_rate": 1.2754204492342148e-05, + "loss": 1.1324, + "step": 6840 + }, + { + "epoch": 0.42892971346165903, + "grad_norm": 3.2197282314300537, + "learning_rate": 1.275225221320023e-05, + "loss": 1.0783, + "step": 6841 + }, + { + "epoch": 0.42899241331744936, + "grad_norm": 
3.123656988143921, + "learning_rate": 1.2750299820555312e-05, + "loss": 1.2105, + "step": 6842 + }, + { + "epoch": 0.4290551131732397, + "grad_norm": 3.3941543102264404, + "learning_rate": 1.2748347314487905e-05, + "loss": 1.1329, + "step": 6843 + }, + { + "epoch": 0.42911781302903, + "grad_norm": 3.0282528400421143, + "learning_rate": 1.274639469507854e-05, + "loss": 1.0907, + "step": 6844 + }, + { + "epoch": 0.42918051288482034, + "grad_norm": 3.2331724166870117, + "learning_rate": 1.2744441962407734e-05, + "loss": 1.0727, + "step": 6845 + }, + { + "epoch": 0.42924321274061067, + "grad_norm": 3.1654062271118164, + "learning_rate": 1.2742489116556023e-05, + "loss": 1.1819, + "step": 6846 + }, + { + "epoch": 0.42930591259640105, + "grad_norm": 3.191190242767334, + "learning_rate": 1.2740536157603942e-05, + "loss": 1.0396, + "step": 6847 + }, + { + "epoch": 0.4293686124521914, + "grad_norm": 2.9582533836364746, + "learning_rate": 1.2738583085632028e-05, + "loss": 1.3442, + "step": 6848 + }, + { + "epoch": 0.4294313123079817, + "grad_norm": 3.4705426692962646, + "learning_rate": 1.2736629900720832e-05, + "loss": 1.0214, + "step": 6849 + }, + { + "epoch": 0.42949401216377203, + "grad_norm": 3.023643732070923, + "learning_rate": 1.2734676602950894e-05, + "loss": 1.1095, + "step": 6850 + }, + { + "epoch": 0.42955671201956236, + "grad_norm": 2.8540685176849365, + "learning_rate": 1.2732723192402779e-05, + "loss": 1.111, + "step": 6851 + }, + { + "epoch": 0.4296194118753527, + "grad_norm": 3.6532156467437744, + "learning_rate": 1.2730769669157037e-05, + "loss": 1.1436, + "step": 6852 + }, + { + "epoch": 0.429682111731143, + "grad_norm": 3.0326004028320312, + "learning_rate": 1.2728816033294236e-05, + "loss": 1.0766, + "step": 6853 + }, + { + "epoch": 0.42974481158693334, + "grad_norm": 3.036604166030884, + "learning_rate": 1.2726862284894939e-05, + "loss": 1.2215, + "step": 6854 + }, + { + "epoch": 0.42980751144272367, + "grad_norm": 2.8027615547180176, + "learning_rate": 1.2724908424039728e-05, + "loss": 1.0213, + "step": 6855 + }, + { + "epoch": 0.429870211298514, + "grad_norm": 3.2444887161254883, + "learning_rate": 1.2722954450809169e-05, + "loss": 1.2075, + "step": 6856 + }, + { + "epoch": 0.4299329111543043, + "grad_norm": 2.8939878940582275, + "learning_rate": 1.272100036528385e-05, + "loss": 1.1445, + "step": 6857 + }, + { + "epoch": 0.4299956110100947, + "grad_norm": 3.0501296520233154, + "learning_rate": 1.2719046167544363e-05, + "loss": 0.9904, + "step": 6858 + }, + { + "epoch": 0.43005831086588503, + "grad_norm": 2.9248523712158203, + "learning_rate": 1.2717091857671287e-05, + "loss": 1.0137, + "step": 6859 + }, + { + "epoch": 0.43012101072167536, + "grad_norm": 3.4630002975463867, + "learning_rate": 1.2715137435745228e-05, + "loss": 0.945, + "step": 6860 + }, + { + "epoch": 0.4301837105774657, + "grad_norm": 3.0625405311584473, + "learning_rate": 1.2713182901846782e-05, + "loss": 1.2767, + "step": 6861 + }, + { + "epoch": 0.430246410433256, + "grad_norm": 2.6848437786102295, + "learning_rate": 1.2711228256056553e-05, + "loss": 1.2248, + "step": 6862 + }, + { + "epoch": 0.43030911028904634, + "grad_norm": 2.8085339069366455, + "learning_rate": 1.2709273498455152e-05, + "loss": 1.1514, + "step": 6863 + }, + { + "epoch": 0.43037181014483666, + "grad_norm": 3.06841778755188, + "learning_rate": 1.2707318629123195e-05, + "loss": 1.0394, + "step": 6864 + }, + { + "epoch": 0.430434510000627, + "grad_norm": 3.016894817352295, + "learning_rate": 1.27053636481413e-05, + "loss": 1.0692, + 
"step": 6865 + }, + { + "epoch": 0.4304972098564173, + "grad_norm": 3.202580451965332, + "learning_rate": 1.2703408555590089e-05, + "loss": 1.1727, + "step": 6866 + }, + { + "epoch": 0.43055990971220764, + "grad_norm": 2.9536213874816895, + "learning_rate": 1.2701453351550193e-05, + "loss": 1.0881, + "step": 6867 + }, + { + "epoch": 0.43062260956799797, + "grad_norm": 3.1655378341674805, + "learning_rate": 1.2699498036102239e-05, + "loss": 1.1678, + "step": 6868 + }, + { + "epoch": 0.4306853094237883, + "grad_norm": 3.2993004322052, + "learning_rate": 1.2697542609326872e-05, + "loss": 1.1313, + "step": 6869 + }, + { + "epoch": 0.4307480092795787, + "grad_norm": 3.2718911170959473, + "learning_rate": 1.2695587071304726e-05, + "loss": 1.0901, + "step": 6870 + }, + { + "epoch": 0.430810709135369, + "grad_norm": 3.0449728965759277, + "learning_rate": 1.2693631422116455e-05, + "loss": 0.9682, + "step": 6871 + }, + { + "epoch": 0.43087340899115933, + "grad_norm": 2.8021974563598633, + "learning_rate": 1.2691675661842704e-05, + "loss": 1.1688, + "step": 6872 + }, + { + "epoch": 0.43093610884694966, + "grad_norm": 2.94998836517334, + "learning_rate": 1.2689719790564134e-05, + "loss": 1.1417, + "step": 6873 + }, + { + "epoch": 0.43099880870274, + "grad_norm": 3.2050421237945557, + "learning_rate": 1.26877638083614e-05, + "loss": 1.0349, + "step": 6874 + }, + { + "epoch": 0.4310615085585303, + "grad_norm": 3.1142892837524414, + "learning_rate": 1.268580771531517e-05, + "loss": 1.0343, + "step": 6875 + }, + { + "epoch": 0.43112420841432064, + "grad_norm": 2.8724682331085205, + "learning_rate": 1.2683851511506114e-05, + "loss": 1.0986, + "step": 6876 + }, + { + "epoch": 0.43118690827011097, + "grad_norm": 3.0046374797821045, + "learning_rate": 1.26818951970149e-05, + "loss": 1.0096, + "step": 6877 + }, + { + "epoch": 0.4312496081259013, + "grad_norm": 2.9479475021362305, + "learning_rate": 1.2679938771922215e-05, + "loss": 1.1032, + "step": 6878 + }, + { + "epoch": 0.4313123079816916, + "grad_norm": 3.0748302936553955, + "learning_rate": 1.2677982236308737e-05, + "loss": 1.1359, + "step": 6879 + }, + { + "epoch": 0.43137500783748195, + "grad_norm": 3.022064208984375, + "learning_rate": 1.2676025590255154e-05, + "loss": 1.163, + "step": 6880 + }, + { + "epoch": 0.43143770769327233, + "grad_norm": 3.234405994415283, + "learning_rate": 1.267406883384216e-05, + "loss": 1.0823, + "step": 6881 + }, + { + "epoch": 0.43150040754906266, + "grad_norm": 3.164961338043213, + "learning_rate": 1.2672111967150451e-05, + "loss": 1.0849, + "step": 6882 + }, + { + "epoch": 0.431563107404853, + "grad_norm": 2.967149257659912, + "learning_rate": 1.2670154990260725e-05, + "loss": 1.0954, + "step": 6883 + }, + { + "epoch": 0.4316258072606433, + "grad_norm": 2.924393892288208, + "learning_rate": 1.2668197903253694e-05, + "loss": 1.0722, + "step": 6884 + }, + { + "epoch": 0.43168850711643364, + "grad_norm": 3.064877510070801, + "learning_rate": 1.266624070621006e-05, + "loss": 1.2, + "step": 6885 + }, + { + "epoch": 0.43175120697222397, + "grad_norm": 2.756847381591797, + "learning_rate": 1.2664283399210548e-05, + "loss": 0.9362, + "step": 6886 + }, + { + "epoch": 0.4318139068280143, + "grad_norm": 2.9450440406799316, + "learning_rate": 1.2662325982335868e-05, + "loss": 1.1191, + "step": 6887 + }, + { + "epoch": 0.4318766066838046, + "grad_norm": 3.247872829437256, + "learning_rate": 1.2660368455666752e-05, + "loss": 1.1112, + "step": 6888 + }, + { + "epoch": 0.43193930653959495, + "grad_norm": 3.3571267127990723, + 
"learning_rate": 1.2658410819283922e-05, + "loss": 1.2722, + "step": 6889 + }, + { + "epoch": 0.4320020063953853, + "grad_norm": 2.5089621543884277, + "learning_rate": 1.2656453073268117e-05, + "loss": 1.248, + "step": 6890 + }, + { + "epoch": 0.4320647062511756, + "grad_norm": 3.452035665512085, + "learning_rate": 1.265449521770007e-05, + "loss": 1.0055, + "step": 6891 + }, + { + "epoch": 0.43212740610696593, + "grad_norm": 3.321228504180908, + "learning_rate": 1.2652537252660522e-05, + "loss": 1.1361, + "step": 6892 + }, + { + "epoch": 0.4321901059627563, + "grad_norm": 2.7081058025360107, + "learning_rate": 1.2650579178230223e-05, + "loss": 1.1615, + "step": 6893 + }, + { + "epoch": 0.43225280581854664, + "grad_norm": 2.9792122840881348, + "learning_rate": 1.2648620994489922e-05, + "loss": 1.1035, + "step": 6894 + }, + { + "epoch": 0.43231550567433696, + "grad_norm": 2.7544682025909424, + "learning_rate": 1.2646662701520378e-05, + "loss": 1.1801, + "step": 6895 + }, + { + "epoch": 0.4323782055301273, + "grad_norm": 3.5933594703674316, + "learning_rate": 1.2644704299402349e-05, + "loss": 1.1287, + "step": 6896 + }, + { + "epoch": 0.4324409053859176, + "grad_norm": 3.197995185852051, + "learning_rate": 1.2642745788216597e-05, + "loss": 1.0366, + "step": 6897 + }, + { + "epoch": 0.43250360524170794, + "grad_norm": 3.079057455062866, + "learning_rate": 1.2640787168043893e-05, + "loss": 1.1759, + "step": 6898 + }, + { + "epoch": 0.43256630509749827, + "grad_norm": 2.8323168754577637, + "learning_rate": 1.2638828438965015e-05, + "loss": 1.0308, + "step": 6899 + }, + { + "epoch": 0.4326290049532886, + "grad_norm": 2.9745030403137207, + "learning_rate": 1.263686960106073e-05, + "loss": 1.0283, + "step": 6900 + }, + { + "epoch": 0.4326917048090789, + "grad_norm": 3.3484158515930176, + "learning_rate": 1.2634910654411831e-05, + "loss": 1.1716, + "step": 6901 + }, + { + "epoch": 0.43275440466486925, + "grad_norm": 3.01043438911438, + "learning_rate": 1.2632951599099104e-05, + "loss": 1.1327, + "step": 6902 + }, + { + "epoch": 0.4328171045206596, + "grad_norm": 2.95650577545166, + "learning_rate": 1.2630992435203332e-05, + "loss": 1.1929, + "step": 6903 + }, + { + "epoch": 0.43287980437644996, + "grad_norm": 3.0484654903411865, + "learning_rate": 1.2629033162805324e-05, + "loss": 1.1101, + "step": 6904 + }, + { + "epoch": 0.4329425042322403, + "grad_norm": 3.037277936935425, + "learning_rate": 1.262707378198587e-05, + "loss": 1.1002, + "step": 6905 + }, + { + "epoch": 0.4330052040880306, + "grad_norm": 3.3485138416290283, + "learning_rate": 1.262511429282578e-05, + "loss": 1.2678, + "step": 6906 + }, + { + "epoch": 0.43306790394382094, + "grad_norm": 2.9010748863220215, + "learning_rate": 1.262315469540586e-05, + "loss": 1.2063, + "step": 6907 + }, + { + "epoch": 0.43313060379961127, + "grad_norm": 3.332590103149414, + "learning_rate": 1.2621194989806926e-05, + "loss": 1.0738, + "step": 6908 + }, + { + "epoch": 0.4331933036554016, + "grad_norm": 3.039156436920166, + "learning_rate": 1.2619235176109798e-05, + "loss": 1.1507, + "step": 6909 + }, + { + "epoch": 0.4332560035111919, + "grad_norm": 3.093553066253662, + "learning_rate": 1.2617275254395296e-05, + "loss": 1.3064, + "step": 6910 + }, + { + "epoch": 0.43331870336698225, + "grad_norm": 2.9311399459838867, + "learning_rate": 1.2615315224744247e-05, + "loss": 1.1321, + "step": 6911 + }, + { + "epoch": 0.4333814032227726, + "grad_norm": 2.9865870475769043, + "learning_rate": 1.2613355087237484e-05, + "loss": 1.0053, + "step": 6912 + }, + { + 
"epoch": 0.4334441030785629, + "grad_norm": 3.322350025177002, + "learning_rate": 1.2611394841955847e-05, + "loss": 1.1022, + "step": 6913 + }, + { + "epoch": 0.43350680293435323, + "grad_norm": 3.365086078643799, + "learning_rate": 1.2609434488980168e-05, + "loss": 1.0485, + "step": 6914 + }, + { + "epoch": 0.43356950279014356, + "grad_norm": 2.5559182167053223, + "learning_rate": 1.26074740283913e-05, + "loss": 1.1848, + "step": 6915 + }, + { + "epoch": 0.43363220264593394, + "grad_norm": 3.139997959136963, + "learning_rate": 1.2605513460270084e-05, + "loss": 1.3191, + "step": 6916 + }, + { + "epoch": 0.43369490250172427, + "grad_norm": 3.012491226196289, + "learning_rate": 1.2603552784697387e-05, + "loss": 1.0049, + "step": 6917 + }, + { + "epoch": 0.4337576023575146, + "grad_norm": 2.942789077758789, + "learning_rate": 1.2601592001754052e-05, + "loss": 1.1359, + "step": 6918 + }, + { + "epoch": 0.4338203022133049, + "grad_norm": 3.4029762744903564, + "learning_rate": 1.2599631111520956e-05, + "loss": 0.9644, + "step": 6919 + }, + { + "epoch": 0.43388300206909525, + "grad_norm": 3.2876675128936768, + "learning_rate": 1.2597670114078952e-05, + "loss": 1.0568, + "step": 6920 + }, + { + "epoch": 0.4339457019248856, + "grad_norm": 3.282804250717163, + "learning_rate": 1.2595709009508926e-05, + "loss": 1.1348, + "step": 6921 + }, + { + "epoch": 0.4340084017806759, + "grad_norm": 2.7973363399505615, + "learning_rate": 1.2593747797891743e-05, + "loss": 1.1293, + "step": 6922 + }, + { + "epoch": 0.43407110163646623, + "grad_norm": 3.204002618789673, + "learning_rate": 1.2591786479308288e-05, + "loss": 1.1535, + "step": 6923 + }, + { + "epoch": 0.43413380149225655, + "grad_norm": 2.9820735454559326, + "learning_rate": 1.2589825053839446e-05, + "loss": 1.1064, + "step": 6924 + }, + { + "epoch": 0.4341965013480469, + "grad_norm": 2.8549399375915527, + "learning_rate": 1.2587863521566105e-05, + "loss": 1.307, + "step": 6925 + }, + { + "epoch": 0.4342592012038372, + "grad_norm": 3.0078511238098145, + "learning_rate": 1.2585901882569163e-05, + "loss": 1.1725, + "step": 6926 + }, + { + "epoch": 0.43432190105962754, + "grad_norm": 2.8244078159332275, + "learning_rate": 1.258394013692951e-05, + "loss": 1.1346, + "step": 6927 + }, + { + "epoch": 0.4343846009154179, + "grad_norm": 2.7498373985290527, + "learning_rate": 1.2581978284728054e-05, + "loss": 1.2457, + "step": 6928 + }, + { + "epoch": 0.43444730077120824, + "grad_norm": 3.0550692081451416, + "learning_rate": 1.25800163260457e-05, + "loss": 1.1675, + "step": 6929 + }, + { + "epoch": 0.43451000062699857, + "grad_norm": 2.6898772716522217, + "learning_rate": 1.2578054260963363e-05, + "loss": 1.2726, + "step": 6930 + }, + { + "epoch": 0.4345727004827889, + "grad_norm": 2.961029291152954, + "learning_rate": 1.2576092089561953e-05, + "loss": 1.279, + "step": 6931 + }, + { + "epoch": 0.4346354003385792, + "grad_norm": 2.908651351928711, + "learning_rate": 1.2574129811922393e-05, + "loss": 1.0914, + "step": 6932 + }, + { + "epoch": 0.43469810019436955, + "grad_norm": 3.0091652870178223, + "learning_rate": 1.2572167428125608e-05, + "loss": 0.9805, + "step": 6933 + }, + { + "epoch": 0.4347608000501599, + "grad_norm": 3.3162219524383545, + "learning_rate": 1.2570204938252525e-05, + "loss": 1.4215, + "step": 6934 + }, + { + "epoch": 0.4348234999059502, + "grad_norm": 2.9078242778778076, + "learning_rate": 1.2568242342384081e-05, + "loss": 1.1292, + "step": 6935 + }, + { + "epoch": 0.43488619976174053, + "grad_norm": 3.090805768966675, + "learning_rate": 
1.2566279640601207e-05, + "loss": 1.1768, + "step": 6936 + }, + { + "epoch": 0.43494889961753086, + "grad_norm": 3.129621744155884, + "learning_rate": 1.2564316832984851e-05, + "loss": 0.9985, + "step": 6937 + }, + { + "epoch": 0.4350115994733212, + "grad_norm": 3.0986907482147217, + "learning_rate": 1.2562353919615954e-05, + "loss": 1.1528, + "step": 6938 + }, + { + "epoch": 0.43507429932911157, + "grad_norm": 3.0078747272491455, + "learning_rate": 1.2560390900575472e-05, + "loss": 1.1471, + "step": 6939 + }, + { + "epoch": 0.4351369991849019, + "grad_norm": 3.013512372970581, + "learning_rate": 1.2558427775944357e-05, + "loss": 1.149, + "step": 6940 + }, + { + "epoch": 0.4351996990406922, + "grad_norm": 3.4666385650634766, + "learning_rate": 1.2556464545803571e-05, + "loss": 1.0662, + "step": 6941 + }, + { + "epoch": 0.43526239889648255, + "grad_norm": 3.0747337341308594, + "learning_rate": 1.2554501210234071e-05, + "loss": 1.0348, + "step": 6942 + }, + { + "epoch": 0.4353250987522729, + "grad_norm": 2.871523141860962, + "learning_rate": 1.255253776931683e-05, + "loss": 1.2167, + "step": 6943 + }, + { + "epoch": 0.4353877986080632, + "grad_norm": 3.0527100563049316, + "learning_rate": 1.2550574223132822e-05, + "loss": 1.1665, + "step": 6944 + }, + { + "epoch": 0.43545049846385353, + "grad_norm": 3.0065505504608154, + "learning_rate": 1.254861057176302e-05, + "loss": 1.0807, + "step": 6945 + }, + { + "epoch": 0.43551319831964386, + "grad_norm": 3.1112515926361084, + "learning_rate": 1.2546646815288411e-05, + "loss": 1.2278, + "step": 6946 + }, + { + "epoch": 0.4355758981754342, + "grad_norm": 3.0126678943634033, + "learning_rate": 1.254468295378997e-05, + "loss": 0.8992, + "step": 6947 + }, + { + "epoch": 0.4356385980312245, + "grad_norm": 3.230125665664673, + "learning_rate": 1.2542718987348699e-05, + "loss": 1.0392, + "step": 6948 + }, + { + "epoch": 0.43570129788701484, + "grad_norm": 3.2835586071014404, + "learning_rate": 1.2540754916045581e-05, + "loss": 1.1718, + "step": 6949 + }, + { + "epoch": 0.43576399774280516, + "grad_norm": 3.34558367729187, + "learning_rate": 1.2538790739961622e-05, + "loss": 0.94, + "step": 6950 + }, + { + "epoch": 0.43582669759859555, + "grad_norm": 3.252758741378784, + "learning_rate": 1.253682645917782e-05, + "loss": 1.0641, + "step": 6951 + }, + { + "epoch": 0.4358893974543859, + "grad_norm": 3.2027552127838135, + "learning_rate": 1.2534862073775184e-05, + "loss": 1.0488, + "step": 6952 + }, + { + "epoch": 0.4359520973101762, + "grad_norm": 3.2323882579803467, + "learning_rate": 1.2532897583834728e-05, + "loss": 1.0943, + "step": 6953 + }, + { + "epoch": 0.43601479716596653, + "grad_norm": 2.95585036277771, + "learning_rate": 1.2530932989437463e-05, + "loss": 1.0368, + "step": 6954 + }, + { + "epoch": 0.43607749702175685, + "grad_norm": 3.1013753414154053, + "learning_rate": 1.2528968290664412e-05, + "loss": 1.1617, + "step": 6955 + }, + { + "epoch": 0.4361401968775472, + "grad_norm": 2.9595344066619873, + "learning_rate": 1.2527003487596598e-05, + "loss": 1.1516, + "step": 6956 + }, + { + "epoch": 0.4362028967333375, + "grad_norm": 3.1603786945343018, + "learning_rate": 1.252503858031505e-05, + "loss": 1.2409, + "step": 6957 + }, + { + "epoch": 0.43626559658912784, + "grad_norm": 2.885622978210449, + "learning_rate": 1.2523073568900799e-05, + "loss": 1.1464, + "step": 6958 + }, + { + "epoch": 0.43632829644491816, + "grad_norm": 3.177389144897461, + "learning_rate": 1.2521108453434887e-05, + "loss": 1.1918, + "step": 6959 + }, + { + "epoch": 
0.4363909963007085, + "grad_norm": 2.8251235485076904, + "learning_rate": 1.2519143233998348e-05, + "loss": 1.2087, + "step": 6960 + }, + { + "epoch": 0.4364536961564988, + "grad_norm": 2.670780658721924, + "learning_rate": 1.2517177910672237e-05, + "loss": 0.9975, + "step": 6961 + }, + { + "epoch": 0.4365163960122892, + "grad_norm": 2.912539005279541, + "learning_rate": 1.2515212483537595e-05, + "loss": 1.0426, + "step": 6962 + }, + { + "epoch": 0.4365790958680795, + "grad_norm": 3.0394537448883057, + "learning_rate": 1.2513246952675479e-05, + "loss": 1.2068, + "step": 6963 + }, + { + "epoch": 0.43664179572386985, + "grad_norm": 2.992382526397705, + "learning_rate": 1.2511281318166954e-05, + "loss": 1.1229, + "step": 6964 + }, + { + "epoch": 0.4367044955796602, + "grad_norm": 2.895338773727417, + "learning_rate": 1.2509315580093075e-05, + "loss": 1.1008, + "step": 6965 + }, + { + "epoch": 0.4367671954354505, + "grad_norm": 3.001444101333618, + "learning_rate": 1.2507349738534913e-05, + "loss": 1.031, + "step": 6966 + }, + { + "epoch": 0.43682989529124083, + "grad_norm": 2.86982798576355, + "learning_rate": 1.2505383793573538e-05, + "loss": 1.2907, + "step": 6967 + }, + { + "epoch": 0.43689259514703116, + "grad_norm": 3.0895230770111084, + "learning_rate": 1.2503417745290026e-05, + "loss": 0.9247, + "step": 6968 + }, + { + "epoch": 0.4369552950028215, + "grad_norm": 3.2372641563415527, + "learning_rate": 1.2501451593765457e-05, + "loss": 1.006, + "step": 6969 + }, + { + "epoch": 0.4370179948586118, + "grad_norm": 2.7060494422912598, + "learning_rate": 1.2499485339080914e-05, + "loss": 1.0666, + "step": 6970 + }, + { + "epoch": 0.43708069471440214, + "grad_norm": 3.153503179550171, + "learning_rate": 1.2497518981317486e-05, + "loss": 1.0365, + "step": 6971 + }, + { + "epoch": 0.43714339457019247, + "grad_norm": 3.049938917160034, + "learning_rate": 1.249555252055627e-05, + "loss": 1.1061, + "step": 6972 + }, + { + "epoch": 0.4372060944259828, + "grad_norm": 3.084177017211914, + "learning_rate": 1.2493585956878354e-05, + "loss": 1.0097, + "step": 6973 + }, + { + "epoch": 0.4372687942817732, + "grad_norm": 3.0846757888793945, + "learning_rate": 1.2491619290364845e-05, + "loss": 1.1278, + "step": 6974 + }, + { + "epoch": 0.4373314941375635, + "grad_norm": 2.858247995376587, + "learning_rate": 1.2489652521096852e-05, + "loss": 1.0529, + "step": 6975 + }, + { + "epoch": 0.43739419399335383, + "grad_norm": 3.136125326156616, + "learning_rate": 1.2487685649155474e-05, + "loss": 1.2104, + "step": 6976 + }, + { + "epoch": 0.43745689384914416, + "grad_norm": 2.9534473419189453, + "learning_rate": 1.2485718674621836e-05, + "loss": 1.0417, + "step": 6977 + }, + { + "epoch": 0.4375195937049345, + "grad_norm": 3.338195323944092, + "learning_rate": 1.248375159757705e-05, + "loss": 1.0375, + "step": 6978 + }, + { + "epoch": 0.4375822935607248, + "grad_norm": 3.2556517124176025, + "learning_rate": 1.248178441810224e-05, + "loss": 1.0387, + "step": 6979 + }, + { + "epoch": 0.43764499341651514, + "grad_norm": 2.9857139587402344, + "learning_rate": 1.2479817136278531e-05, + "loss": 0.8933, + "step": 6980 + }, + { + "epoch": 0.43770769327230546, + "grad_norm": 2.908832311630249, + "learning_rate": 1.2477849752187056e-05, + "loss": 1.2972, + "step": 6981 + }, + { + "epoch": 0.4377703931280958, + "grad_norm": 3.3401105403900146, + "learning_rate": 1.2475882265908949e-05, + "loss": 1.1277, + "step": 6982 + }, + { + "epoch": 0.4378330929838861, + "grad_norm": 3.2805752754211426, + "learning_rate": 
1.2473914677525347e-05, + "loss": 1.1076, + "step": 6983 + }, + { + "epoch": 0.43789579283967645, + "grad_norm": 3.1709954738616943, + "learning_rate": 1.24719469871174e-05, + "loss": 1.0124, + "step": 6984 + }, + { + "epoch": 0.43795849269546683, + "grad_norm": 2.8441624641418457, + "learning_rate": 1.2469979194766248e-05, + "loss": 1.2245, + "step": 6985 + }, + { + "epoch": 0.43802119255125715, + "grad_norm": 3.162386178970337, + "learning_rate": 1.246801130055305e-05, + "loss": 1.1668, + "step": 6986 + }, + { + "epoch": 0.4380838924070475, + "grad_norm": 3.43778133392334, + "learning_rate": 1.2466043304558953e-05, + "loss": 1.1708, + "step": 6987 + }, + { + "epoch": 0.4381465922628378, + "grad_norm": 3.3585002422332764, + "learning_rate": 1.2464075206865125e-05, + "loss": 1.0375, + "step": 6988 + }, + { + "epoch": 0.43820929211862814, + "grad_norm": 2.9382925033569336, + "learning_rate": 1.2462107007552726e-05, + "loss": 1.0609, + "step": 6989 + }, + { + "epoch": 0.43827199197441846, + "grad_norm": 3.1381616592407227, + "learning_rate": 1.2460138706702929e-05, + "loss": 1.0058, + "step": 6990 + }, + { + "epoch": 0.4383346918302088, + "grad_norm": 2.8479535579681396, + "learning_rate": 1.2458170304396904e-05, + "loss": 1.2535, + "step": 6991 + }, + { + "epoch": 0.4383973916859991, + "grad_norm": 2.633129358291626, + "learning_rate": 1.2456201800715828e-05, + "loss": 1.059, + "step": 6992 + }, + { + "epoch": 0.43846009154178944, + "grad_norm": 2.8728716373443604, + "learning_rate": 1.2454233195740881e-05, + "loss": 1.2316, + "step": 6993 + }, + { + "epoch": 0.43852279139757977, + "grad_norm": 3.1120967864990234, + "learning_rate": 1.245226448955325e-05, + "loss": 1.1605, + "step": 6994 + }, + { + "epoch": 0.4385854912533701, + "grad_norm": 3.293297052383423, + "learning_rate": 1.2450295682234125e-05, + "loss": 0.9823, + "step": 6995 + }, + { + "epoch": 0.4386481911091604, + "grad_norm": 3.0264158248901367, + "learning_rate": 1.24483267738647e-05, + "loss": 1.0828, + "step": 6996 + }, + { + "epoch": 0.4387108909649508, + "grad_norm": 2.988771438598633, + "learning_rate": 1.2446357764526174e-05, + "loss": 1.0148, + "step": 6997 + }, + { + "epoch": 0.43877359082074113, + "grad_norm": 3.101435899734497, + "learning_rate": 1.2444388654299742e-05, + "loss": 1.179, + "step": 6998 + }, + { + "epoch": 0.43883629067653146, + "grad_norm": 2.8384768962860107, + "learning_rate": 1.2442419443266617e-05, + "loss": 0.8776, + "step": 6999 + }, + { + "epoch": 0.4388989905323218, + "grad_norm": 3.2283217906951904, + "learning_rate": 1.2440450131508008e-05, + "loss": 1.2686, + "step": 7000 + }, + { + "epoch": 0.4388989905323218, + "eval_loss": 1.1346032619476318, + "eval_runtime": 144.1579, + "eval_samples_per_second": 4.37, + "eval_steps_per_second": 1.096, + "step": 7000 + }, + { + "epoch": 0.4389616903881121, + "grad_norm": 3.2945311069488525, + "learning_rate": 1.243848071910513e-05, + "loss": 1.1467, + "step": 7001 + }, + { + "epoch": 0.43902439024390244, + "grad_norm": 2.85603404045105, + "learning_rate": 1.2436511206139198e-05, + "loss": 1.2871, + "step": 7002 + }, + { + "epoch": 0.43908709009969277, + "grad_norm": 3.395566701889038, + "learning_rate": 1.2434541592691443e-05, + "loss": 1.213, + "step": 7003 + }, + { + "epoch": 0.4391497899554831, + "grad_norm": 3.0140953063964844, + "learning_rate": 1.243257187884308e-05, + "loss": 1.0716, + "step": 7004 + }, + { + "epoch": 0.4392124898112734, + "grad_norm": 3.165915012359619, + "learning_rate": 1.2430602064675348e-05, + "loss": 1.0447, + "step": 7005 
+ }, + { + "epoch": 0.43927518966706375, + "grad_norm": 3.3091375827789307, + "learning_rate": 1.2428632150269482e-05, + "loss": 1.1056, + "step": 7006 + }, + { + "epoch": 0.4393378895228541, + "grad_norm": 3.092921257019043, + "learning_rate": 1.242666213570672e-05, + "loss": 1.0954, + "step": 7007 + }, + { + "epoch": 0.4394005893786444, + "grad_norm": 2.897458553314209, + "learning_rate": 1.2424692021068306e-05, + "loss": 1.08, + "step": 7008 + }, + { + "epoch": 0.4394632892344348, + "grad_norm": 3.4938838481903076, + "learning_rate": 1.2422721806435484e-05, + "loss": 1.0935, + "step": 7009 + }, + { + "epoch": 0.4395259890902251, + "grad_norm": 3.2642810344696045, + "learning_rate": 1.2420751491889513e-05, + "loss": 1.0841, + "step": 7010 + }, + { + "epoch": 0.43958868894601544, + "grad_norm": 3.2933528423309326, + "learning_rate": 1.2418781077511643e-05, + "loss": 1.0452, + "step": 7011 + }, + { + "epoch": 0.43965138880180576, + "grad_norm": 3.0317366123199463, + "learning_rate": 1.2416810563383136e-05, + "loss": 1.0289, + "step": 7012 + }, + { + "epoch": 0.4397140886575961, + "grad_norm": 3.125441789627075, + "learning_rate": 1.2414839949585256e-05, + "loss": 1.1685, + "step": 7013 + }, + { + "epoch": 0.4397767885133864, + "grad_norm": 2.8691108226776123, + "learning_rate": 1.241286923619927e-05, + "loss": 1.1365, + "step": 7014 + }, + { + "epoch": 0.43983948836917675, + "grad_norm": 2.947061777114868, + "learning_rate": 1.2410898423306453e-05, + "loss": 0.9185, + "step": 7015 + }, + { + "epoch": 0.4399021882249671, + "grad_norm": 3.1480112075805664, + "learning_rate": 1.2408927510988078e-05, + "loss": 1.1347, + "step": 7016 + }, + { + "epoch": 0.4399648880807574, + "grad_norm": 3.2824580669403076, + "learning_rate": 1.2406956499325429e-05, + "loss": 1.0137, + "step": 7017 + }, + { + "epoch": 0.4400275879365477, + "grad_norm": 2.7139415740966797, + "learning_rate": 1.2404985388399787e-05, + "loss": 1.0789, + "step": 7018 + }, + { + "epoch": 0.44009028779233805, + "grad_norm": 3.5770723819732666, + "learning_rate": 1.2403014178292442e-05, + "loss": 1.156, + "step": 7019 + }, + { + "epoch": 0.44015298764812844, + "grad_norm": 2.8536863327026367, + "learning_rate": 1.240104286908469e-05, + "loss": 1.1962, + "step": 7020 + }, + { + "epoch": 0.44021568750391876, + "grad_norm": 2.891472101211548, + "learning_rate": 1.2399071460857825e-05, + "loss": 1.1056, + "step": 7021 + }, + { + "epoch": 0.4402783873597091, + "grad_norm": 3.1245839595794678, + "learning_rate": 1.2397099953693146e-05, + "loss": 1.1552, + "step": 7022 + }, + { + "epoch": 0.4403410872154994, + "grad_norm": 3.357592821121216, + "learning_rate": 1.2395128347671964e-05, + "loss": 1.2323, + "step": 7023 + }, + { + "epoch": 0.44040378707128974, + "grad_norm": 3.021660804748535, + "learning_rate": 1.2393156642875579e-05, + "loss": 1.0434, + "step": 7024 + }, + { + "epoch": 0.44046648692708007, + "grad_norm": 2.9749231338500977, + "learning_rate": 1.239118483938531e-05, + "loss": 1.1565, + "step": 7025 + }, + { + "epoch": 0.4405291867828704, + "grad_norm": 3.0332112312316895, + "learning_rate": 1.238921293728248e-05, + "loss": 1.0924, + "step": 7026 + }, + { + "epoch": 0.4405918866386607, + "grad_norm": 2.892274856567383, + "learning_rate": 1.23872409366484e-05, + "loss": 1.1051, + "step": 7027 + }, + { + "epoch": 0.44065458649445105, + "grad_norm": 2.959829807281494, + "learning_rate": 1.2385268837564399e-05, + "loss": 1.254, + "step": 7028 + }, + { + "epoch": 0.4407172863502414, + "grad_norm": 3.7269296646118164, + 
"learning_rate": 1.2383296640111805e-05, + "loss": 1.2279, + "step": 7029 + }, + { + "epoch": 0.4407799862060317, + "grad_norm": 2.9075169563293457, + "learning_rate": 1.2381324344371958e-05, + "loss": 1.1163, + "step": 7030 + }, + { + "epoch": 0.44084268606182203, + "grad_norm": 3.2046213150024414, + "learning_rate": 1.2379351950426188e-05, + "loss": 1.1849, + "step": 7031 + }, + { + "epoch": 0.4409053859176124, + "grad_norm": 2.6607775688171387, + "learning_rate": 1.2377379458355842e-05, + "loss": 1.0314, + "step": 7032 + }, + { + "epoch": 0.44096808577340274, + "grad_norm": 2.834219217300415, + "learning_rate": 1.2375406868242264e-05, + "loss": 0.993, + "step": 7033 + }, + { + "epoch": 0.44103078562919307, + "grad_norm": 3.191732883453369, + "learning_rate": 1.2373434180166798e-05, + "loss": 1.1641, + "step": 7034 + }, + { + "epoch": 0.4410934854849834, + "grad_norm": 2.8584020137786865, + "learning_rate": 1.2371461394210809e-05, + "loss": 1.0697, + "step": 7035 + }, + { + "epoch": 0.4411561853407737, + "grad_norm": 2.781825542449951, + "learning_rate": 1.2369488510455644e-05, + "loss": 1.2282, + "step": 7036 + }, + { + "epoch": 0.44121888519656405, + "grad_norm": 3.1138696670532227, + "learning_rate": 1.2367515528982673e-05, + "loss": 1.0494, + "step": 7037 + }, + { + "epoch": 0.4412815850523544, + "grad_norm": 2.970829725265503, + "learning_rate": 1.2365542449873254e-05, + "loss": 1.162, + "step": 7038 + }, + { + "epoch": 0.4413442849081447, + "grad_norm": 2.8734230995178223, + "learning_rate": 1.2363569273208764e-05, + "loss": 1.1213, + "step": 7039 + }, + { + "epoch": 0.44140698476393503, + "grad_norm": 3.2868776321411133, + "learning_rate": 1.2361595999070572e-05, + "loss": 1.2382, + "step": 7040 + }, + { + "epoch": 0.44146968461972536, + "grad_norm": 3.004395008087158, + "learning_rate": 1.2359622627540059e-05, + "loss": 1.0756, + "step": 7041 + }, + { + "epoch": 0.4415323844755157, + "grad_norm": 2.7986254692077637, + "learning_rate": 1.2357649158698606e-05, + "loss": 1.1441, + "step": 7042 + }, + { + "epoch": 0.44159508433130606, + "grad_norm": 3.1993279457092285, + "learning_rate": 1.2355675592627599e-05, + "loss": 1.2061, + "step": 7043 + }, + { + "epoch": 0.4416577841870964, + "grad_norm": 3.27231764793396, + "learning_rate": 1.2353701929408425e-05, + "loss": 1.2274, + "step": 7044 + }, + { + "epoch": 0.4417204840428867, + "grad_norm": 3.05336594581604, + "learning_rate": 1.2351728169122483e-05, + "loss": 1.2613, + "step": 7045 + }, + { + "epoch": 0.44178318389867705, + "grad_norm": 3.038111686706543, + "learning_rate": 1.234975431185117e-05, + "loss": 0.9579, + "step": 7046 + }, + { + "epoch": 0.4418458837544674, + "grad_norm": 3.3642804622650146, + "learning_rate": 1.2347780357675885e-05, + "loss": 1.092, + "step": 7047 + }, + { + "epoch": 0.4419085836102577, + "grad_norm": 3.0492827892303467, + "learning_rate": 1.2345806306678036e-05, + "loss": 1.1996, + "step": 7048 + }, + { + "epoch": 0.441971283466048, + "grad_norm": 3.086186170578003, + "learning_rate": 1.234383215893903e-05, + "loss": 1.0904, + "step": 7049 + }, + { + "epoch": 0.44203398332183835, + "grad_norm": 3.207707166671753, + "learning_rate": 1.2341857914540288e-05, + "loss": 1.2294, + "step": 7050 + }, + { + "epoch": 0.4420966831776287, + "grad_norm": 3.863809823989868, + "learning_rate": 1.2339883573563219e-05, + "loss": 0.9587, + "step": 7051 + }, + { + "epoch": 0.442159383033419, + "grad_norm": 3.2815375328063965, + "learning_rate": 1.2337909136089252e-05, + "loss": 1.2171, + "step": 7052 + }, + { + 
"epoch": 0.44222208288920933, + "grad_norm": 2.8708252906799316, + "learning_rate": 1.2335934602199808e-05, + "loss": 1.0747, + "step": 7053 + }, + { + "epoch": 0.44228478274499966, + "grad_norm": 3.0807368755340576, + "learning_rate": 1.2333959971976321e-05, + "loss": 1.2753, + "step": 7054 + }, + { + "epoch": 0.44234748260079004, + "grad_norm": 3.398630142211914, + "learning_rate": 1.2331985245500222e-05, + "loss": 1.1751, + "step": 7055 + }, + { + "epoch": 0.44241018245658037, + "grad_norm": 2.8038933277130127, + "learning_rate": 1.2330010422852947e-05, + "loss": 1.1777, + "step": 7056 + }, + { + "epoch": 0.4424728823123707, + "grad_norm": 3.066483497619629, + "learning_rate": 1.2328035504115945e-05, + "loss": 1.154, + "step": 7057 + }, + { + "epoch": 0.442535582168161, + "grad_norm": 2.861271381378174, + "learning_rate": 1.2326060489370655e-05, + "loss": 1.2243, + "step": 7058 + }, + { + "epoch": 0.44259828202395135, + "grad_norm": 3.0968170166015625, + "learning_rate": 1.2324085378698529e-05, + "loss": 1.0304, + "step": 7059 + }, + { + "epoch": 0.4426609818797417, + "grad_norm": 2.9924569129943848, + "learning_rate": 1.2322110172181021e-05, + "loss": 1.1847, + "step": 7060 + }, + { + "epoch": 0.442723681735532, + "grad_norm": 3.0213711261749268, + "learning_rate": 1.2320134869899586e-05, + "loss": 1.1409, + "step": 7061 + }, + { + "epoch": 0.44278638159132233, + "grad_norm": 2.9298007488250732, + "learning_rate": 1.2318159471935692e-05, + "loss": 1.0943, + "step": 7062 + }, + { + "epoch": 0.44284908144711266, + "grad_norm": 2.8983335494995117, + "learning_rate": 1.2316183978370797e-05, + "loss": 1.1603, + "step": 7063 + }, + { + "epoch": 0.442911781302903, + "grad_norm": 3.1383490562438965, + "learning_rate": 1.2314208389286373e-05, + "loss": 1.0113, + "step": 7064 + }, + { + "epoch": 0.4429744811586933, + "grad_norm": 3.1761791706085205, + "learning_rate": 1.2312232704763894e-05, + "loss": 0.9592, + "step": 7065 + }, + { + "epoch": 0.4430371810144837, + "grad_norm": 2.8488621711730957, + "learning_rate": 1.2310256924884837e-05, + "loss": 1.2699, + "step": 7066 + }, + { + "epoch": 0.443099880870274, + "grad_norm": 3.06779146194458, + "learning_rate": 1.2308281049730687e-05, + "loss": 0.9982, + "step": 7067 + }, + { + "epoch": 0.44316258072606435, + "grad_norm": 2.878199338912964, + "learning_rate": 1.2306305079382923e-05, + "loss": 1.1132, + "step": 7068 + }, + { + "epoch": 0.4432252805818547, + "grad_norm": 3.648299217224121, + "learning_rate": 1.2304329013923036e-05, + "loss": 1.1255, + "step": 7069 + }, + { + "epoch": 0.443287980437645, + "grad_norm": 2.6739442348480225, + "learning_rate": 1.2302352853432521e-05, + "loss": 1.2994, + "step": 7070 + }, + { + "epoch": 0.44335068029343533, + "grad_norm": 2.8629252910614014, + "learning_rate": 1.2300376597992873e-05, + "loss": 1.006, + "step": 7071 + }, + { + "epoch": 0.44341338014922566, + "grad_norm": 3.122330904006958, + "learning_rate": 1.2298400247685596e-05, + "loss": 0.9533, + "step": 7072 + }, + { + "epoch": 0.443476080005016, + "grad_norm": 2.9948179721832275, + "learning_rate": 1.229642380259219e-05, + "loss": 1.0697, + "step": 7073 + }, + { + "epoch": 0.4435387798608063, + "grad_norm": 2.9851009845733643, + "learning_rate": 1.229444726279417e-05, + "loss": 1.0733, + "step": 7074 + }, + { + "epoch": 0.44360147971659664, + "grad_norm": 2.9528958797454834, + "learning_rate": 1.229247062837304e-05, + "loss": 1.1601, + "step": 7075 + }, + { + "epoch": 0.44366417957238696, + "grad_norm": 3.044726848602295, + "learning_rate": 
1.229049389941032e-05, + "loss": 1.1519, + "step": 7076 + }, + { + "epoch": 0.4437268794281773, + "grad_norm": 2.8992462158203125, + "learning_rate": 1.2288517075987537e-05, + "loss": 1.0338, + "step": 7077 + }, + { + "epoch": 0.4437895792839677, + "grad_norm": 3.153506278991699, + "learning_rate": 1.2286540158186206e-05, + "loss": 1.3155, + "step": 7078 + }, + { + "epoch": 0.443852279139758, + "grad_norm": 2.9118826389312744, + "learning_rate": 1.2284563146087862e-05, + "loss": 1.0163, + "step": 7079 + }, + { + "epoch": 0.4439149789955483, + "grad_norm": 2.588881254196167, + "learning_rate": 1.2282586039774031e-05, + "loss": 0.9917, + "step": 7080 + }, + { + "epoch": 0.44397767885133865, + "grad_norm": 2.9799859523773193, + "learning_rate": 1.2280608839326253e-05, + "loss": 1.0646, + "step": 7081 + }, + { + "epoch": 0.444040378707129, + "grad_norm": 2.7320961952209473, + "learning_rate": 1.2278631544826067e-05, + "loss": 1.1283, + "step": 7082 + }, + { + "epoch": 0.4441030785629193, + "grad_norm": 2.9619808197021484, + "learning_rate": 1.227665415635502e-05, + "loss": 1.0442, + "step": 7083 + }, + { + "epoch": 0.44416577841870963, + "grad_norm": 3.2338125705718994, + "learning_rate": 1.227467667399465e-05, + "loss": 1.0046, + "step": 7084 + }, + { + "epoch": 0.44422847827449996, + "grad_norm": 3.2175204753875732, + "learning_rate": 1.227269909782652e-05, + "loss": 1.0852, + "step": 7085 + }, + { + "epoch": 0.4442911781302903, + "grad_norm": 2.954831600189209, + "learning_rate": 1.2270721427932175e-05, + "loss": 1.0291, + "step": 7086 + }, + { + "epoch": 0.4443538779860806, + "grad_norm": 2.9718375205993652, + "learning_rate": 1.2268743664393182e-05, + "loss": 1.1713, + "step": 7087 + }, + { + "epoch": 0.44441657784187094, + "grad_norm": 3.1632795333862305, + "learning_rate": 1.2266765807291102e-05, + "loss": 1.046, + "step": 7088 + }, + { + "epoch": 0.44447927769766127, + "grad_norm": 3.04801344871521, + "learning_rate": 1.2264787856707498e-05, + "loss": 1.0979, + "step": 7089 + }, + { + "epoch": 0.44454197755345165, + "grad_norm": 3.2370266914367676, + "learning_rate": 1.2262809812723947e-05, + "loss": 0.9319, + "step": 7090 + }, + { + "epoch": 0.444604677409242, + "grad_norm": 2.8671629428863525, + "learning_rate": 1.226083167542202e-05, + "loss": 1.065, + "step": 7091 + }, + { + "epoch": 0.4446673772650323, + "grad_norm": 3.148754119873047, + "learning_rate": 1.2258853444883297e-05, + "loss": 1.2013, + "step": 7092 + }, + { + "epoch": 0.44473007712082263, + "grad_norm": 3.689638614654541, + "learning_rate": 1.2256875121189355e-05, + "loss": 1.2465, + "step": 7093 + }, + { + "epoch": 0.44479277697661296, + "grad_norm": 2.881479263305664, + "learning_rate": 1.2254896704421789e-05, + "loss": 1.0973, + "step": 7094 + }, + { + "epoch": 0.4448554768324033, + "grad_norm": 3.2643442153930664, + "learning_rate": 1.2252918194662183e-05, + "loss": 0.9837, + "step": 7095 + }, + { + "epoch": 0.4449181766881936, + "grad_norm": 3.253251075744629, + "learning_rate": 1.225093959199213e-05, + "loss": 1.088, + "step": 7096 + }, + { + "epoch": 0.44498087654398394, + "grad_norm": 3.2240962982177734, + "learning_rate": 1.2248960896493234e-05, + "loss": 1.3177, + "step": 7097 + }, + { + "epoch": 0.44504357639977427, + "grad_norm": 3.1249661445617676, + "learning_rate": 1.2246982108247088e-05, + "loss": 1.137, + "step": 7098 + }, + { + "epoch": 0.4451062762555646, + "grad_norm": 2.866651773452759, + "learning_rate": 1.2245003227335305e-05, + "loss": 0.9769, + "step": 7099 + }, + { + "epoch": 
0.4451689761113549, + "grad_norm": 3.0114588737487793, + "learning_rate": 1.2243024253839488e-05, + "loss": 1.0921, + "step": 7100 + }, + { + "epoch": 0.4452316759671453, + "grad_norm": 2.6788337230682373, + "learning_rate": 1.2241045187841257e-05, + "loss": 1.1075, + "step": 7101 + }, + { + "epoch": 0.44529437582293563, + "grad_norm": 2.9005072116851807, + "learning_rate": 1.2239066029422221e-05, + "loss": 1.0138, + "step": 7102 + }, + { + "epoch": 0.44535707567872596, + "grad_norm": 3.222548484802246, + "learning_rate": 1.2237086778664007e-05, + "loss": 1.2014, + "step": 7103 + }, + { + "epoch": 0.4454197755345163, + "grad_norm": 2.605255603790283, + "learning_rate": 1.2235107435648232e-05, + "loss": 1.146, + "step": 7104 + }, + { + "epoch": 0.4454824753903066, + "grad_norm": 3.0823183059692383, + "learning_rate": 1.2233128000456534e-05, + "loss": 1.2201, + "step": 7105 + }, + { + "epoch": 0.44554517524609694, + "grad_norm": 2.599464178085327, + "learning_rate": 1.2231148473170538e-05, + "loss": 1.1069, + "step": 7106 + }, + { + "epoch": 0.44560787510188726, + "grad_norm": 3.014047145843506, + "learning_rate": 1.2229168853871878e-05, + "loss": 1.1146, + "step": 7107 + }, + { + "epoch": 0.4456705749576776, + "grad_norm": 2.833242654800415, + "learning_rate": 1.2227189142642202e-05, + "loss": 1.2514, + "step": 7108 + }, + { + "epoch": 0.4457332748134679, + "grad_norm": 2.940075397491455, + "learning_rate": 1.2225209339563144e-05, + "loss": 1.0724, + "step": 7109 + }, + { + "epoch": 0.44579597466925824, + "grad_norm": 2.8261351585388184, + "learning_rate": 1.2223229444716361e-05, + "loss": 1.2098, + "step": 7110 + }, + { + "epoch": 0.44585867452504857, + "grad_norm": 3.110743522644043, + "learning_rate": 1.2221249458183496e-05, + "loss": 1.1252, + "step": 7111 + }, + { + "epoch": 0.4459213743808389, + "grad_norm": 2.7956838607788086, + "learning_rate": 1.2219269380046206e-05, + "loss": 1.1626, + "step": 7112 + }, + { + "epoch": 0.4459840742366293, + "grad_norm": 3.0702428817749023, + "learning_rate": 1.2217289210386149e-05, + "loss": 1.0519, + "step": 7113 + }, + { + "epoch": 0.4460467740924196, + "grad_norm": 3.12563157081604, + "learning_rate": 1.221530894928499e-05, + "loss": 1.1703, + "step": 7114 + }, + { + "epoch": 0.44610947394820993, + "grad_norm": 3.1256871223449707, + "learning_rate": 1.2213328596824392e-05, + "loss": 1.1224, + "step": 7115 + }, + { + "epoch": 0.44617217380400026, + "grad_norm": 2.7161924839019775, + "learning_rate": 1.2211348153086024e-05, + "loss": 1.0178, + "step": 7116 + }, + { + "epoch": 0.4462348736597906, + "grad_norm": 3.107480525970459, + "learning_rate": 1.2209367618151567e-05, + "loss": 1.1124, + "step": 7117 + }, + { + "epoch": 0.4462975735155809, + "grad_norm": 3.030522584915161, + "learning_rate": 1.2207386992102687e-05, + "loss": 1.1991, + "step": 7118 + }, + { + "epoch": 0.44636027337137124, + "grad_norm": 2.846062660217285, + "learning_rate": 1.2205406275021076e-05, + "loss": 1.0451, + "step": 7119 + }, + { + "epoch": 0.44642297322716157, + "grad_norm": 3.098994016647339, + "learning_rate": 1.2203425466988413e-05, + "loss": 1.1619, + "step": 7120 + }, + { + "epoch": 0.4464856730829519, + "grad_norm": 2.9848616123199463, + "learning_rate": 1.2201444568086385e-05, + "loss": 1.1089, + "step": 7121 + }, + { + "epoch": 0.4465483729387422, + "grad_norm": 3.0984485149383545, + "learning_rate": 1.2199463578396688e-05, + "loss": 1.0385, + "step": 7122 + }, + { + "epoch": 0.44661107279453255, + "grad_norm": 3.0774002075195312, + "learning_rate": 
1.219748249800102e-05, + "loss": 0.9773, + "step": 7123 + }, + { + "epoch": 0.44667377265032293, + "grad_norm": 2.7920303344726562, + "learning_rate": 1.2195501326981072e-05, + "loss": 1.1563, + "step": 7124 + }, + { + "epoch": 0.44673647250611326, + "grad_norm": 3.1259734630584717, + "learning_rate": 1.2193520065418558e-05, + "loss": 1.2304, + "step": 7125 + }, + { + "epoch": 0.4467991723619036, + "grad_norm": 2.9532554149627686, + "learning_rate": 1.219153871339518e-05, + "loss": 1.0324, + "step": 7126 + }, + { + "epoch": 0.4468618722176939, + "grad_norm": 2.8097949028015137, + "learning_rate": 1.2189557270992647e-05, + "loss": 1.0653, + "step": 7127 + }, + { + "epoch": 0.44692457207348424, + "grad_norm": 3.2011725902557373, + "learning_rate": 1.2187575738292677e-05, + "loss": 1.1804, + "step": 7128 + }, + { + "epoch": 0.44698727192927457, + "grad_norm": 2.571985960006714, + "learning_rate": 1.2185594115376991e-05, + "loss": 1.2076, + "step": 7129 + }, + { + "epoch": 0.4470499717850649, + "grad_norm": 2.978325366973877, + "learning_rate": 1.2183612402327305e-05, + "loss": 1.1556, + "step": 7130 + }, + { + "epoch": 0.4471126716408552, + "grad_norm": 3.0010201930999756, + "learning_rate": 1.218163059922535e-05, + "loss": 0.8356, + "step": 7131 + }, + { + "epoch": 0.44717537149664555, + "grad_norm": 3.052706718444824, + "learning_rate": 1.2179648706152853e-05, + "loss": 1.1669, + "step": 7132 + }, + { + "epoch": 0.4472380713524359, + "grad_norm": 2.806400775909424, + "learning_rate": 1.2177666723191547e-05, + "loss": 1.0532, + "step": 7133 + }, + { + "epoch": 0.4473007712082262, + "grad_norm": 3.106278896331787, + "learning_rate": 1.2175684650423177e-05, + "loss": 0.9785, + "step": 7134 + }, + { + "epoch": 0.4473634710640165, + "grad_norm": 3.0926432609558105, + "learning_rate": 1.2173702487929469e-05, + "loss": 0.9912, + "step": 7135 + }, + { + "epoch": 0.4474261709198069, + "grad_norm": 3.26355242729187, + "learning_rate": 1.2171720235792179e-05, + "loss": 0.9227, + "step": 7136 + }, + { + "epoch": 0.44748887077559724, + "grad_norm": 3.083974838256836, + "learning_rate": 1.216973789409305e-05, + "loss": 1.1314, + "step": 7137 + }, + { + "epoch": 0.44755157063138756, + "grad_norm": 2.8129491806030273, + "learning_rate": 1.2167755462913839e-05, + "loss": 1.1444, + "step": 7138 + }, + { + "epoch": 0.4476142704871779, + "grad_norm": 2.7923173904418945, + "learning_rate": 1.2165772942336297e-05, + "loss": 1.0349, + "step": 7139 + }, + { + "epoch": 0.4476769703429682, + "grad_norm": 2.9749834537506104, + "learning_rate": 1.2163790332442183e-05, + "loss": 1.0067, + "step": 7140 + }, + { + "epoch": 0.44773967019875854, + "grad_norm": 2.9057066440582275, + "learning_rate": 1.2161807633313258e-05, + "loss": 1.2028, + "step": 7141 + }, + { + "epoch": 0.44780237005454887, + "grad_norm": 2.851060152053833, + "learning_rate": 1.2159824845031296e-05, + "loss": 1.1073, + "step": 7142 + }, + { + "epoch": 0.4478650699103392, + "grad_norm": 3.2097082138061523, + "learning_rate": 1.2157841967678064e-05, + "loss": 1.0853, + "step": 7143 + }, + { + "epoch": 0.4479277697661295, + "grad_norm": 3.352391242980957, + "learning_rate": 1.215585900133533e-05, + "loss": 1.0128, + "step": 7144 + }, + { + "epoch": 0.44799046962191985, + "grad_norm": 2.6986262798309326, + "learning_rate": 1.215387594608488e-05, + "loss": 1.1281, + "step": 7145 + }, + { + "epoch": 0.4480531694777102, + "grad_norm": 2.840564727783203, + "learning_rate": 1.2151892802008492e-05, + "loss": 1.1548, + "step": 7146 + }, + { + "epoch": 
0.44811586933350056, + "grad_norm": 3.0469183921813965, + "learning_rate": 1.2149909569187951e-05, + "loss": 0.9884, + "step": 7147 + }, + { + "epoch": 0.4481785691892909, + "grad_norm": 3.1516916751861572, + "learning_rate": 1.2147926247705043e-05, + "loss": 1.256, + "step": 7148 + }, + { + "epoch": 0.4482412690450812, + "grad_norm": 2.9112679958343506, + "learning_rate": 1.2145942837641566e-05, + "loss": 1.1092, + "step": 7149 + }, + { + "epoch": 0.44830396890087154, + "grad_norm": 2.9919142723083496, + "learning_rate": 1.2143959339079312e-05, + "loss": 1.2659, + "step": 7150 + }, + { + "epoch": 0.44836666875666187, + "grad_norm": 2.9711380004882812, + "learning_rate": 1.2141975752100081e-05, + "loss": 1.1482, + "step": 7151 + }, + { + "epoch": 0.4484293686124522, + "grad_norm": 3.0757088661193848, + "learning_rate": 1.2139992076785677e-05, + "loss": 1.1945, + "step": 7152 + }, + { + "epoch": 0.4484920684682425, + "grad_norm": 3.3260462284088135, + "learning_rate": 1.2138008313217903e-05, + "loss": 1.1012, + "step": 7153 + }, + { + "epoch": 0.44855476832403285, + "grad_norm": 2.6938276290893555, + "learning_rate": 1.2136024461478577e-05, + "loss": 1.1404, + "step": 7154 + }, + { + "epoch": 0.4486174681798232, + "grad_norm": 2.9354913234710693, + "learning_rate": 1.2134040521649507e-05, + "loss": 1.1624, + "step": 7155 + }, + { + "epoch": 0.4486801680356135, + "grad_norm": 3.2723071575164795, + "learning_rate": 1.2132056493812514e-05, + "loss": 1.1573, + "step": 7156 + }, + { + "epoch": 0.44874286789140383, + "grad_norm": 3.088325023651123, + "learning_rate": 1.2130072378049416e-05, + "loss": 1.036, + "step": 7157 + }, + { + "epoch": 0.44880556774719416, + "grad_norm": 3.0998692512512207, + "learning_rate": 1.2128088174442042e-05, + "loss": 1.0324, + "step": 7158 + }, + { + "epoch": 0.44886826760298454, + "grad_norm": 2.8250186443328857, + "learning_rate": 1.2126103883072216e-05, + "loss": 1.1525, + "step": 7159 + }, + { + "epoch": 0.44893096745877487, + "grad_norm": 2.9119200706481934, + "learning_rate": 1.2124119504021776e-05, + "loss": 1.4922, + "step": 7160 + }, + { + "epoch": 0.4489936673145652, + "grad_norm": 3.38920521736145, + "learning_rate": 1.2122135037372553e-05, + "loss": 1.1957, + "step": 7161 + }, + { + "epoch": 0.4490563671703555, + "grad_norm": 2.9968340396881104, + "learning_rate": 1.2120150483206387e-05, + "loss": 1.2882, + "step": 7162 + }, + { + "epoch": 0.44911906702614585, + "grad_norm": 3.0077474117279053, + "learning_rate": 1.2118165841605127e-05, + "loss": 1.0629, + "step": 7163 + }, + { + "epoch": 0.4491817668819362, + "grad_norm": 2.8476712703704834, + "learning_rate": 1.211618111265061e-05, + "loss": 1.1654, + "step": 7164 + }, + { + "epoch": 0.4492444667377265, + "grad_norm": 3.2809255123138428, + "learning_rate": 1.2114196296424696e-05, + "loss": 1.2363, + "step": 7165 + }, + { + "epoch": 0.4493071665935168, + "grad_norm": 2.914182424545288, + "learning_rate": 1.2112211393009233e-05, + "loss": 0.9827, + "step": 7166 + }, + { + "epoch": 0.44936986644930715, + "grad_norm": 2.8007771968841553, + "learning_rate": 1.2110226402486081e-05, + "loss": 1.0773, + "step": 7167 + }, + { + "epoch": 0.4494325663050975, + "grad_norm": 2.7688961029052734, + "learning_rate": 1.2108241324937096e-05, + "loss": 1.223, + "step": 7168 + }, + { + "epoch": 0.4494952661608878, + "grad_norm": 3.021360397338867, + "learning_rate": 1.210625616044415e-05, + "loss": 0.9076, + "step": 7169 + }, + { + "epoch": 0.44955796601667813, + "grad_norm": 2.851750135421753, + "learning_rate": 
1.2104270909089107e-05, + "loss": 1.1957, + "step": 7170 + }, + { + "epoch": 0.4496206658724685, + "grad_norm": 3.1547672748565674, + "learning_rate": 1.2102285570953842e-05, + "loss": 1.0128, + "step": 7171 + }, + { + "epoch": 0.44968336572825884, + "grad_norm": 3.2241437435150146, + "learning_rate": 1.2100300146120226e-05, + "loss": 1.147, + "step": 7172 + }, + { + "epoch": 0.44974606558404917, + "grad_norm": 3.272097587585449, + "learning_rate": 1.2098314634670138e-05, + "loss": 1.0908, + "step": 7173 + }, + { + "epoch": 0.4498087654398395, + "grad_norm": 3.248941659927368, + "learning_rate": 1.2096329036685469e-05, + "loss": 1.2202, + "step": 7174 + }, + { + "epoch": 0.4498714652956298, + "grad_norm": 3.318255662918091, + "learning_rate": 1.2094343352248092e-05, + "loss": 1.0633, + "step": 7175 + }, + { + "epoch": 0.44993416515142015, + "grad_norm": 3.0813047885894775, + "learning_rate": 1.2092357581439909e-05, + "loss": 1.2114, + "step": 7176 + }, + { + "epoch": 0.4499968650072105, + "grad_norm": 2.7994627952575684, + "learning_rate": 1.2090371724342804e-05, + "loss": 1.0999, + "step": 7177 + }, + { + "epoch": 0.4500595648630008, + "grad_norm": 3.009291648864746, + "learning_rate": 1.208838578103868e-05, + "loss": 1.0468, + "step": 7178 + }, + { + "epoch": 0.45012226471879113, + "grad_norm": 3.347111701965332, + "learning_rate": 1.2086399751609435e-05, + "loss": 1.1837, + "step": 7179 + }, + { + "epoch": 0.45018496457458146, + "grad_norm": 3.170196533203125, + "learning_rate": 1.2084413636136976e-05, + "loss": 1.2017, + "step": 7180 + }, + { + "epoch": 0.4502476644303718, + "grad_norm": 2.62088680267334, + "learning_rate": 1.2082427434703204e-05, + "loss": 1.0713, + "step": 7181 + }, + { + "epoch": 0.45031036428616217, + "grad_norm": 3.084193706512451, + "learning_rate": 1.2080441147390032e-05, + "loss": 1.0262, + "step": 7182 + }, + { + "epoch": 0.4503730641419525, + "grad_norm": 3.229626417160034, + "learning_rate": 1.207845477427938e-05, + "loss": 1.1165, + "step": 7183 + }, + { + "epoch": 0.4504357639977428, + "grad_norm": 3.0036418437957764, + "learning_rate": 1.207646831545316e-05, + "loss": 1.2508, + "step": 7184 + }, + { + "epoch": 0.45049846385353315, + "grad_norm": 3.1335561275482178, + "learning_rate": 1.2074481770993298e-05, + "loss": 1.0537, + "step": 7185 + }, + { + "epoch": 0.4505611637093235, + "grad_norm": 2.994086742401123, + "learning_rate": 1.2072495140981715e-05, + "loss": 0.9782, + "step": 7186 + }, + { + "epoch": 0.4506238635651138, + "grad_norm": 3.1842751502990723, + "learning_rate": 1.2070508425500344e-05, + "loss": 1.1996, + "step": 7187 + }, + { + "epoch": 0.45068656342090413, + "grad_norm": 3.287277936935425, + "learning_rate": 1.2068521624631113e-05, + "loss": 1.168, + "step": 7188 + }, + { + "epoch": 0.45074926327669446, + "grad_norm": 3.114487648010254, + "learning_rate": 1.2066534738455961e-05, + "loss": 1.2292, + "step": 7189 + }, + { + "epoch": 0.4508119631324848, + "grad_norm": 3.0085153579711914, + "learning_rate": 1.2064547767056826e-05, + "loss": 1.0481, + "step": 7190 + }, + { + "epoch": 0.4508746629882751, + "grad_norm": 3.015376091003418, + "learning_rate": 1.2062560710515654e-05, + "loss": 1.2464, + "step": 7191 + }, + { + "epoch": 0.45093736284406544, + "grad_norm": 2.9951260089874268, + "learning_rate": 1.2060573568914382e-05, + "loss": 1.113, + "step": 7192 + }, + { + "epoch": 0.45100006269985576, + "grad_norm": 3.366523504257202, + "learning_rate": 1.2058586342334971e-05, + "loss": 1.0844, + "step": 7193 + }, + { + "epoch": 
0.45106276255564615, + "grad_norm": 3.1743311882019043, + "learning_rate": 1.2056599030859367e-05, + "loss": 0.999, + "step": 7194 + }, + { + "epoch": 0.4511254624114365, + "grad_norm": 3.137681245803833, + "learning_rate": 1.2054611634569528e-05, + "loss": 1.2537, + "step": 7195 + }, + { + "epoch": 0.4511881622672268, + "grad_norm": 3.5906379222869873, + "learning_rate": 1.2052624153547418e-05, + "loss": 1.0771, + "step": 7196 + }, + { + "epoch": 0.4512508621230171, + "grad_norm": 3.021634340286255, + "learning_rate": 1.2050636587874997e-05, + "loss": 1.1255, + "step": 7197 + }, + { + "epoch": 0.45131356197880745, + "grad_norm": 2.7840893268585205, + "learning_rate": 1.2048648937634236e-05, + "loss": 1.0168, + "step": 7198 + }, + { + "epoch": 0.4513762618345978, + "grad_norm": 3.2322940826416016, + "learning_rate": 1.2046661202907101e-05, + "loss": 1.17, + "step": 7199 + }, + { + "epoch": 0.4514389616903881, + "grad_norm": 3.02255916595459, + "learning_rate": 1.204467338377557e-05, + "loss": 1.1614, + "step": 7200 + }, + { + "epoch": 0.45150166154617843, + "grad_norm": 2.9819371700286865, + "learning_rate": 1.2042685480321618e-05, + "loss": 1.0198, + "step": 7201 + }, + { + "epoch": 0.45156436140196876, + "grad_norm": 3.132899045944214, + "learning_rate": 1.2040697492627228e-05, + "loss": 1.0717, + "step": 7202 + }, + { + "epoch": 0.4516270612577591, + "grad_norm": 3.3405349254608154, + "learning_rate": 1.2038709420774384e-05, + "loss": 1.153, + "step": 7203 + }, + { + "epoch": 0.4516897611135494, + "grad_norm": 2.8424346446990967, + "learning_rate": 1.2036721264845075e-05, + "loss": 1.1667, + "step": 7204 + }, + { + "epoch": 0.4517524609693398, + "grad_norm": 3.030364751815796, + "learning_rate": 1.2034733024921292e-05, + "loss": 1.0834, + "step": 7205 + }, + { + "epoch": 0.4518151608251301, + "grad_norm": 2.9140660762786865, + "learning_rate": 1.2032744701085028e-05, + "loss": 1.1429, + "step": 7206 + }, + { + "epoch": 0.45187786068092045, + "grad_norm": 2.932298421859741, + "learning_rate": 1.203075629341829e-05, + "loss": 0.9727, + "step": 7207 + }, + { + "epoch": 0.4519405605367108, + "grad_norm": 3.213390827178955, + "learning_rate": 1.2028767802003067e-05, + "loss": 1.1505, + "step": 7208 + }, + { + "epoch": 0.4520032603925011, + "grad_norm": 3.0224032402038574, + "learning_rate": 1.2026779226921374e-05, + "loss": 1.0822, + "step": 7209 + }, + { + "epoch": 0.45206596024829143, + "grad_norm": 3.077122211456299, + "learning_rate": 1.2024790568255215e-05, + "loss": 1.1494, + "step": 7210 + }, + { + "epoch": 0.45212866010408176, + "grad_norm": 2.7951927185058594, + "learning_rate": 1.2022801826086609e-05, + "loss": 1.2907, + "step": 7211 + }, + { + "epoch": 0.4521913599598721, + "grad_norm": 3.006225824356079, + "learning_rate": 1.2020813000497565e-05, + "loss": 1.2017, + "step": 7212 + }, + { + "epoch": 0.4522540598156624, + "grad_norm": 2.8260579109191895, + "learning_rate": 1.2018824091570103e-05, + "loss": 1.0531, + "step": 7213 + }, + { + "epoch": 0.45231675967145274, + "grad_norm": 2.9181103706359863, + "learning_rate": 1.201683509938625e-05, + "loss": 1.0342, + "step": 7214 + }, + { + "epoch": 0.45237945952724307, + "grad_norm": 2.9833931922912598, + "learning_rate": 1.2014846024028026e-05, + "loss": 1.0077, + "step": 7215 + }, + { + "epoch": 0.4524421593830334, + "grad_norm": 3.0777037143707275, + "learning_rate": 1.201285686557747e-05, + "loss": 1.1249, + "step": 7216 + }, + { + "epoch": 0.4525048592388238, + "grad_norm": 2.838292121887207, + "learning_rate": 
1.2010867624116602e-05, + "loss": 1.1193, + "step": 7217 + }, + { + "epoch": 0.4525675590946141, + "grad_norm": 2.930575132369995, + "learning_rate": 1.2008878299727471e-05, + "loss": 1.3261, + "step": 7218 + }, + { + "epoch": 0.45263025895040443, + "grad_norm": 3.119234085083008, + "learning_rate": 1.2006888892492108e-05, + "loss": 1.2095, + "step": 7219 + }, + { + "epoch": 0.45269295880619476, + "grad_norm": 3.1215531826019287, + "learning_rate": 1.200489940249256e-05, + "loss": 0.9161, + "step": 7220 + }, + { + "epoch": 0.4527556586619851, + "grad_norm": 3.004091739654541, + "learning_rate": 1.2002909829810873e-05, + "loss": 1.3096, + "step": 7221 + }, + { + "epoch": 0.4528183585177754, + "grad_norm": 2.9303765296936035, + "learning_rate": 1.2000920174529098e-05, + "loss": 1.2056, + "step": 7222 + }, + { + "epoch": 0.45288105837356574, + "grad_norm": 3.008742094039917, + "learning_rate": 1.1998930436729286e-05, + "loss": 1.1773, + "step": 7223 + }, + { + "epoch": 0.45294375822935606, + "grad_norm": 3.113553047180176, + "learning_rate": 1.1996940616493496e-05, + "loss": 1.0866, + "step": 7224 + }, + { + "epoch": 0.4530064580851464, + "grad_norm": 3.130079746246338, + "learning_rate": 1.199495071390379e-05, + "loss": 1.1436, + "step": 7225 + }, + { + "epoch": 0.4530691579409367, + "grad_norm": 3.1958770751953125, + "learning_rate": 1.1992960729042227e-05, + "loss": 1.1212, + "step": 7226 + }, + { + "epoch": 0.45313185779672704, + "grad_norm": 2.9618449211120605, + "learning_rate": 1.1990970661990877e-05, + "loss": 1.1861, + "step": 7227 + }, + { + "epoch": 0.4531945576525174, + "grad_norm": 3.213200569152832, + "learning_rate": 1.1988980512831809e-05, + "loss": 1.1538, + "step": 7228 + }, + { + "epoch": 0.45325725750830775, + "grad_norm": 3.43316650390625, + "learning_rate": 1.1986990281647101e-05, + "loss": 1.2068, + "step": 7229 + }, + { + "epoch": 0.4533199573640981, + "grad_norm": 3.258601665496826, + "learning_rate": 1.1984999968518824e-05, + "loss": 1.1316, + "step": 7230 + }, + { + "epoch": 0.4533826572198884, + "grad_norm": 3.282400608062744, + "learning_rate": 1.1983009573529063e-05, + "loss": 1.1402, + "step": 7231 + }, + { + "epoch": 0.45344535707567873, + "grad_norm": 3.156764507293701, + "learning_rate": 1.19810190967599e-05, + "loss": 1.0923, + "step": 7232 + }, + { + "epoch": 0.45350805693146906, + "grad_norm": 3.194004774093628, + "learning_rate": 1.1979028538293424e-05, + "loss": 1.0212, + "step": 7233 + }, + { + "epoch": 0.4535707567872594, + "grad_norm": 2.877145290374756, + "learning_rate": 1.1977037898211723e-05, + "loss": 1.0927, + "step": 7234 + }, + { + "epoch": 0.4536334566430497, + "grad_norm": 3.0074944496154785, + "learning_rate": 1.1975047176596893e-05, + "loss": 1.0313, + "step": 7235 + }, + { + "epoch": 0.45369615649884004, + "grad_norm": 3.039886236190796, + "learning_rate": 1.1973056373531034e-05, + "loss": 1.1116, + "step": 7236 + }, + { + "epoch": 0.45375885635463037, + "grad_norm": 3.1270904541015625, + "learning_rate": 1.1971065489096238e-05, + "loss": 1.1331, + "step": 7237 + }, + { + "epoch": 0.4538215562104207, + "grad_norm": 3.0815012454986572, + "learning_rate": 1.1969074523374622e-05, + "loss": 1.1039, + "step": 7238 + }, + { + "epoch": 0.453884256066211, + "grad_norm": 2.8676750659942627, + "learning_rate": 1.1967083476448282e-05, + "loss": 1.1791, + "step": 7239 + }, + { + "epoch": 0.4539469559220014, + "grad_norm": 3.1869640350341797, + "learning_rate": 1.1965092348399337e-05, + "loss": 1.2976, + "step": 7240 + }, + { + "epoch": 
0.45400965577779173, + "grad_norm": 3.12158203125, + "learning_rate": 1.1963101139309894e-05, + "loss": 1.0507, + "step": 7241 + }, + { + "epoch": 0.45407235563358206, + "grad_norm": 2.806619167327881, + "learning_rate": 1.1961109849262078e-05, + "loss": 1.1677, + "step": 7242 + }, + { + "epoch": 0.4541350554893724, + "grad_norm": 3.0210447311401367, + "learning_rate": 1.1959118478338002e-05, + "loss": 0.9237, + "step": 7243 + }, + { + "epoch": 0.4541977553451627, + "grad_norm": 2.884495973587036, + "learning_rate": 1.1957127026619798e-05, + "loss": 1.1879, + "step": 7244 + }, + { + "epoch": 0.45426045520095304, + "grad_norm": 3.1665639877319336, + "learning_rate": 1.195513549418959e-05, + "loss": 1.0983, + "step": 7245 + }, + { + "epoch": 0.45432315505674337, + "grad_norm": 3.2777421474456787, + "learning_rate": 1.1953143881129505e-05, + "loss": 1.2024, + "step": 7246 + }, + { + "epoch": 0.4543858549125337, + "grad_norm": 3.274280309677124, + "learning_rate": 1.1951152187521684e-05, + "loss": 1.0924, + "step": 7247 + }, + { + "epoch": 0.454448554768324, + "grad_norm": 3.206353187561035, + "learning_rate": 1.194916041344826e-05, + "loss": 1.1324, + "step": 7248 + }, + { + "epoch": 0.45451125462411435, + "grad_norm": 3.285449266433716, + "learning_rate": 1.1947168558991378e-05, + "loss": 1.2029, + "step": 7249 + }, + { + "epoch": 0.4545739544799047, + "grad_norm": 2.9377503395080566, + "learning_rate": 1.1945176624233177e-05, + "loss": 1.2561, + "step": 7250 + }, + { + "epoch": 0.454636654335695, + "grad_norm": 3.359492301940918, + "learning_rate": 1.1943184609255809e-05, + "loss": 1.1156, + "step": 7251 + }, + { + "epoch": 0.4546993541914854, + "grad_norm": 2.786999225616455, + "learning_rate": 1.1941192514141421e-05, + "loss": 1.2916, + "step": 7252 + }, + { + "epoch": 0.4547620540472757, + "grad_norm": 3.19631290435791, + "learning_rate": 1.1939200338972173e-05, + "loss": 1.1589, + "step": 7253 + }, + { + "epoch": 0.45482475390306604, + "grad_norm": 3.273226022720337, + "learning_rate": 1.1937208083830214e-05, + "loss": 0.9625, + "step": 7254 + }, + { + "epoch": 0.45488745375885636, + "grad_norm": 2.822554111480713, + "learning_rate": 1.1935215748797708e-05, + "loss": 1.2059, + "step": 7255 + }, + { + "epoch": 0.4549501536146467, + "grad_norm": 2.919144868850708, + "learning_rate": 1.1933223333956825e-05, + "loss": 1.1859, + "step": 7256 + }, + { + "epoch": 0.455012853470437, + "grad_norm": 2.8471574783325195, + "learning_rate": 1.1931230839389722e-05, + "loss": 1.0171, + "step": 7257 + }, + { + "epoch": 0.45507555332622734, + "grad_norm": 3.0113894939422607, + "learning_rate": 1.192923826517858e-05, + "loss": 1.2224, + "step": 7258 + }, + { + "epoch": 0.45513825318201767, + "grad_norm": 2.806206226348877, + "learning_rate": 1.1927245611405564e-05, + "loss": 1.1151, + "step": 7259 + }, + { + "epoch": 0.455200953037808, + "grad_norm": 3.3895981311798096, + "learning_rate": 1.1925252878152858e-05, + "loss": 1.1092, + "step": 7260 + }, + { + "epoch": 0.4552636528935983, + "grad_norm": 3.0285074710845947, + "learning_rate": 1.1923260065502635e-05, + "loss": 1.0622, + "step": 7261 + }, + { + "epoch": 0.45532635274938865, + "grad_norm": 3.1256301403045654, + "learning_rate": 1.1921267173537085e-05, + "loss": 1.1775, + "step": 7262 + }, + { + "epoch": 0.45538905260517903, + "grad_norm": 2.6062750816345215, + "learning_rate": 1.1919274202338394e-05, + "loss": 1.1467, + "step": 7263 + }, + { + "epoch": 0.45545175246096936, + "grad_norm": 2.932913303375244, + "learning_rate": 
1.191728115198875e-05, + "loss": 0.9471, + "step": 7264 + }, + { + "epoch": 0.4555144523167597, + "grad_norm": 2.9077377319335938, + "learning_rate": 1.1915288022570346e-05, + "loss": 1.0915, + "step": 7265 + }, + { + "epoch": 0.45557715217255, + "grad_norm": 3.4882302284240723, + "learning_rate": 1.1913294814165382e-05, + "loss": 0.9877, + "step": 7266 + }, + { + "epoch": 0.45563985202834034, + "grad_norm": 2.90403151512146, + "learning_rate": 1.1911301526856059e-05, + "loss": 1.2982, + "step": 7267 + }, + { + "epoch": 0.45570255188413067, + "grad_norm": 2.987227201461792, + "learning_rate": 1.1909308160724573e-05, + "loss": 1.065, + "step": 7268 + }, + { + "epoch": 0.455765251739921, + "grad_norm": 3.0102903842926025, + "learning_rate": 1.1907314715853138e-05, + "loss": 1.2738, + "step": 7269 + }, + { + "epoch": 0.4558279515957113, + "grad_norm": 2.9666974544525146, + "learning_rate": 1.1905321192323957e-05, + "loss": 1.1268, + "step": 7270 + }, + { + "epoch": 0.45589065145150165, + "grad_norm": 2.934213399887085, + "learning_rate": 1.1903327590219251e-05, + "loss": 1.0355, + "step": 7271 + }, + { + "epoch": 0.455953351307292, + "grad_norm": 2.9951515197753906, + "learning_rate": 1.1901333909621231e-05, + "loss": 1.0226, + "step": 7272 + }, + { + "epoch": 0.4560160511630823, + "grad_norm": 2.996225118637085, + "learning_rate": 1.1899340150612117e-05, + "loss": 1.1649, + "step": 7273 + }, + { + "epoch": 0.45607875101887263, + "grad_norm": 3.2222394943237305, + "learning_rate": 1.1897346313274132e-05, + "loss": 1.0865, + "step": 7274 + }, + { + "epoch": 0.456141450874663, + "grad_norm": 3.274594306945801, + "learning_rate": 1.18953523976895e-05, + "loss": 1.0459, + "step": 7275 + }, + { + "epoch": 0.45620415073045334, + "grad_norm": 2.9514336585998535, + "learning_rate": 1.1893358403940458e-05, + "loss": 0.9452, + "step": 7276 + }, + { + "epoch": 0.45626685058624367, + "grad_norm": 2.771493434906006, + "learning_rate": 1.1891364332109227e-05, + "loss": 1.246, + "step": 7277 + }, + { + "epoch": 0.456329550442034, + "grad_norm": 3.380284309387207, + "learning_rate": 1.1889370182278053e-05, + "loss": 1.0948, + "step": 7278 + }, + { + "epoch": 0.4563922502978243, + "grad_norm": 2.9617416858673096, + "learning_rate": 1.1887375954529167e-05, + "loss": 1.2307, + "step": 7279 + }, + { + "epoch": 0.45645495015361465, + "grad_norm": 2.884917974472046, + "learning_rate": 1.1885381648944819e-05, + "loss": 1.0964, + "step": 7280 + }, + { + "epoch": 0.456517650009405, + "grad_norm": 2.82300066947937, + "learning_rate": 1.1883387265607245e-05, + "loss": 1.0203, + "step": 7281 + }, + { + "epoch": 0.4565803498651953, + "grad_norm": 3.054462432861328, + "learning_rate": 1.18813928045987e-05, + "loss": 0.8853, + "step": 7282 + }, + { + "epoch": 0.4566430497209856, + "grad_norm": 3.204298257827759, + "learning_rate": 1.187939826600143e-05, + "loss": 1.0886, + "step": 7283 + }, + { + "epoch": 0.45670574957677595, + "grad_norm": 2.9746603965759277, + "learning_rate": 1.1877403649897697e-05, + "loss": 1.303, + "step": 7284 + }, + { + "epoch": 0.4567684494325663, + "grad_norm": 2.9030067920684814, + "learning_rate": 1.1875408956369754e-05, + "loss": 1.128, + "step": 7285 + }, + { + "epoch": 0.45683114928835666, + "grad_norm": 3.007831573486328, + "learning_rate": 1.1873414185499864e-05, + "loss": 1.2451, + "step": 7286 + }, + { + "epoch": 0.456893849144147, + "grad_norm": 2.8859829902648926, + "learning_rate": 1.1871419337370292e-05, + "loss": 1.4048, + "step": 7287 + }, + { + "epoch": 0.4569565489999373, + 
"grad_norm": 3.3088576793670654, + "learning_rate": 1.1869424412063302e-05, + "loss": 1.1333, + "step": 7288 + }, + { + "epoch": 0.45701924885572764, + "grad_norm": 3.3496317863464355, + "learning_rate": 1.1867429409661171e-05, + "loss": 1.0263, + "step": 7289 + }, + { + "epoch": 0.45708194871151797, + "grad_norm": 3.019793748855591, + "learning_rate": 1.1865434330246165e-05, + "loss": 1.2555, + "step": 7290 + }, + { + "epoch": 0.4571446485673083, + "grad_norm": 2.99212908744812, + "learning_rate": 1.1863439173900573e-05, + "loss": 1.2672, + "step": 7291 + }, + { + "epoch": 0.4572073484230986, + "grad_norm": 3.1242735385894775, + "learning_rate": 1.186144394070666e-05, + "loss": 1.165, + "step": 7292 + }, + { + "epoch": 0.45727004827888895, + "grad_norm": 3.221926689147949, + "learning_rate": 1.1859448630746723e-05, + "loss": 0.9431, + "step": 7293 + }, + { + "epoch": 0.4573327481346793, + "grad_norm": 2.691331624984741, + "learning_rate": 1.1857453244103043e-05, + "loss": 1.0984, + "step": 7294 + }, + { + "epoch": 0.4573954479904696, + "grad_norm": 2.720358371734619, + "learning_rate": 1.1855457780857908e-05, + "loss": 1.0169, + "step": 7295 + }, + { + "epoch": 0.45745814784625993, + "grad_norm": 2.924710273742676, + "learning_rate": 1.1853462241093614e-05, + "loss": 1.1038, + "step": 7296 + }, + { + "epoch": 0.45752084770205026, + "grad_norm": 2.9612245559692383, + "learning_rate": 1.1851466624892455e-05, + "loss": 1.2165, + "step": 7297 + }, + { + "epoch": 0.45758354755784064, + "grad_norm": 3.0186662673950195, + "learning_rate": 1.1849470932336734e-05, + "loss": 1.1799, + "step": 7298 + }, + { + "epoch": 0.45764624741363097, + "grad_norm": 3.0460784435272217, + "learning_rate": 1.1847475163508751e-05, + "loss": 1.0411, + "step": 7299 + }, + { + "epoch": 0.4577089472694213, + "grad_norm": 2.8766696453094482, + "learning_rate": 1.1845479318490812e-05, + "loss": 1.1708, + "step": 7300 + }, + { + "epoch": 0.4577716471252116, + "grad_norm": 3.0288748741149902, + "learning_rate": 1.184348339736522e-05, + "loss": 1.0666, + "step": 7301 + }, + { + "epoch": 0.45783434698100195, + "grad_norm": 2.9156336784362793, + "learning_rate": 1.1841487400214297e-05, + "loss": 1.14, + "step": 7302 + }, + { + "epoch": 0.4578970468367923, + "grad_norm": 2.861713409423828, + "learning_rate": 1.1839491327120354e-05, + "loss": 1.0232, + "step": 7303 + }, + { + "epoch": 0.4579597466925826, + "grad_norm": 3.2268290519714355, + "learning_rate": 1.1837495178165706e-05, + "loss": 1.0105, + "step": 7304 + }, + { + "epoch": 0.45802244654837293, + "grad_norm": 3.200990676879883, + "learning_rate": 1.1835498953432676e-05, + "loss": 1.1868, + "step": 7305 + }, + { + "epoch": 0.45808514640416326, + "grad_norm": 3.270707368850708, + "learning_rate": 1.183350265300359e-05, + "loss": 1.0634, + "step": 7306 + }, + { + "epoch": 0.4581478462599536, + "grad_norm": 3.399552822113037, + "learning_rate": 1.1831506276960776e-05, + "loss": 1.164, + "step": 7307 + }, + { + "epoch": 0.4582105461157439, + "grad_norm": 3.109138011932373, + "learning_rate": 1.182950982538656e-05, + "loss": 0.9997, + "step": 7308 + }, + { + "epoch": 0.4582732459715343, + "grad_norm": 3.23307466506958, + "learning_rate": 1.1827513298363282e-05, + "loss": 1.113, + "step": 7309 + }, + { + "epoch": 0.4583359458273246, + "grad_norm": 3.219209909439087, + "learning_rate": 1.1825516695973274e-05, + "loss": 0.9502, + "step": 7310 + }, + { + "epoch": 0.45839864568311495, + "grad_norm": 3.395986795425415, + "learning_rate": 1.1823520018298877e-05, + "loss": 0.9696, + 
"step": 7311 + }, + { + "epoch": 0.4584613455389053, + "grad_norm": 3.4799160957336426, + "learning_rate": 1.1821523265422436e-05, + "loss": 1.0673, + "step": 7312 + }, + { + "epoch": 0.4585240453946956, + "grad_norm": 3.0527689456939697, + "learning_rate": 1.1819526437426298e-05, + "loss": 1.2049, + "step": 7313 + }, + { + "epoch": 0.4585867452504859, + "grad_norm": 3.252437114715576, + "learning_rate": 1.1817529534392806e-05, + "loss": 1.0967, + "step": 7314 + }, + { + "epoch": 0.45864944510627625, + "grad_norm": 3.0166406631469727, + "learning_rate": 1.1815532556404322e-05, + "loss": 1.0809, + "step": 7315 + }, + { + "epoch": 0.4587121449620666, + "grad_norm": 2.7793703079223633, + "learning_rate": 1.1813535503543191e-05, + "loss": 1.1156, + "step": 7316 + }, + { + "epoch": 0.4587748448178569, + "grad_norm": 3.4730398654937744, + "learning_rate": 1.1811538375891778e-05, + "loss": 1.3029, + "step": 7317 + }, + { + "epoch": 0.45883754467364724, + "grad_norm": 2.855268955230713, + "learning_rate": 1.1809541173532444e-05, + "loss": 1.1658, + "step": 7318 + }, + { + "epoch": 0.45890024452943756, + "grad_norm": 3.1144490242004395, + "learning_rate": 1.1807543896547553e-05, + "loss": 1.1792, + "step": 7319 + }, + { + "epoch": 0.4589629443852279, + "grad_norm": 2.871291399002075, + "learning_rate": 1.1805546545019472e-05, + "loss": 1.1299, + "step": 7320 + }, + { + "epoch": 0.45902564424101827, + "grad_norm": 3.0603039264678955, + "learning_rate": 1.1803549119030573e-05, + "loss": 0.9108, + "step": 7321 + }, + { + "epoch": 0.4590883440968086, + "grad_norm": 3.0486772060394287, + "learning_rate": 1.1801551618663232e-05, + "loss": 0.9751, + "step": 7322 + }, + { + "epoch": 0.4591510439525989, + "grad_norm": 3.1642751693725586, + "learning_rate": 1.179955404399982e-05, + "loss": 1.1687, + "step": 7323 + }, + { + "epoch": 0.45921374380838925, + "grad_norm": 2.978916883468628, + "learning_rate": 1.1797556395122723e-05, + "loss": 1.2065, + "step": 7324 + }, + { + "epoch": 0.4592764436641796, + "grad_norm": 2.8708252906799316, + "learning_rate": 1.1795558672114321e-05, + "loss": 1.2572, + "step": 7325 + }, + { + "epoch": 0.4593391435199699, + "grad_norm": 2.9674670696258545, + "learning_rate": 1.1793560875056999e-05, + "loss": 1.1129, + "step": 7326 + }, + { + "epoch": 0.45940184337576023, + "grad_norm": 3.1618881225585938, + "learning_rate": 1.179156300403315e-05, + "loss": 1.0736, + "step": 7327 + }, + { + "epoch": 0.45946454323155056, + "grad_norm": 2.928036689758301, + "learning_rate": 1.1789565059125164e-05, + "loss": 1.1763, + "step": 7328 + }, + { + "epoch": 0.4595272430873409, + "grad_norm": 3.164504051208496, + "learning_rate": 1.1787567040415437e-05, + "loss": 1.2801, + "step": 7329 + }, + { + "epoch": 0.4595899429431312, + "grad_norm": 2.9933414459228516, + "learning_rate": 1.1785568947986368e-05, + "loss": 1.0222, + "step": 7330 + }, + { + "epoch": 0.45965264279892154, + "grad_norm": 2.7295000553131104, + "learning_rate": 1.1783570781920357e-05, + "loss": 1.0635, + "step": 7331 + }, + { + "epoch": 0.45971534265471187, + "grad_norm": 2.9934287071228027, + "learning_rate": 1.1781572542299808e-05, + "loss": 1.2643, + "step": 7332 + }, + { + "epoch": 0.45977804251050225, + "grad_norm": 3.379851818084717, + "learning_rate": 1.1779574229207133e-05, + "loss": 1.067, + "step": 7333 + }, + { + "epoch": 0.4598407423662926, + "grad_norm": 2.8872501850128174, + "learning_rate": 1.1777575842724735e-05, + "loss": 1.1072, + "step": 7334 + }, + { + "epoch": 0.4599034422220829, + "grad_norm": 
2.743936538696289, + "learning_rate": 1.1775577382935035e-05, + "loss": 1.0763, + "step": 7335 + }, + { + "epoch": 0.45996614207787323, + "grad_norm": 2.966317892074585, + "learning_rate": 1.1773578849920444e-05, + "loss": 1.3097, + "step": 7336 + }, + { + "epoch": 0.46002884193366356, + "grad_norm": 3.3343796730041504, + "learning_rate": 1.1771580243763383e-05, + "loss": 1.2288, + "step": 7337 + }, + { + "epoch": 0.4600915417894539, + "grad_norm": 2.9508543014526367, + "learning_rate": 1.1769581564546278e-05, + "loss": 1.1801, + "step": 7338 + }, + { + "epoch": 0.4601542416452442, + "grad_norm": 2.998466730117798, + "learning_rate": 1.176758281235155e-05, + "loss": 1.0847, + "step": 7339 + }, + { + "epoch": 0.46021694150103454, + "grad_norm": 2.9620325565338135, + "learning_rate": 1.176558398726163e-05, + "loss": 1.1956, + "step": 7340 + }, + { + "epoch": 0.46027964135682486, + "grad_norm": 3.2324910163879395, + "learning_rate": 1.1763585089358948e-05, + "loss": 1.3066, + "step": 7341 + }, + { + "epoch": 0.4603423412126152, + "grad_norm": 2.7347612380981445, + "learning_rate": 1.1761586118725945e-05, + "loss": 1.1612, + "step": 7342 + }, + { + "epoch": 0.4604050410684055, + "grad_norm": 3.0871267318725586, + "learning_rate": 1.175958707544505e-05, + "loss": 1.0361, + "step": 7343 + }, + { + "epoch": 0.4604677409241959, + "grad_norm": 3.102900505065918, + "learning_rate": 1.1757587959598707e-05, + "loss": 1.1463, + "step": 7344 + }, + { + "epoch": 0.4605304407799862, + "grad_norm": 3.082259178161621, + "learning_rate": 1.1755588771269359e-05, + "loss": 1.1315, + "step": 7345 + }, + { + "epoch": 0.46059314063577655, + "grad_norm": 2.9450907707214355, + "learning_rate": 1.1753589510539453e-05, + "loss": 1.0614, + "step": 7346 + }, + { + "epoch": 0.4606558404915669, + "grad_norm": 3.0468733310699463, + "learning_rate": 1.1751590177491441e-05, + "loss": 1.3586, + "step": 7347 + }, + { + "epoch": 0.4607185403473572, + "grad_norm": 2.8478753566741943, + "learning_rate": 1.1749590772207772e-05, + "loss": 1.1269, + "step": 7348 + }, + { + "epoch": 0.46078124020314754, + "grad_norm": 3.0186755657196045, + "learning_rate": 1.1747591294770906e-05, + "loss": 1.1086, + "step": 7349 + }, + { + "epoch": 0.46084394005893786, + "grad_norm": 2.899491786956787, + "learning_rate": 1.1745591745263296e-05, + "loss": 1.1313, + "step": 7350 + }, + { + "epoch": 0.4609066399147282, + "grad_norm": 3.3037116527557373, + "learning_rate": 1.1743592123767406e-05, + "loss": 1.0438, + "step": 7351 + }, + { + "epoch": 0.4609693397705185, + "grad_norm": 2.7021148204803467, + "learning_rate": 1.1741592430365703e-05, + "loss": 1.226, + "step": 7352 + }, + { + "epoch": 0.46103203962630884, + "grad_norm": 3.1809582710266113, + "learning_rate": 1.1739592665140652e-05, + "loss": 1.1037, + "step": 7353 + }, + { + "epoch": 0.46109473948209917, + "grad_norm": 3.2549588680267334, + "learning_rate": 1.173759282817472e-05, + "loss": 1.2068, + "step": 7354 + }, + { + "epoch": 0.4611574393378895, + "grad_norm": 2.9594290256500244, + "learning_rate": 1.1735592919550389e-05, + "loss": 1.117, + "step": 7355 + }, + { + "epoch": 0.4612201391936799, + "grad_norm": 2.7916886806488037, + "learning_rate": 1.1733592939350125e-05, + "loss": 1.0325, + "step": 7356 + }, + { + "epoch": 0.4612828390494702, + "grad_norm": 4.021143436431885, + "learning_rate": 1.1731592887656413e-05, + "loss": 1.0497, + "step": 7357 + }, + { + "epoch": 0.46134553890526053, + "grad_norm": 2.9858410358428955, + "learning_rate": 1.1729592764551739e-05, + "loss": 1.0974, + 
"step": 7358 + }, + { + "epoch": 0.46140823876105086, + "grad_norm": 2.706531524658203, + "learning_rate": 1.172759257011858e-05, + "loss": 1.158, + "step": 7359 + }, + { + "epoch": 0.4614709386168412, + "grad_norm": 3.3967506885528564, + "learning_rate": 1.172559230443943e-05, + "loss": 1.1245, + "step": 7360 + }, + { + "epoch": 0.4615336384726315, + "grad_norm": 3.049349308013916, + "learning_rate": 1.1723591967596777e-05, + "loss": 1.1424, + "step": 7361 + }, + { + "epoch": 0.46159633832842184, + "grad_norm": 3.231443405151367, + "learning_rate": 1.1721591559673119e-05, + "loss": 1.0171, + "step": 7362 + }, + { + "epoch": 0.46165903818421217, + "grad_norm": 3.142348527908325, + "learning_rate": 1.1719591080750945e-05, + "loss": 1.111, + "step": 7363 + }, + { + "epoch": 0.4617217380400025, + "grad_norm": 3.156829833984375, + "learning_rate": 1.1717590530912764e-05, + "loss": 1.2868, + "step": 7364 + }, + { + "epoch": 0.4617844378957928, + "grad_norm": 2.85111927986145, + "learning_rate": 1.1715589910241072e-05, + "loss": 1.2578, + "step": 7365 + }, + { + "epoch": 0.46184713775158315, + "grad_norm": 3.287140130996704, + "learning_rate": 1.1713589218818377e-05, + "loss": 1.1276, + "step": 7366 + }, + { + "epoch": 0.46190983760737353, + "grad_norm": 3.2127504348754883, + "learning_rate": 1.1711588456727187e-05, + "loss": 1.0296, + "step": 7367 + }, + { + "epoch": 0.46197253746316386, + "grad_norm": 2.7980027198791504, + "learning_rate": 1.1709587624050016e-05, + "loss": 1.0296, + "step": 7368 + }, + { + "epoch": 0.4620352373189542, + "grad_norm": 2.8615329265594482, + "learning_rate": 1.1707586720869375e-05, + "loss": 1.2165, + "step": 7369 + }, + { + "epoch": 0.4620979371747445, + "grad_norm": 3.0135765075683594, + "learning_rate": 1.1705585747267786e-05, + "loss": 1.1584, + "step": 7370 + }, + { + "epoch": 0.46216063703053484, + "grad_norm": 2.864360809326172, + "learning_rate": 1.1703584703327764e-05, + "loss": 1.1056, + "step": 7371 + }, + { + "epoch": 0.46222333688632516, + "grad_norm": 2.9842543601989746, + "learning_rate": 1.1701583589131835e-05, + "loss": 1.1586, + "step": 7372 + }, + { + "epoch": 0.4622860367421155, + "grad_norm": 3.1282904148101807, + "learning_rate": 1.1699582404762527e-05, + "loss": 1.1485, + "step": 7373 + }, + { + "epoch": 0.4623487365979058, + "grad_norm": 2.8522937297821045, + "learning_rate": 1.1697581150302362e-05, + "loss": 1.2358, + "step": 7374 + }, + { + "epoch": 0.46241143645369615, + "grad_norm": 2.939047336578369, + "learning_rate": 1.1695579825833879e-05, + "loss": 1.0727, + "step": 7375 + }, + { + "epoch": 0.46247413630948647, + "grad_norm": 2.7773404121398926, + "learning_rate": 1.1693578431439609e-05, + "loss": 1.0224, + "step": 7376 + }, + { + "epoch": 0.4625368361652768, + "grad_norm": 2.673719882965088, + "learning_rate": 1.1691576967202092e-05, + "loss": 1.1622, + "step": 7377 + }, + { + "epoch": 0.4625995360210671, + "grad_norm": 3.011716604232788, + "learning_rate": 1.1689575433203863e-05, + "loss": 1.0497, + "step": 7378 + }, + { + "epoch": 0.4626622358768575, + "grad_norm": 3.318747043609619, + "learning_rate": 1.1687573829527474e-05, + "loss": 1.016, + "step": 7379 + }, + { + "epoch": 0.46272493573264784, + "grad_norm": 2.9826035499572754, + "learning_rate": 1.1685572156255465e-05, + "loss": 1.1756, + "step": 7380 + }, + { + "epoch": 0.46278763558843816, + "grad_norm": 2.7788970470428467, + "learning_rate": 1.1683570413470384e-05, + "loss": 1.2336, + "step": 7381 + }, + { + "epoch": 0.4628503354442285, + "grad_norm": 2.584028959274292, 
+ "learning_rate": 1.1681568601254792e-05, + "loss": 1.115, + "step": 7382 + }, + { + "epoch": 0.4629130353000188, + "grad_norm": 3.030170440673828, + "learning_rate": 1.1679566719691231e-05, + "loss": 0.9194, + "step": 7383 + }, + { + "epoch": 0.46297573515580914, + "grad_norm": 2.936182975769043, + "learning_rate": 1.1677564768862268e-05, + "loss": 1.1303, + "step": 7384 + }, + { + "epoch": 0.46303843501159947, + "grad_norm": 2.932774305343628, + "learning_rate": 1.167556274885046e-05, + "loss": 1.2497, + "step": 7385 + }, + { + "epoch": 0.4631011348673898, + "grad_norm": 3.3421733379364014, + "learning_rate": 1.1673560659738372e-05, + "loss": 1.0557, + "step": 7386 + }, + { + "epoch": 0.4631638347231801, + "grad_norm": 3.18338680267334, + "learning_rate": 1.1671558501608568e-05, + "loss": 1.1344, + "step": 7387 + }, + { + "epoch": 0.46322653457897045, + "grad_norm": 2.6084299087524414, + "learning_rate": 1.1669556274543621e-05, + "loss": 1.2453, + "step": 7388 + }, + { + "epoch": 0.4632892344347608, + "grad_norm": 2.9496865272521973, + "learning_rate": 1.1667553978626098e-05, + "loss": 1.1758, + "step": 7389 + }, + { + "epoch": 0.4633519342905511, + "grad_norm": 2.9917900562286377, + "learning_rate": 1.1665551613938577e-05, + "loss": 1.1348, + "step": 7390 + }, + { + "epoch": 0.4634146341463415, + "grad_norm": 3.1616384983062744, + "learning_rate": 1.1663549180563633e-05, + "loss": 1.0536, + "step": 7391 + }, + { + "epoch": 0.4634773340021318, + "grad_norm": 3.5498716831207275, + "learning_rate": 1.1661546678583848e-05, + "loss": 1.1559, + "step": 7392 + }, + { + "epoch": 0.46354003385792214, + "grad_norm": 3.0382239818573, + "learning_rate": 1.1659544108081808e-05, + "loss": 1.1215, + "step": 7393 + }, + { + "epoch": 0.46360273371371247, + "grad_norm": 3.06514310836792, + "learning_rate": 1.1657541469140094e-05, + "loss": 1.1978, + "step": 7394 + }, + { + "epoch": 0.4636654335695028, + "grad_norm": 3.155182123184204, + "learning_rate": 1.16555387618413e-05, + "loss": 1.0581, + "step": 7395 + }, + { + "epoch": 0.4637281334252931, + "grad_norm": 3.405451536178589, + "learning_rate": 1.1653535986268015e-05, + "loss": 1.1979, + "step": 7396 + }, + { + "epoch": 0.46379083328108345, + "grad_norm": 2.9479222297668457, + "learning_rate": 1.1651533142502833e-05, + "loss": 1.0895, + "step": 7397 + }, + { + "epoch": 0.4638535331368738, + "grad_norm": 3.1616358757019043, + "learning_rate": 1.164953023062835e-05, + "loss": 1.0016, + "step": 7398 + }, + { + "epoch": 0.4639162329926641, + "grad_norm": 3.015350580215454, + "learning_rate": 1.164752725072717e-05, + "loss": 0.9291, + "step": 7399 + }, + { + "epoch": 0.46397893284845443, + "grad_norm": 3.1952083110809326, + "learning_rate": 1.1645524202881895e-05, + "loss": 1.0918, + "step": 7400 + }, + { + "epoch": 0.46404163270424476, + "grad_norm": 3.059478521347046, + "learning_rate": 1.1643521087175129e-05, + "loss": 1.0821, + "step": 7401 + }, + { + "epoch": 0.46410433256003514, + "grad_norm": 3.312764883041382, + "learning_rate": 1.164151790368948e-05, + "loss": 0.9836, + "step": 7402 + }, + { + "epoch": 0.46416703241582546, + "grad_norm": 3.034844160079956, + "learning_rate": 1.1639514652507564e-05, + "loss": 1.1151, + "step": 7403 + }, + { + "epoch": 0.4642297322716158, + "grad_norm": 3.2215404510498047, + "learning_rate": 1.163751133371199e-05, + "loss": 1.1898, + "step": 7404 + }, + { + "epoch": 0.4642924321274061, + "grad_norm": 3.1341516971588135, + "learning_rate": 1.1635507947385379e-05, + "loss": 1.0274, + "step": 7405 + }, + { + "epoch": 
0.46435513198319645, + "grad_norm": 3.28205943107605, + "learning_rate": 1.1633504493610348e-05, + "loss": 1.0964, + "step": 7406 + }, + { + "epoch": 0.46441783183898677, + "grad_norm": 2.9742679595947266, + "learning_rate": 1.163150097246952e-05, + "loss": 1.239, + "step": 7407 + }, + { + "epoch": 0.4644805316947771, + "grad_norm": 3.0297203063964844, + "learning_rate": 1.1629497384045522e-05, + "loss": 1.108, + "step": 7408 + }, + { + "epoch": 0.4645432315505674, + "grad_norm": 3.0633857250213623, + "learning_rate": 1.1627493728420978e-05, + "loss": 1.1449, + "step": 7409 + }, + { + "epoch": 0.46460593140635775, + "grad_norm": 3.4078638553619385, + "learning_rate": 1.1625490005678524e-05, + "loss": 1.0983, + "step": 7410 + }, + { + "epoch": 0.4646686312621481, + "grad_norm": 2.8110337257385254, + "learning_rate": 1.162348621590079e-05, + "loss": 0.9691, + "step": 7411 + }, + { + "epoch": 0.4647313311179384, + "grad_norm": 2.6928369998931885, + "learning_rate": 1.1621482359170413e-05, + "loss": 1.2004, + "step": 7412 + }, + { + "epoch": 0.46479403097372873, + "grad_norm": 3.1155052185058594, + "learning_rate": 1.1619478435570035e-05, + "loss": 1.2353, + "step": 7413 + }, + { + "epoch": 0.4648567308295191, + "grad_norm": 3.0141518115997314, + "learning_rate": 1.1617474445182293e-05, + "loss": 0.9994, + "step": 7414 + }, + { + "epoch": 0.46491943068530944, + "grad_norm": 3.122487783432007, + "learning_rate": 1.1615470388089836e-05, + "loss": 1.0657, + "step": 7415 + }, + { + "epoch": 0.46498213054109977, + "grad_norm": 2.9362096786499023, + "learning_rate": 1.1613466264375309e-05, + "loss": 1.2882, + "step": 7416 + }, + { + "epoch": 0.4650448303968901, + "grad_norm": 3.167875051498413, + "learning_rate": 1.1611462074121363e-05, + "loss": 1.1271, + "step": 7417 + }, + { + "epoch": 0.4651075302526804, + "grad_norm": 2.7094054222106934, + "learning_rate": 1.160945781741065e-05, + "loss": 1.2199, + "step": 7418 + }, + { + "epoch": 0.46517023010847075, + "grad_norm": 2.6796646118164062, + "learning_rate": 1.160745349432583e-05, + "loss": 1.2165, + "step": 7419 + }, + { + "epoch": 0.4652329299642611, + "grad_norm": 3.181467294692993, + "learning_rate": 1.160544910494955e-05, + "loss": 1.0393, + "step": 7420 + }, + { + "epoch": 0.4652956298200514, + "grad_norm": 2.9771928787231445, + "learning_rate": 1.1603444649364485e-05, + "loss": 1.2364, + "step": 7421 + }, + { + "epoch": 0.46535832967584173, + "grad_norm": 3.0227720737457275, + "learning_rate": 1.1601440127653291e-05, + "loss": 0.9797, + "step": 7422 + }, + { + "epoch": 0.46542102953163206, + "grad_norm": 2.9214682579040527, + "learning_rate": 1.1599435539898636e-05, + "loss": 1.1346, + "step": 7423 + }, + { + "epoch": 0.4654837293874224, + "grad_norm": 2.7365243434906006, + "learning_rate": 1.159743088618319e-05, + "loss": 1.1759, + "step": 7424 + }, + { + "epoch": 0.46554642924321277, + "grad_norm": 3.1010804176330566, + "learning_rate": 1.1595426166589622e-05, + "loss": 1.0406, + "step": 7425 + }, + { + "epoch": 0.4656091290990031, + "grad_norm": 3.287201404571533, + "learning_rate": 1.1593421381200614e-05, + "loss": 0.9667, + "step": 7426 + }, + { + "epoch": 0.4656718289547934, + "grad_norm": 2.9074528217315674, + "learning_rate": 1.1591416530098835e-05, + "loss": 1.1356, + "step": 7427 + }, + { + "epoch": 0.46573452881058375, + "grad_norm": 3.0031168460845947, + "learning_rate": 1.158941161336697e-05, + "loss": 1.1612, + "step": 7428 + }, + { + "epoch": 0.4657972286663741, + "grad_norm": 3.3910348415374756, + "learning_rate": 
1.1587406631087701e-05, + "loss": 1.0475, + "step": 7429 + }, + { + "epoch": 0.4658599285221644, + "grad_norm": 2.892116069793701, + "learning_rate": 1.1585401583343714e-05, + "loss": 1.1972, + "step": 7430 + }, + { + "epoch": 0.46592262837795473, + "grad_norm": 2.9998409748077393, + "learning_rate": 1.1583396470217693e-05, + "loss": 1.1241, + "step": 7431 + }, + { + "epoch": 0.46598532823374506, + "grad_norm": 2.866539239883423, + "learning_rate": 1.1581391291792336e-05, + "loss": 1.1246, + "step": 7432 + }, + { + "epoch": 0.4660480280895354, + "grad_norm": 3.206801176071167, + "learning_rate": 1.157938604815033e-05, + "loss": 1.0527, + "step": 7433 + }, + { + "epoch": 0.4661107279453257, + "grad_norm": 2.9112355709075928, + "learning_rate": 1.1577380739374376e-05, + "loss": 1.1295, + "step": 7434 + }, + { + "epoch": 0.46617342780111604, + "grad_norm": 2.9030954837799072, + "learning_rate": 1.1575375365547173e-05, + "loss": 1.0979, + "step": 7435 + }, + { + "epoch": 0.46623612765690636, + "grad_norm": 2.9004456996917725, + "learning_rate": 1.1573369926751416e-05, + "loss": 1.083, + "step": 7436 + }, + { + "epoch": 0.46629882751269675, + "grad_norm": 2.944045305252075, + "learning_rate": 1.1571364423069822e-05, + "loss": 1.0411, + "step": 7437 + }, + { + "epoch": 0.46636152736848707, + "grad_norm": 3.1340363025665283, + "learning_rate": 1.1569358854585084e-05, + "loss": 1.1343, + "step": 7438 + }, + { + "epoch": 0.4664242272242774, + "grad_norm": 3.339256525039673, + "learning_rate": 1.1567353221379921e-05, + "loss": 1.1294, + "step": 7439 + }, + { + "epoch": 0.4664869270800677, + "grad_norm": 3.0844311714172363, + "learning_rate": 1.1565347523537042e-05, + "loss": 1.1704, + "step": 7440 + }, + { + "epoch": 0.46654962693585805, + "grad_norm": 3.1952624320983887, + "learning_rate": 1.1563341761139167e-05, + "loss": 0.8854, + "step": 7441 + }, + { + "epoch": 0.4666123267916484, + "grad_norm": 2.95158314704895, + "learning_rate": 1.1561335934269004e-05, + "loss": 1.1846, + "step": 7442 + }, + { + "epoch": 0.4666750266474387, + "grad_norm": 3.010629177093506, + "learning_rate": 1.155933004300928e-05, + "loss": 1.0947, + "step": 7443 + }, + { + "epoch": 0.46673772650322903, + "grad_norm": 3.0610604286193848, + "learning_rate": 1.1557324087442719e-05, + "loss": 1.0894, + "step": 7444 + }, + { + "epoch": 0.46680042635901936, + "grad_norm": 3.331324815750122, + "learning_rate": 1.1555318067652042e-05, + "loss": 1.0704, + "step": 7445 + }, + { + "epoch": 0.4668631262148097, + "grad_norm": 3.07143235206604, + "learning_rate": 1.1553311983719984e-05, + "loss": 0.9633, + "step": 7446 + }, + { + "epoch": 0.4669258260706, + "grad_norm": 2.989386558532715, + "learning_rate": 1.1551305835729267e-05, + "loss": 1.0164, + "step": 7447 + }, + { + "epoch": 0.4669885259263904, + "grad_norm": 3.082468032836914, + "learning_rate": 1.1549299623762633e-05, + "loss": 1.1526, + "step": 7448 + }, + { + "epoch": 0.4670512257821807, + "grad_norm": 3.1197900772094727, + "learning_rate": 1.1547293347902813e-05, + "loss": 1.1832, + "step": 7449 + }, + { + "epoch": 0.46711392563797105, + "grad_norm": 3.060811996459961, + "learning_rate": 1.1545287008232551e-05, + "loss": 1.1018, + "step": 7450 + }, + { + "epoch": 0.4671766254937614, + "grad_norm": 3.0225231647491455, + "learning_rate": 1.1543280604834581e-05, + "loss": 1.1794, + "step": 7451 + }, + { + "epoch": 0.4672393253495517, + "grad_norm": 3.220254421234131, + "learning_rate": 1.1541274137791654e-05, + "loss": 0.9685, + "step": 7452 + }, + { + "epoch": 
0.46730202520534203, + "grad_norm": 2.792567014694214, + "learning_rate": 1.1539267607186513e-05, + "loss": 1.1893, + "step": 7453 + }, + { + "epoch": 0.46736472506113236, + "grad_norm": 2.8436834812164307, + "learning_rate": 1.1537261013101908e-05, + "loss": 1.2326, + "step": 7454 + }, + { + "epoch": 0.4674274249169227, + "grad_norm": 2.797283887863159, + "learning_rate": 1.1535254355620593e-05, + "loss": 1.1073, + "step": 7455 + }, + { + "epoch": 0.467490124772713, + "grad_norm": 3.0196521282196045, + "learning_rate": 1.1533247634825322e-05, + "loss": 0.9998, + "step": 7456 + }, + { + "epoch": 0.46755282462850334, + "grad_norm": 2.9540674686431885, + "learning_rate": 1.153124085079885e-05, + "loss": 1.1906, + "step": 7457 + }, + { + "epoch": 0.46761552448429367, + "grad_norm": 2.887082099914551, + "learning_rate": 1.152923400362394e-05, + "loss": 1.0126, + "step": 7458 + }, + { + "epoch": 0.467678224340084, + "grad_norm": 3.3435933589935303, + "learning_rate": 1.1527227093383354e-05, + "loss": 1.0828, + "step": 7459 + }, + { + "epoch": 0.4677409241958744, + "grad_norm": 3.287200927734375, + "learning_rate": 1.1525220120159852e-05, + "loss": 1.1232, + "step": 7460 + }, + { + "epoch": 0.4678036240516647, + "grad_norm": 3.019786834716797, + "learning_rate": 1.152321308403621e-05, + "loss": 0.9995, + "step": 7461 + }, + { + "epoch": 0.46786632390745503, + "grad_norm": 3.2964634895324707, + "learning_rate": 1.152120598509519e-05, + "loss": 1.1127, + "step": 7462 + }, + { + "epoch": 0.46792902376324536, + "grad_norm": 2.887749195098877, + "learning_rate": 1.1519198823419569e-05, + "loss": 1.0205, + "step": 7463 + }, + { + "epoch": 0.4679917236190357, + "grad_norm": 3.0475592613220215, + "learning_rate": 1.1517191599092125e-05, + "loss": 1.0233, + "step": 7464 + }, + { + "epoch": 0.468054423474826, + "grad_norm": 3.2078731060028076, + "learning_rate": 1.151518431219563e-05, + "loss": 0.9742, + "step": 7465 + }, + { + "epoch": 0.46811712333061634, + "grad_norm": 2.882603168487549, + "learning_rate": 1.151317696281287e-05, + "loss": 1.1789, + "step": 7466 + }, + { + "epoch": 0.46817982318640666, + "grad_norm": 3.263247013092041, + "learning_rate": 1.1511169551026626e-05, + "loss": 1.0839, + "step": 7467 + }, + { + "epoch": 0.468242523042197, + "grad_norm": 3.1124305725097656, + "learning_rate": 1.1509162076919685e-05, + "loss": 0.9726, + "step": 7468 + }, + { + "epoch": 0.4683052228979873, + "grad_norm": 3.032529830932617, + "learning_rate": 1.1507154540574833e-05, + "loss": 1.1881, + "step": 7469 + }, + { + "epoch": 0.46836792275377764, + "grad_norm": 3.522386074066162, + "learning_rate": 1.150514694207486e-05, + "loss": 1.1275, + "step": 7470 + }, + { + "epoch": 0.46843062260956797, + "grad_norm": 3.018921375274658, + "learning_rate": 1.1503139281502562e-05, + "loss": 1.1075, + "step": 7471 + }, + { + "epoch": 0.46849332246535835, + "grad_norm": 3.366074562072754, + "learning_rate": 1.1501131558940737e-05, + "loss": 1.0156, + "step": 7472 + }, + { + "epoch": 0.4685560223211487, + "grad_norm": 3.112262725830078, + "learning_rate": 1.1499123774472176e-05, + "loss": 1.0958, + "step": 7473 + }, + { + "epoch": 0.468618722176939, + "grad_norm": 3.1227033138275146, + "learning_rate": 1.1497115928179688e-05, + "loss": 0.9626, + "step": 7474 + }, + { + "epoch": 0.46868142203272933, + "grad_norm": 3.3003575801849365, + "learning_rate": 1.1495108020146075e-05, + "loss": 1.2378, + "step": 7475 + }, + { + "epoch": 0.46874412188851966, + "grad_norm": 2.9435205459594727, + "learning_rate": 
1.149310005045414e-05, + "loss": 1.0478, + "step": 7476 + }, + { + "epoch": 0.46880682174431, + "grad_norm": 3.079777956008911, + "learning_rate": 1.1491092019186696e-05, + "loss": 1.1538, + "step": 7477 + }, + { + "epoch": 0.4688695216001003, + "grad_norm": 3.505882501602173, + "learning_rate": 1.148908392642655e-05, + "loss": 1.1607, + "step": 7478 + }, + { + "epoch": 0.46893222145589064, + "grad_norm": 2.897705078125, + "learning_rate": 1.1487075772256517e-05, + "loss": 1.2329, + "step": 7479 + }, + { + "epoch": 0.46899492131168097, + "grad_norm": 3.3312463760375977, + "learning_rate": 1.1485067556759414e-05, + "loss": 1.1838, + "step": 7480 + }, + { + "epoch": 0.4690576211674713, + "grad_norm": 2.971715211868286, + "learning_rate": 1.1483059280018065e-05, + "loss": 1.1544, + "step": 7481 + }, + { + "epoch": 0.4691203210232616, + "grad_norm": 3.0102691650390625, + "learning_rate": 1.1481050942115281e-05, + "loss": 1.1085, + "step": 7482 + }, + { + "epoch": 0.469183020879052, + "grad_norm": 3.2063066959381104, + "learning_rate": 1.1479042543133895e-05, + "loss": 1.0713, + "step": 7483 + }, + { + "epoch": 0.46924572073484233, + "grad_norm": 2.8664207458496094, + "learning_rate": 1.1477034083156728e-05, + "loss": 1.1574, + "step": 7484 + }, + { + "epoch": 0.46930842059063266, + "grad_norm": 2.8793177604675293, + "learning_rate": 1.147502556226661e-05, + "loss": 1.1231, + "step": 7485 + }, + { + "epoch": 0.469371120446423, + "grad_norm": 3.4069576263427734, + "learning_rate": 1.1473016980546377e-05, + "loss": 1.1578, + "step": 7486 + }, + { + "epoch": 0.4694338203022133, + "grad_norm": 3.350881338119507, + "learning_rate": 1.1471008338078855e-05, + "loss": 1.0983, + "step": 7487 + }, + { + "epoch": 0.46949652015800364, + "grad_norm": 2.8713386058807373, + "learning_rate": 1.1468999634946889e-05, + "loss": 1.312, + "step": 7488 + }, + { + "epoch": 0.46955922001379397, + "grad_norm": 3.0379533767700195, + "learning_rate": 1.1466990871233312e-05, + "loss": 1.2624, + "step": 7489 + }, + { + "epoch": 0.4696219198695843, + "grad_norm": 3.141547679901123, + "learning_rate": 1.146498204702097e-05, + "loss": 0.9971, + "step": 7490 + }, + { + "epoch": 0.4696846197253746, + "grad_norm": 2.778829574584961, + "learning_rate": 1.1462973162392699e-05, + "loss": 1.1696, + "step": 7491 + }, + { + "epoch": 0.46974731958116495, + "grad_norm": 3.0143625736236572, + "learning_rate": 1.1460964217431356e-05, + "loss": 1.1179, + "step": 7492 + }, + { + "epoch": 0.4698100194369553, + "grad_norm": 2.9904990196228027, + "learning_rate": 1.145895521221978e-05, + "loss": 1.0726, + "step": 7493 + }, + { + "epoch": 0.4698727192927456, + "grad_norm": 2.830986738204956, + "learning_rate": 1.145694614684083e-05, + "loss": 1.2326, + "step": 7494 + }, + { + "epoch": 0.469935419148536, + "grad_norm": 3.1511483192443848, + "learning_rate": 1.1454937021377357e-05, + "loss": 1.027, + "step": 7495 + }, + { + "epoch": 0.4699981190043263, + "grad_norm": 2.898312568664551, + "learning_rate": 1.1452927835912218e-05, + "loss": 1.0344, + "step": 7496 + }, + { + "epoch": 0.47006081886011664, + "grad_norm": 2.7166213989257812, + "learning_rate": 1.1450918590528272e-05, + "loss": 1.0924, + "step": 7497 + }, + { + "epoch": 0.47012351871590696, + "grad_norm": 3.2019777297973633, + "learning_rate": 1.1448909285308379e-05, + "loss": 1.1125, + "step": 7498 + }, + { + "epoch": 0.4701862185716973, + "grad_norm": 3.2307236194610596, + "learning_rate": 1.1446899920335407e-05, + "loss": 1.1159, + "step": 7499 + }, + { + "epoch": 0.4702489184274876, 
+ "grad_norm": 2.9217660427093506, + "learning_rate": 1.1444890495692214e-05, + "loss": 1.2871, + "step": 7500 + }, + { + "epoch": 0.47031161828327794, + "grad_norm": 2.995701551437378, + "learning_rate": 1.1442881011461677e-05, + "loss": 1.0877, + "step": 7501 + }, + { + "epoch": 0.47037431813906827, + "grad_norm": 3.0006582736968994, + "learning_rate": 1.1440871467726664e-05, + "loss": 1.1429, + "step": 7502 + }, + { + "epoch": 0.4704370179948586, + "grad_norm": 3.2450644969940186, + "learning_rate": 1.1438861864570048e-05, + "loss": 1.0148, + "step": 7503 + }, + { + "epoch": 0.4704997178506489, + "grad_norm": 3.3616650104522705, + "learning_rate": 1.1436852202074706e-05, + "loss": 1.1739, + "step": 7504 + }, + { + "epoch": 0.47056241770643925, + "grad_norm": 3.0924670696258545, + "learning_rate": 1.1434842480323515e-05, + "loss": 1.1046, + "step": 7505 + }, + { + "epoch": 0.47062511756222963, + "grad_norm": 3.357524871826172, + "learning_rate": 1.1432832699399361e-05, + "loss": 1.081, + "step": 7506 + }, + { + "epoch": 0.47068781741801996, + "grad_norm": 2.857750177383423, + "learning_rate": 1.143082285938512e-05, + "loss": 1.1747, + "step": 7507 + }, + { + "epoch": 0.4707505172738103, + "grad_norm": 3.189748764038086, + "learning_rate": 1.1428812960363685e-05, + "loss": 1.1233, + "step": 7508 + }, + { + "epoch": 0.4708132171296006, + "grad_norm": 2.835524559020996, + "learning_rate": 1.1426803002417939e-05, + "loss": 1.127, + "step": 7509 + }, + { + "epoch": 0.47087591698539094, + "grad_norm": 3.0220046043395996, + "learning_rate": 1.1424792985630778e-05, + "loss": 1.1681, + "step": 7510 + }, + { + "epoch": 0.47093861684118127, + "grad_norm": 3.370807409286499, + "learning_rate": 1.1422782910085086e-05, + "loss": 0.9616, + "step": 7511 + }, + { + "epoch": 0.4710013166969716, + "grad_norm": 2.9308810234069824, + "learning_rate": 1.1420772775863771e-05, + "loss": 1.1253, + "step": 7512 + }, + { + "epoch": 0.4710640165527619, + "grad_norm": 2.9273133277893066, + "learning_rate": 1.1418762583049721e-05, + "loss": 1.048, + "step": 7513 + }, + { + "epoch": 0.47112671640855225, + "grad_norm": 3.100020408630371, + "learning_rate": 1.1416752331725842e-05, + "loss": 1.3058, + "step": 7514 + }, + { + "epoch": 0.4711894162643426, + "grad_norm": 3.092349052429199, + "learning_rate": 1.1414742021975032e-05, + "loss": 1.1494, + "step": 7515 + }, + { + "epoch": 0.4712521161201329, + "grad_norm": 3.094649314880371, + "learning_rate": 1.14127316538802e-05, + "loss": 1.0708, + "step": 7516 + }, + { + "epoch": 0.47131481597592323, + "grad_norm": 3.0272960662841797, + "learning_rate": 1.1410721227524256e-05, + "loss": 1.2761, + "step": 7517 + }, + { + "epoch": 0.4713775158317136, + "grad_norm": 2.926767349243164, + "learning_rate": 1.1408710742990104e-05, + "loss": 1.1385, + "step": 7518 + }, + { + "epoch": 0.47144021568750394, + "grad_norm": 2.792231559753418, + "learning_rate": 1.140670020036066e-05, + "loss": 1.3375, + "step": 7519 + }, + { + "epoch": 0.47150291554329427, + "grad_norm": 2.9711556434631348, + "learning_rate": 1.1404689599718838e-05, + "loss": 1.0223, + "step": 7520 + }, + { + "epoch": 0.4715656153990846, + "grad_norm": 3.10408091545105, + "learning_rate": 1.1402678941147557e-05, + "loss": 1.16, + "step": 7521 + }, + { + "epoch": 0.4716283152548749, + "grad_norm": 3.201315402984619, + "learning_rate": 1.1400668224729737e-05, + "loss": 1.0477, + "step": 7522 + }, + { + "epoch": 0.47169101511066525, + "grad_norm": 2.9799649715423584, + "learning_rate": 1.1398657450548296e-05, + "loss": 
1.1366, + "step": 7523 + }, + { + "epoch": 0.4717537149664556, + "grad_norm": 2.9276492595672607, + "learning_rate": 1.139664661868616e-05, + "loss": 1.0617, + "step": 7524 + }, + { + "epoch": 0.4718164148222459, + "grad_norm": 3.147897958755493, + "learning_rate": 1.1394635729226255e-05, + "loss": 1.1274, + "step": 7525 + }, + { + "epoch": 0.4718791146780362, + "grad_norm": 3.0179672241210938, + "learning_rate": 1.1392624782251516e-05, + "loss": 0.9673, + "step": 7526 + }, + { + "epoch": 0.47194181453382655, + "grad_norm": 3.1294474601745605, + "learning_rate": 1.1390613777844866e-05, + "loss": 1.0574, + "step": 7527 + }, + { + "epoch": 0.4720045143896169, + "grad_norm": 3.4661664962768555, + "learning_rate": 1.1388602716089245e-05, + "loss": 1.1549, + "step": 7528 + }, + { + "epoch": 0.47206721424540726, + "grad_norm": 2.990463972091675, + "learning_rate": 1.1386591597067586e-05, + "loss": 1.1572, + "step": 7529 + }, + { + "epoch": 0.4721299141011976, + "grad_norm": 3.1444194316864014, + "learning_rate": 1.1384580420862833e-05, + "loss": 1.1141, + "step": 7530 + }, + { + "epoch": 0.4721926139569879, + "grad_norm": 3.227107286453247, + "learning_rate": 1.138256918755792e-05, + "loss": 1.075, + "step": 7531 + }, + { + "epoch": 0.47225531381277824, + "grad_norm": 2.767106771469116, + "learning_rate": 1.1380557897235794e-05, + "loss": 1.1818, + "step": 7532 + }, + { + "epoch": 0.47231801366856857, + "grad_norm": 3.041156530380249, + "learning_rate": 1.13785465499794e-05, + "loss": 1.1332, + "step": 7533 + }, + { + "epoch": 0.4723807135243589, + "grad_norm": 3.1533584594726562, + "learning_rate": 1.1376535145871685e-05, + "loss": 1.1588, + "step": 7534 + }, + { + "epoch": 0.4724434133801492, + "grad_norm": 3.3171072006225586, + "learning_rate": 1.13745236849956e-05, + "loss": 1.0347, + "step": 7535 + }, + { + "epoch": 0.47250611323593955, + "grad_norm": 2.811558485031128, + "learning_rate": 1.1372512167434098e-05, + "loss": 1.2601, + "step": 7536 + }, + { + "epoch": 0.4725688130917299, + "grad_norm": 2.75618314743042, + "learning_rate": 1.1370500593270137e-05, + "loss": 1.2618, + "step": 7537 + }, + { + "epoch": 0.4726315129475202, + "grad_norm": 3.2607271671295166, + "learning_rate": 1.136848896258667e-05, + "loss": 1.0655, + "step": 7538 + }, + { + "epoch": 0.47269421280331053, + "grad_norm": 3.0325889587402344, + "learning_rate": 1.1366477275466659e-05, + "loss": 1.0835, + "step": 7539 + }, + { + "epoch": 0.47275691265910086, + "grad_norm": 2.7864296436309814, + "learning_rate": 1.1364465531993065e-05, + "loss": 1.1519, + "step": 7540 + }, + { + "epoch": 0.47281961251489124, + "grad_norm": 2.665565013885498, + "learning_rate": 1.1362453732248854e-05, + "loss": 1.2717, + "step": 7541 + }, + { + "epoch": 0.47288231237068157, + "grad_norm": 2.5578763484954834, + "learning_rate": 1.136044187631699e-05, + "loss": 1.131, + "step": 7542 + }, + { + "epoch": 0.4729450122264719, + "grad_norm": 2.6909682750701904, + "learning_rate": 1.1358429964280444e-05, + "loss": 1.2226, + "step": 7543 + }, + { + "epoch": 0.4730077120822622, + "grad_norm": 2.973752975463867, + "learning_rate": 1.1356417996222187e-05, + "loss": 1.0689, + "step": 7544 + }, + { + "epoch": 0.47307041193805255, + "grad_norm": 2.9260435104370117, + "learning_rate": 1.135440597222519e-05, + "loss": 1.1883, + "step": 7545 + }, + { + "epoch": 0.4731331117938429, + "grad_norm": 3.500865936279297, + "learning_rate": 1.1352393892372438e-05, + "loss": 1.0794, + "step": 7546 + }, + { + "epoch": 0.4731958116496332, + "grad_norm": 
3.1465086936950684, + "learning_rate": 1.1350381756746898e-05, + "loss": 1.1935, + "step": 7547 + }, + { + "epoch": 0.47325851150542353, + "grad_norm": 2.8954148292541504, + "learning_rate": 1.1348369565431557e-05, + "loss": 1.052, + "step": 7548 + }, + { + "epoch": 0.47332121136121386, + "grad_norm": 3.190211534500122, + "learning_rate": 1.1346357318509395e-05, + "loss": 1.1872, + "step": 7549 + }, + { + "epoch": 0.4733839112170042, + "grad_norm": 3.143068552017212, + "learning_rate": 1.1344345016063401e-05, + "loss": 1.1464, + "step": 7550 + }, + { + "epoch": 0.4734466110727945, + "grad_norm": 2.735642671585083, + "learning_rate": 1.1342332658176556e-05, + "loss": 1.0481, + "step": 7551 + }, + { + "epoch": 0.47350931092858484, + "grad_norm": 3.0940945148468018, + "learning_rate": 1.1340320244931857e-05, + "loss": 1.2, + "step": 7552 + }, + { + "epoch": 0.4735720107843752, + "grad_norm": 2.9004476070404053, + "learning_rate": 1.1338307776412289e-05, + "loss": 1.1075, + "step": 7553 + }, + { + "epoch": 0.47363471064016555, + "grad_norm": 2.9064791202545166, + "learning_rate": 1.1336295252700855e-05, + "loss": 1.1931, + "step": 7554 + }, + { + "epoch": 0.4736974104959559, + "grad_norm": 3.3265488147735596, + "learning_rate": 1.1334282673880541e-05, + "loss": 1.1223, + "step": 7555 + }, + { + "epoch": 0.4737601103517462, + "grad_norm": 3.040442705154419, + "learning_rate": 1.1332270040034353e-05, + "loss": 1.1826, + "step": 7556 + }, + { + "epoch": 0.4738228102075365, + "grad_norm": 2.8288938999176025, + "learning_rate": 1.1330257351245293e-05, + "loss": 1.0976, + "step": 7557 + }, + { + "epoch": 0.47388551006332685, + "grad_norm": 3.187725782394409, + "learning_rate": 1.1328244607596359e-05, + "loss": 1.0782, + "step": 7558 + }, + { + "epoch": 0.4739482099191172, + "grad_norm": 3.589006185531616, + "learning_rate": 1.132623180917056e-05, + "loss": 1.0294, + "step": 7559 + }, + { + "epoch": 0.4740109097749075, + "grad_norm": 3.2853200435638428, + "learning_rate": 1.1324218956050904e-05, + "loss": 1.0798, + "step": 7560 + }, + { + "epoch": 0.47407360963069783, + "grad_norm": 3.5162274837493896, + "learning_rate": 1.1322206048320402e-05, + "loss": 1.1109, + "step": 7561 + }, + { + "epoch": 0.47413630948648816, + "grad_norm": 2.9433465003967285, + "learning_rate": 1.1320193086062059e-05, + "loss": 1.152, + "step": 7562 + }, + { + "epoch": 0.4741990093422785, + "grad_norm": 3.375659942626953, + "learning_rate": 1.1318180069358901e-05, + "loss": 1.083, + "step": 7563 + }, + { + "epoch": 0.47426170919806887, + "grad_norm": 3.107194185256958, + "learning_rate": 1.1316166998293937e-05, + "loss": 1.2514, + "step": 7564 + }, + { + "epoch": 0.4743244090538592, + "grad_norm": 3.2445192337036133, + "learning_rate": 1.131415387295019e-05, + "loss": 0.9952, + "step": 7565 + }, + { + "epoch": 0.4743871089096495, + "grad_norm": 3.23793363571167, + "learning_rate": 1.1312140693410679e-05, + "loss": 1.0736, + "step": 7566 + }, + { + "epoch": 0.47444980876543985, + "grad_norm": 3.225346326828003, + "learning_rate": 1.1310127459758426e-05, + "loss": 1.0832, + "step": 7567 + }, + { + "epoch": 0.4745125086212302, + "grad_norm": 3.145516872406006, + "learning_rate": 1.1308114172076464e-05, + "loss": 1.0786, + "step": 7568 + }, + { + "epoch": 0.4745752084770205, + "grad_norm": 3.55277419090271, + "learning_rate": 1.1306100830447812e-05, + "loss": 0.9601, + "step": 7569 + }, + { + "epoch": 0.47463790833281083, + "grad_norm": 3.2485015392303467, + "learning_rate": 1.1304087434955508e-05, + "loss": 1.1912, + "step": 
7570 + }, + { + "epoch": 0.47470060818860116, + "grad_norm": 2.960999011993408, + "learning_rate": 1.130207398568258e-05, + "loss": 1.2366, + "step": 7571 + }, + { + "epoch": 0.4747633080443915, + "grad_norm": 3.176215171813965, + "learning_rate": 1.1300060482712065e-05, + "loss": 1.0743, + "step": 7572 + }, + { + "epoch": 0.4748260079001818, + "grad_norm": 2.6886541843414307, + "learning_rate": 1.1298046926126997e-05, + "loss": 1.0693, + "step": 7573 + }, + { + "epoch": 0.47488870775597214, + "grad_norm": 3.527207374572754, + "learning_rate": 1.129603331601042e-05, + "loss": 0.9698, + "step": 7574 + }, + { + "epoch": 0.47495140761176247, + "grad_norm": 3.0923092365264893, + "learning_rate": 1.1294019652445369e-05, + "loss": 1.1038, + "step": 7575 + }, + { + "epoch": 0.47501410746755285, + "grad_norm": 3.092952013015747, + "learning_rate": 1.1292005935514892e-05, + "loss": 1.1482, + "step": 7576 + }, + { + "epoch": 0.4750768073233432, + "grad_norm": 3.047837257385254, + "learning_rate": 1.1289992165302036e-05, + "loss": 0.9538, + "step": 7577 + }, + { + "epoch": 0.4751395071791335, + "grad_norm": 3.038651943206787, + "learning_rate": 1.1287978341889842e-05, + "loss": 1.1935, + "step": 7578 + }, + { + "epoch": 0.47520220703492383, + "grad_norm": 3.240617513656616, + "learning_rate": 1.128596446536137e-05, + "loss": 1.092, + "step": 7579 + }, + { + "epoch": 0.47526490689071416, + "grad_norm": 2.80666446685791, + "learning_rate": 1.1283950535799667e-05, + "loss": 1.263, + "step": 7580 + }, + { + "epoch": 0.4753276067465045, + "grad_norm": 3.0602903366088867, + "learning_rate": 1.1281936553287784e-05, + "loss": 1.1488, + "step": 7581 + }, + { + "epoch": 0.4753903066022948, + "grad_norm": 3.1518523693084717, + "learning_rate": 1.1279922517908784e-05, + "loss": 0.9378, + "step": 7582 + }, + { + "epoch": 0.47545300645808514, + "grad_norm": 2.674818515777588, + "learning_rate": 1.1277908429745726e-05, + "loss": 1.1765, + "step": 7583 + }, + { + "epoch": 0.47551570631387546, + "grad_norm": 2.892976760864258, + "learning_rate": 1.1275894288881664e-05, + "loss": 1.1165, + "step": 7584 + }, + { + "epoch": 0.4755784061696658, + "grad_norm": 3.2823476791381836, + "learning_rate": 1.1273880095399667e-05, + "loss": 1.2296, + "step": 7585 + }, + { + "epoch": 0.4756411060254561, + "grad_norm": 3.3606905937194824, + "learning_rate": 1.1271865849382798e-05, + "loss": 1.1458, + "step": 7586 + }, + { + "epoch": 0.4757038058812465, + "grad_norm": 3.245793581008911, + "learning_rate": 1.1269851550914128e-05, + "loss": 1.0847, + "step": 7587 + }, + { + "epoch": 0.4757665057370368, + "grad_norm": 2.9892070293426514, + "learning_rate": 1.1267837200076726e-05, + "loss": 1.1121, + "step": 7588 + }, + { + "epoch": 0.47582920559282715, + "grad_norm": 3.3069255352020264, + "learning_rate": 1.126582279695366e-05, + "loss": 1.0989, + "step": 7589 + }, + { + "epoch": 0.4758919054486175, + "grad_norm": 3.138946771621704, + "learning_rate": 1.1263808341628009e-05, + "loss": 1.155, + "step": 7590 + }, + { + "epoch": 0.4759546053044078, + "grad_norm": 2.9597082138061523, + "learning_rate": 1.1261793834182843e-05, + "loss": 1.0434, + "step": 7591 + }, + { + "epoch": 0.47601730516019813, + "grad_norm": 2.9039199352264404, + "learning_rate": 1.1259779274701248e-05, + "loss": 1.1095, + "step": 7592 + }, + { + "epoch": 0.47608000501598846, + "grad_norm": 3.0596561431884766, + "learning_rate": 1.1257764663266297e-05, + "loss": 0.9945, + "step": 7593 + }, + { + "epoch": 0.4761427048717788, + "grad_norm": 3.3016862869262695, + 
"learning_rate": 1.125574999996108e-05, + "loss": 1.1138, + "step": 7594 + }, + { + "epoch": 0.4762054047275691, + "grad_norm": 3.181864023208618, + "learning_rate": 1.1253735284868677e-05, + "loss": 1.1965, + "step": 7595 + }, + { + "epoch": 0.47626810458335944, + "grad_norm": 2.8761565685272217, + "learning_rate": 1.1251720518072175e-05, + "loss": 1.2635, + "step": 7596 + }, + { + "epoch": 0.47633080443914977, + "grad_norm": 3.0488979816436768, + "learning_rate": 1.1249705699654665e-05, + "loss": 1.1642, + "step": 7597 + }, + { + "epoch": 0.4763935042949401, + "grad_norm": 2.784501075744629, + "learning_rate": 1.1247690829699236e-05, + "loss": 1.0315, + "step": 7598 + }, + { + "epoch": 0.4764562041507305, + "grad_norm": 2.976269006729126, + "learning_rate": 1.1245675908288984e-05, + "loss": 1.2012, + "step": 7599 + }, + { + "epoch": 0.4765189040065208, + "grad_norm": 3.1984915733337402, + "learning_rate": 1.1243660935507003e-05, + "loss": 1.0264, + "step": 7600 + }, + { + "epoch": 0.47658160386231113, + "grad_norm": 2.823099374771118, + "learning_rate": 1.1241645911436392e-05, + "loss": 1.0834, + "step": 7601 + }, + { + "epoch": 0.47664430371810146, + "grad_norm": 3.4903366565704346, + "learning_rate": 1.1239630836160246e-05, + "loss": 1.0317, + "step": 7602 + }, + { + "epoch": 0.4767070035738918, + "grad_norm": 2.9536681175231934, + "learning_rate": 1.1237615709761673e-05, + "loss": 1.1925, + "step": 7603 + }, + { + "epoch": 0.4767697034296821, + "grad_norm": 2.854703903198242, + "learning_rate": 1.1235600532323774e-05, + "loss": 1.166, + "step": 7604 + }, + { + "epoch": 0.47683240328547244, + "grad_norm": 2.9968628883361816, + "learning_rate": 1.1233585303929654e-05, + "loss": 1.0808, + "step": 7605 + }, + { + "epoch": 0.47689510314126277, + "grad_norm": 2.811340570449829, + "learning_rate": 1.1231570024662424e-05, + "loss": 1.0778, + "step": 7606 + }, + { + "epoch": 0.4769578029970531, + "grad_norm": 2.947138786315918, + "learning_rate": 1.122955469460519e-05, + "loss": 1.1626, + "step": 7607 + }, + { + "epoch": 0.4770205028528434, + "grad_norm": 2.956664562225342, + "learning_rate": 1.1227539313841068e-05, + "loss": 1.1192, + "step": 7608 + }, + { + "epoch": 0.47708320270863375, + "grad_norm": 3.4521732330322266, + "learning_rate": 1.1225523882453169e-05, + "loss": 1.1082, + "step": 7609 + }, + { + "epoch": 0.47714590256442413, + "grad_norm": 2.730195999145508, + "learning_rate": 1.1223508400524612e-05, + "loss": 1.1307, + "step": 7610 + }, + { + "epoch": 0.47720860242021446, + "grad_norm": 3.287835121154785, + "learning_rate": 1.1221492868138517e-05, + "loss": 1.0538, + "step": 7611 + }, + { + "epoch": 0.4772713022760048, + "grad_norm": 2.98039174079895, + "learning_rate": 1.1219477285378002e-05, + "loss": 1.2255, + "step": 7612 + }, + { + "epoch": 0.4773340021317951, + "grad_norm": 3.290388822555542, + "learning_rate": 1.121746165232619e-05, + "loss": 0.9038, + "step": 7613 + }, + { + "epoch": 0.47739670198758544, + "grad_norm": 3.030890464782715, + "learning_rate": 1.121544596906621e-05, + "loss": 1.0331, + "step": 7614 + }, + { + "epoch": 0.47745940184337576, + "grad_norm": 3.2766382694244385, + "learning_rate": 1.121343023568118e-05, + "loss": 1.162, + "step": 7615 + }, + { + "epoch": 0.4775221016991661, + "grad_norm": 3.0845136642456055, + "learning_rate": 1.1211414452254239e-05, + "loss": 1.1105, + "step": 7616 + }, + { + "epoch": 0.4775848015549564, + "grad_norm": 3.066962480545044, + "learning_rate": 1.1209398618868508e-05, + "loss": 1.0827, + "step": 7617 + }, + { + 
"epoch": 0.47764750141074674, + "grad_norm": 3.2951459884643555, + "learning_rate": 1.1207382735607128e-05, + "loss": 1.1956, + "step": 7618 + }, + { + "epoch": 0.47771020126653707, + "grad_norm": 3.10616397857666, + "learning_rate": 1.1205366802553231e-05, + "loss": 1.1237, + "step": 7619 + }, + { + "epoch": 0.4777729011223274, + "grad_norm": 2.808471202850342, + "learning_rate": 1.1203350819789955e-05, + "loss": 1.0938, + "step": 7620 + }, + { + "epoch": 0.4778356009781177, + "grad_norm": 3.2072227001190186, + "learning_rate": 1.1201334787400437e-05, + "loss": 1.0787, + "step": 7621 + }, + { + "epoch": 0.4778983008339081, + "grad_norm": 3.242812156677246, + "learning_rate": 1.1199318705467821e-05, + "loss": 1.0405, + "step": 7622 + }, + { + "epoch": 0.47796100068969843, + "grad_norm": 3.0073904991149902, + "learning_rate": 1.1197302574075248e-05, + "loss": 1.0738, + "step": 7623 + }, + { + "epoch": 0.47802370054548876, + "grad_norm": 3.432295083999634, + "learning_rate": 1.1195286393305866e-05, + "loss": 1.1567, + "step": 7624 + }, + { + "epoch": 0.4780864004012791, + "grad_norm": 3.1236050128936768, + "learning_rate": 1.1193270163242822e-05, + "loss": 1.1811, + "step": 7625 + }, + { + "epoch": 0.4781491002570694, + "grad_norm": 3.506481885910034, + "learning_rate": 1.1191253883969263e-05, + "loss": 1.0209, + "step": 7626 + }, + { + "epoch": 0.47821180011285974, + "grad_norm": 3.1185688972473145, + "learning_rate": 1.1189237555568342e-05, + "loss": 1.0529, + "step": 7627 + }, + { + "epoch": 0.47827449996865007, + "grad_norm": 2.89616060256958, + "learning_rate": 1.1187221178123212e-05, + "loss": 1.2817, + "step": 7628 + }, + { + "epoch": 0.4783371998244404, + "grad_norm": 3.4340028762817383, + "learning_rate": 1.118520475171703e-05, + "loss": 1.1007, + "step": 7629 + }, + { + "epoch": 0.4783998996802307, + "grad_norm": 2.9899303913116455, + "learning_rate": 1.1183188276432948e-05, + "loss": 0.9925, + "step": 7630 + }, + { + "epoch": 0.47846259953602105, + "grad_norm": 2.9357588291168213, + "learning_rate": 1.1181171752354133e-05, + "loss": 1.139, + "step": 7631 + }, + { + "epoch": 0.4785252993918114, + "grad_norm": 2.8542797565460205, + "learning_rate": 1.1179155179563743e-05, + "loss": 1.1491, + "step": 7632 + }, + { + "epoch": 0.4785879992476017, + "grad_norm": 3.188014507293701, + "learning_rate": 1.1177138558144941e-05, + "loss": 1.0798, + "step": 7633 + }, + { + "epoch": 0.4786506991033921, + "grad_norm": 3.1529595851898193, + "learning_rate": 1.1175121888180895e-05, + "loss": 1.0456, + "step": 7634 + }, + { + "epoch": 0.4787133989591824, + "grad_norm": 2.8452165126800537, + "learning_rate": 1.117310516975477e-05, + "loss": 0.9926, + "step": 7635 + }, + { + "epoch": 0.47877609881497274, + "grad_norm": 3.561793804168701, + "learning_rate": 1.1171088402949739e-05, + "loss": 0.9761, + "step": 7636 + }, + { + "epoch": 0.47883879867076307, + "grad_norm": 2.7121832370758057, + "learning_rate": 1.1169071587848968e-05, + "loss": 1.169, + "step": 7637 + }, + { + "epoch": 0.4789014985265534, + "grad_norm": 2.9332613945007324, + "learning_rate": 1.1167054724535635e-05, + "loss": 1.2739, + "step": 7638 + }, + { + "epoch": 0.4789641983823437, + "grad_norm": 2.9116735458374023, + "learning_rate": 1.1165037813092913e-05, + "loss": 1.0697, + "step": 7639 + }, + { + "epoch": 0.47902689823813405, + "grad_norm": 3.060894250869751, + "learning_rate": 1.1163020853603986e-05, + "loss": 1.1183, + "step": 7640 + }, + { + "epoch": 0.4790895980939244, + "grad_norm": 2.983771562576294, + "learning_rate": 
1.1161003846152021e-05, + "loss": 1.1038, + "step": 7641 + }, + { + "epoch": 0.4791522979497147, + "grad_norm": 3.094872236251831, + "learning_rate": 1.115898679082021e-05, + "loss": 0.9593, + "step": 7642 + }, + { + "epoch": 0.479214997805505, + "grad_norm": 3.31032657623291, + "learning_rate": 1.1156969687691732e-05, + "loss": 1.0613, + "step": 7643 + }, + { + "epoch": 0.47927769766129535, + "grad_norm": 3.495145797729492, + "learning_rate": 1.1154952536849774e-05, + "loss": 0.9848, + "step": 7644 + }, + { + "epoch": 0.47934039751708574, + "grad_norm": 3.1906344890594482, + "learning_rate": 1.1152935338377525e-05, + "loss": 1.2098, + "step": 7645 + }, + { + "epoch": 0.47940309737287606, + "grad_norm": 3.138446092605591, + "learning_rate": 1.115091809235817e-05, + "loss": 1.3226, + "step": 7646 + }, + { + "epoch": 0.4794657972286664, + "grad_norm": 3.049813985824585, + "learning_rate": 1.1148900798874904e-05, + "loss": 0.9635, + "step": 7647 + }, + { + "epoch": 0.4795284970844567, + "grad_norm": 3.2758841514587402, + "learning_rate": 1.1146883458010918e-05, + "loss": 1.0143, + "step": 7648 + }, + { + "epoch": 0.47959119694024704, + "grad_norm": 2.9519617557525635, + "learning_rate": 1.1144866069849409e-05, + "loss": 1.2063, + "step": 7649 + }, + { + "epoch": 0.47965389679603737, + "grad_norm": 2.9831607341766357, + "learning_rate": 1.114284863447357e-05, + "loss": 1.133, + "step": 7650 + }, + { + "epoch": 0.4797165966518277, + "grad_norm": 3.3974804878234863, + "learning_rate": 1.114083115196661e-05, + "loss": 1.1455, + "step": 7651 + }, + { + "epoch": 0.479779296507618, + "grad_norm": 2.931187391281128, + "learning_rate": 1.1138813622411717e-05, + "loss": 1.1761, + "step": 7652 + }, + { + "epoch": 0.47984199636340835, + "grad_norm": 3.1562228202819824, + "learning_rate": 1.1136796045892102e-05, + "loss": 1.1518, + "step": 7653 + }, + { + "epoch": 0.4799046962191987, + "grad_norm": 3.0086424350738525, + "learning_rate": 1.1134778422490971e-05, + "loss": 1.2288, + "step": 7654 + }, + { + "epoch": 0.479967396074989, + "grad_norm": 3.0607619285583496, + "learning_rate": 1.1132760752291524e-05, + "loss": 1.1314, + "step": 7655 + }, + { + "epoch": 0.48003009593077933, + "grad_norm": 3.356386423110962, + "learning_rate": 1.113074303537698e-05, + "loss": 1.1534, + "step": 7656 + }, + { + "epoch": 0.4800927957865697, + "grad_norm": 3.1608593463897705, + "learning_rate": 1.1128725271830538e-05, + "loss": 1.0889, + "step": 7657 + }, + { + "epoch": 0.48015549564236004, + "grad_norm": 2.9501333236694336, + "learning_rate": 1.112670746173542e-05, + "loss": 1.3007, + "step": 7658 + }, + { + "epoch": 0.48021819549815037, + "grad_norm": 3.0779128074645996, + "learning_rate": 1.1124689605174834e-05, + "loss": 1.1175, + "step": 7659 + }, + { + "epoch": 0.4802808953539407, + "grad_norm": 2.638820171356201, + "learning_rate": 1.1122671702232004e-05, + "loss": 1.145, + "step": 7660 + }, + { + "epoch": 0.480343595209731, + "grad_norm": 3.049434185028076, + "learning_rate": 1.112065375299014e-05, + "loss": 1.0746, + "step": 7661 + }, + { + "epoch": 0.48040629506552135, + "grad_norm": 3.0974323749542236, + "learning_rate": 1.111863575753247e-05, + "loss": 1.2505, + "step": 7662 + }, + { + "epoch": 0.4804689949213117, + "grad_norm": 3.2038686275482178, + "learning_rate": 1.1116617715942208e-05, + "loss": 1.0596, + "step": 7663 + }, + { + "epoch": 0.480531694777102, + "grad_norm": 3.3167266845703125, + "learning_rate": 1.1114599628302585e-05, + "loss": 0.9947, + "step": 7664 + }, + { + "epoch": 
0.48059439463289233, + "grad_norm": 3.1553707122802734, + "learning_rate": 1.1112581494696825e-05, + "loss": 1.0756, + "step": 7665 + }, + { + "epoch": 0.48065709448868266, + "grad_norm": 2.965857744216919, + "learning_rate": 1.1110563315208157e-05, + "loss": 1.1027, + "step": 7666 + }, + { + "epoch": 0.480719794344473, + "grad_norm": 2.827388286590576, + "learning_rate": 1.1108545089919807e-05, + "loss": 1.0661, + "step": 7667 + }, + { + "epoch": 0.48078249420026337, + "grad_norm": 2.7848522663116455, + "learning_rate": 1.1106526818915008e-05, + "loss": 1.1551, + "step": 7668 + }, + { + "epoch": 0.4808451940560537, + "grad_norm": 3.0728354454040527, + "learning_rate": 1.1104508502276999e-05, + "loss": 1.1018, + "step": 7669 + }, + { + "epoch": 0.480907893911844, + "grad_norm": 3.6333889961242676, + "learning_rate": 1.1102490140089009e-05, + "loss": 1.1212, + "step": 7670 + }, + { + "epoch": 0.48097059376763435, + "grad_norm": 3.4929347038269043, + "learning_rate": 1.1100471732434277e-05, + "loss": 1.1961, + "step": 7671 + }, + { + "epoch": 0.4810332936234247, + "grad_norm": 2.9079384803771973, + "learning_rate": 1.1098453279396042e-05, + "loss": 1.0661, + "step": 7672 + }, + { + "epoch": 0.481095993479215, + "grad_norm": 3.245511770248413, + "learning_rate": 1.1096434781057545e-05, + "loss": 1.0518, + "step": 7673 + }, + { + "epoch": 0.4811586933350053, + "grad_norm": 3.0891687870025635, + "learning_rate": 1.1094416237502032e-05, + "loss": 1.1241, + "step": 7674 + }, + { + "epoch": 0.48122139319079565, + "grad_norm": 2.893383741378784, + "learning_rate": 1.1092397648812746e-05, + "loss": 1.0781, + "step": 7675 + }, + { + "epoch": 0.481284093046586, + "grad_norm": 2.918168067932129, + "learning_rate": 1.1090379015072932e-05, + "loss": 1.1654, + "step": 7676 + }, + { + "epoch": 0.4813467929023763, + "grad_norm": 2.9249794483184814, + "learning_rate": 1.1088360336365837e-05, + "loss": 0.9934, + "step": 7677 + }, + { + "epoch": 0.48140949275816663, + "grad_norm": 2.815960168838501, + "learning_rate": 1.108634161277472e-05, + "loss": 1.1586, + "step": 7678 + }, + { + "epoch": 0.48147219261395696, + "grad_norm": 3.219503164291382, + "learning_rate": 1.1084322844382822e-05, + "loss": 1.087, + "step": 7679 + }, + { + "epoch": 0.48153489246974734, + "grad_norm": 3.3270413875579834, + "learning_rate": 1.1082304031273408e-05, + "loss": 1.1091, + "step": 7680 + }, + { + "epoch": 0.48159759232553767, + "grad_norm": 2.922849178314209, + "learning_rate": 1.1080285173529724e-05, + "loss": 1.1933, + "step": 7681 + }, + { + "epoch": 0.481660292181328, + "grad_norm": 3.4985764026641846, + "learning_rate": 1.1078266271235034e-05, + "loss": 1.118, + "step": 7682 + }, + { + "epoch": 0.4817229920371183, + "grad_norm": 3.436943531036377, + "learning_rate": 1.1076247324472595e-05, + "loss": 1.1416, + "step": 7683 + }, + { + "epoch": 0.48178569189290865, + "grad_norm": 3.1191112995147705, + "learning_rate": 1.1074228333325669e-05, + "loss": 1.1838, + "step": 7684 + }, + { + "epoch": 0.481848391748699, + "grad_norm": 3.1494479179382324, + "learning_rate": 1.1072209297877523e-05, + "loss": 1.1637, + "step": 7685 + }, + { + "epoch": 0.4819110916044893, + "grad_norm": 3.507256031036377, + "learning_rate": 1.1070190218211414e-05, + "loss": 1.2024, + "step": 7686 + }, + { + "epoch": 0.48197379146027963, + "grad_norm": 2.90341854095459, + "learning_rate": 1.1068171094410618e-05, + "loss": 1.2981, + "step": 7687 + }, + { + "epoch": 0.48203649131606996, + "grad_norm": 2.9257071018218994, + "learning_rate": 
1.1066151926558397e-05, + "loss": 1.0402, + "step": 7688 + }, + { + "epoch": 0.4820991911718603, + "grad_norm": 3.0075876712799072, + "learning_rate": 1.1064132714738024e-05, + "loss": 1.1652, + "step": 7689 + }, + { + "epoch": 0.4821618910276506, + "grad_norm": 3.2593588829040527, + "learning_rate": 1.1062113459032773e-05, + "loss": 1.1197, + "step": 7690 + }, + { + "epoch": 0.482224590883441, + "grad_norm": 2.945937395095825, + "learning_rate": 1.1060094159525916e-05, + "loss": 1.1951, + "step": 7691 + }, + { + "epoch": 0.4822872907392313, + "grad_norm": 3.195600748062134, + "learning_rate": 1.1058074816300727e-05, + "loss": 0.9912, + "step": 7692 + }, + { + "epoch": 0.48234999059502165, + "grad_norm": 3.2257726192474365, + "learning_rate": 1.105605542944049e-05, + "loss": 1.0822, + "step": 7693 + }, + { + "epoch": 0.482412690450812, + "grad_norm": 3.4775800704956055, + "learning_rate": 1.1054035999028478e-05, + "loss": 1.3311, + "step": 7694 + }, + { + "epoch": 0.4824753903066023, + "grad_norm": 2.9956061840057373, + "learning_rate": 1.1052016525147977e-05, + "loss": 0.947, + "step": 7695 + }, + { + "epoch": 0.48253809016239263, + "grad_norm": 3.3036341667175293, + "learning_rate": 1.104999700788227e-05, + "loss": 1.0368, + "step": 7696 + }, + { + "epoch": 0.48260079001818296, + "grad_norm": 3.2514190673828125, + "learning_rate": 1.1047977447314637e-05, + "loss": 1.1431, + "step": 7697 + }, + { + "epoch": 0.4826634898739733, + "grad_norm": 3.0450987815856934, + "learning_rate": 1.1045957843528372e-05, + "loss": 0.9546, + "step": 7698 + }, + { + "epoch": 0.4827261897297636, + "grad_norm": 2.9394214153289795, + "learning_rate": 1.1043938196606755e-05, + "loss": 1.1243, + "step": 7699 + }, + { + "epoch": 0.48278888958555394, + "grad_norm": 3.0056259632110596, + "learning_rate": 1.1041918506633087e-05, + "loss": 1.066, + "step": 7700 + }, + { + "epoch": 0.48285158944134426, + "grad_norm": 3.2816646099090576, + "learning_rate": 1.103989877369065e-05, + "loss": 1.2024, + "step": 7701 + }, + { + "epoch": 0.4829142892971346, + "grad_norm": 2.8667361736297607, + "learning_rate": 1.1037878997862745e-05, + "loss": 1.0242, + "step": 7702 + }, + { + "epoch": 0.482976989152925, + "grad_norm": 3.19527268409729, + "learning_rate": 1.1035859179232661e-05, + "loss": 1.0545, + "step": 7703 + }, + { + "epoch": 0.4830396890087153, + "grad_norm": 3.178840160369873, + "learning_rate": 1.10338393178837e-05, + "loss": 1.2905, + "step": 7704 + }, + { + "epoch": 0.4831023888645056, + "grad_norm": 3.2206923961639404, + "learning_rate": 1.1031819413899165e-05, + "loss": 1.2258, + "step": 7705 + }, + { + "epoch": 0.48316508872029595, + "grad_norm": 2.9899542331695557, + "learning_rate": 1.1029799467362348e-05, + "loss": 1.0356, + "step": 7706 + }, + { + "epoch": 0.4832277885760863, + "grad_norm": 3.2268142700195312, + "learning_rate": 1.102777947835656e-05, + "loss": 1.0164, + "step": 7707 + }, + { + "epoch": 0.4832904884318766, + "grad_norm": 3.4630322456359863, + "learning_rate": 1.1025759446965097e-05, + "loss": 1.0253, + "step": 7708 + }, + { + "epoch": 0.48335318828766693, + "grad_norm": 3.297680616378784, + "learning_rate": 1.1023739373271271e-05, + "loss": 1.1722, + "step": 7709 + }, + { + "epoch": 0.48341588814345726, + "grad_norm": 2.94793963432312, + "learning_rate": 1.102171925735839e-05, + "loss": 1.1827, + "step": 7710 + }, + { + "epoch": 0.4834785879992476, + "grad_norm": 3.101896047592163, + "learning_rate": 1.1019699099309762e-05, + "loss": 1.2642, + "step": 7711 + }, + { + "epoch": 
0.4835412878550379, + "grad_norm": 3.513653516769409, + "learning_rate": 1.1017678899208697e-05, + "loss": 1.1349, + "step": 7712 + }, + { + "epoch": 0.48360398771082824, + "grad_norm": 3.3255093097686768, + "learning_rate": 1.101565865713851e-05, + "loss": 1.0069, + "step": 7713 + }, + { + "epoch": 0.48366668756661857, + "grad_norm": 3.1380558013916016, + "learning_rate": 1.1013638373182516e-05, + "loss": 1.1234, + "step": 7714 + }, + { + "epoch": 0.48372938742240895, + "grad_norm": 3.1394331455230713, + "learning_rate": 1.101161804742403e-05, + "loss": 1.0533, + "step": 7715 + }, + { + "epoch": 0.4837920872781993, + "grad_norm": 3.3694849014282227, + "learning_rate": 1.1009597679946373e-05, + "loss": 1.0374, + "step": 7716 + }, + { + "epoch": 0.4838547871339896, + "grad_norm": 3.049778461456299, + "learning_rate": 1.1007577270832864e-05, + "loss": 1.0775, + "step": 7717 + }, + { + "epoch": 0.48391748698977993, + "grad_norm": 2.8695077896118164, + "learning_rate": 1.1005556820166825e-05, + "loss": 1.0991, + "step": 7718 + }, + { + "epoch": 0.48398018684557026, + "grad_norm": 2.927640914916992, + "learning_rate": 1.1003536328031577e-05, + "loss": 1.1183, + "step": 7719 + }, + { + "epoch": 0.4840428867013606, + "grad_norm": 3.2335164546966553, + "learning_rate": 1.1001515794510449e-05, + "loss": 0.9451, + "step": 7720 + }, + { + "epoch": 0.4841055865571509, + "grad_norm": 2.8877511024475098, + "learning_rate": 1.0999495219686762e-05, + "loss": 1.1855, + "step": 7721 + }, + { + "epoch": 0.48416828641294124, + "grad_norm": 3.0006961822509766, + "learning_rate": 1.0997474603643852e-05, + "loss": 1.1018, + "step": 7722 + }, + { + "epoch": 0.48423098626873157, + "grad_norm": 2.854189157485962, + "learning_rate": 1.0995453946465045e-05, + "loss": 1.1295, + "step": 7723 + }, + { + "epoch": 0.4842936861245219, + "grad_norm": 3.4039909839630127, + "learning_rate": 1.0993433248233672e-05, + "loss": 1.1223, + "step": 7724 + }, + { + "epoch": 0.4843563859803122, + "grad_norm": 2.919710636138916, + "learning_rate": 1.0991412509033073e-05, + "loss": 1.1442, + "step": 7725 + }, + { + "epoch": 0.4844190858361026, + "grad_norm": 3.4688336849212646, + "learning_rate": 1.0989391728946575e-05, + "loss": 1.0904, + "step": 7726 + }, + { + "epoch": 0.48448178569189293, + "grad_norm": 3.1622376441955566, + "learning_rate": 1.098737090805752e-05, + "loss": 1.1446, + "step": 7727 + }, + { + "epoch": 0.48454448554768326, + "grad_norm": 3.15564227104187, + "learning_rate": 1.0985350046449244e-05, + "loss": 1.0665, + "step": 7728 + }, + { + "epoch": 0.4846071854034736, + "grad_norm": 3.0488474369049072, + "learning_rate": 1.0983329144205092e-05, + "loss": 1.2716, + "step": 7729 + }, + { + "epoch": 0.4846698852592639, + "grad_norm": 3.0741732120513916, + "learning_rate": 1.09813082014084e-05, + "loss": 1.0716, + "step": 7730 + }, + { + "epoch": 0.48473258511505424, + "grad_norm": 2.997169017791748, + "learning_rate": 1.0979287218142518e-05, + "loss": 1.1767, + "step": 7731 + }, + { + "epoch": 0.48479528497084456, + "grad_norm": 2.937833070755005, + "learning_rate": 1.0977266194490785e-05, + "loss": 1.1268, + "step": 7732 + }, + { + "epoch": 0.4848579848266349, + "grad_norm": 2.932361602783203, + "learning_rate": 1.0975245130536555e-05, + "loss": 1.0167, + "step": 7733 + }, + { + "epoch": 0.4849206846824252, + "grad_norm": 3.537074565887451, + "learning_rate": 1.0973224026363171e-05, + "loss": 1.1025, + "step": 7734 + }, + { + "epoch": 0.48498338453821555, + "grad_norm": 3.138270854949951, + "learning_rate": 
1.0971202882053987e-05, + "loss": 1.1512, + "step": 7735 + }, + { + "epoch": 0.48504608439400587, + "grad_norm": 3.344392776489258, + "learning_rate": 1.0969181697692353e-05, + "loss": 1.1921, + "step": 7736 + }, + { + "epoch": 0.4851087842497962, + "grad_norm": 3.396237373352051, + "learning_rate": 1.0967160473361623e-05, + "loss": 1.0263, + "step": 7737 + }, + { + "epoch": 0.4851714841055866, + "grad_norm": 3.2069780826568604, + "learning_rate": 1.0965139209145153e-05, + "loss": 1.0808, + "step": 7738 + }, + { + "epoch": 0.4852341839613769, + "grad_norm": 3.5083274841308594, + "learning_rate": 1.0963117905126299e-05, + "loss": 1.0509, + "step": 7739 + }, + { + "epoch": 0.48529688381716723, + "grad_norm": 3.0569493770599365, + "learning_rate": 1.0961096561388423e-05, + "loss": 1.2449, + "step": 7740 + }, + { + "epoch": 0.48535958367295756, + "grad_norm": 3.2578377723693848, + "learning_rate": 1.095907517801488e-05, + "loss": 1.1936, + "step": 7741 + }, + { + "epoch": 0.4854222835287479, + "grad_norm": 2.913372755050659, + "learning_rate": 1.0957053755089037e-05, + "loss": 1.2128, + "step": 7742 + }, + { + "epoch": 0.4854849833845382, + "grad_norm": 3.013916492462158, + "learning_rate": 1.0955032292694254e-05, + "loss": 1.1548, + "step": 7743 + }, + { + "epoch": 0.48554768324032854, + "grad_norm": 3.0558745861053467, + "learning_rate": 1.09530107909139e-05, + "loss": 1.0788, + "step": 7744 + }, + { + "epoch": 0.48561038309611887, + "grad_norm": 3.189410924911499, + "learning_rate": 1.0950989249831337e-05, + "loss": 0.9374, + "step": 7745 + }, + { + "epoch": 0.4856730829519092, + "grad_norm": 3.010740280151367, + "learning_rate": 1.0948967669529935e-05, + "loss": 1.0359, + "step": 7746 + }, + { + "epoch": 0.4857357828076995, + "grad_norm": 3.0334672927856445, + "learning_rate": 1.0946946050093069e-05, + "loss": 1.1364, + "step": 7747 + }, + { + "epoch": 0.48579848266348985, + "grad_norm": 2.925107479095459, + "learning_rate": 1.0944924391604102e-05, + "loss": 1.0045, + "step": 7748 + }, + { + "epoch": 0.48586118251928023, + "grad_norm": 3.258021831512451, + "learning_rate": 1.0942902694146413e-05, + "loss": 1.099, + "step": 7749 + }, + { + "epoch": 0.48592388237507056, + "grad_norm": 3.3099207878112793, + "learning_rate": 1.0940880957803377e-05, + "loss": 0.9066, + "step": 7750 + }, + { + "epoch": 0.4859865822308609, + "grad_norm": 2.8566243648529053, + "learning_rate": 1.0938859182658368e-05, + "loss": 1.0224, + "step": 7751 + }, + { + "epoch": 0.4860492820866512, + "grad_norm": 3.227254867553711, + "learning_rate": 1.0936837368794765e-05, + "loss": 1.1556, + "step": 7752 + }, + { + "epoch": 0.48611198194244154, + "grad_norm": 3.2489609718322754, + "learning_rate": 1.0934815516295951e-05, + "loss": 1.0513, + "step": 7753 + }, + { + "epoch": 0.48617468179823187, + "grad_norm": 3.061716318130493, + "learning_rate": 1.0932793625245302e-05, + "loss": 1.023, + "step": 7754 + }, + { + "epoch": 0.4862373816540222, + "grad_norm": 2.9704601764678955, + "learning_rate": 1.0930771695726201e-05, + "loss": 1.2113, + "step": 7755 + }, + { + "epoch": 0.4863000815098125, + "grad_norm": 3.0043985843658447, + "learning_rate": 1.0928749727822038e-05, + "loss": 1.2099, + "step": 7756 + }, + { + "epoch": 0.48636278136560285, + "grad_norm": 3.2751500606536865, + "learning_rate": 1.0926727721616193e-05, + "loss": 1.04, + "step": 7757 + }, + { + "epoch": 0.4864254812213932, + "grad_norm": 3.1282527446746826, + "learning_rate": 1.092470567719206e-05, + "loss": 1.0553, + "step": 7758 + }, + { + "epoch": 
0.4864881810771835, + "grad_norm": 2.8777034282684326, + "learning_rate": 1.092268359463302e-05, + "loss": 1.1162, + "step": 7759 + }, + { + "epoch": 0.48655088093297383, + "grad_norm": 3.1067192554473877, + "learning_rate": 1.0920661474022474e-05, + "loss": 1.2054, + "step": 7760 + }, + { + "epoch": 0.4866135807887642, + "grad_norm": 3.0911030769348145, + "learning_rate": 1.0918639315443805e-05, + "loss": 1.087, + "step": 7761 + }, + { + "epoch": 0.48667628064455454, + "grad_norm": 3.2538437843322754, + "learning_rate": 1.091661711898041e-05, + "loss": 1.2109, + "step": 7762 + }, + { + "epoch": 0.48673898050034486, + "grad_norm": 2.9076781272888184, + "learning_rate": 1.0914594884715685e-05, + "loss": 1.2242, + "step": 7763 + }, + { + "epoch": 0.4868016803561352, + "grad_norm": 3.15541410446167, + "learning_rate": 1.0912572612733027e-05, + "loss": 1.2219, + "step": 7764 + }, + { + "epoch": 0.4868643802119255, + "grad_norm": 2.780320167541504, + "learning_rate": 1.0910550303115836e-05, + "loss": 0.9638, + "step": 7765 + }, + { + "epoch": 0.48692708006771585, + "grad_norm": 2.881753444671631, + "learning_rate": 1.090852795594751e-05, + "loss": 1.1125, + "step": 7766 + }, + { + "epoch": 0.48698977992350617, + "grad_norm": 3.1856448650360107, + "learning_rate": 1.0906505571311453e-05, + "loss": 1.2404, + "step": 7767 + }, + { + "epoch": 0.4870524797792965, + "grad_norm": 2.9310221672058105, + "learning_rate": 1.0904483149291064e-05, + "loss": 1.1589, + "step": 7768 + }, + { + "epoch": 0.4871151796350868, + "grad_norm": 3.053786277770996, + "learning_rate": 1.0902460689969752e-05, + "loss": 0.9752, + "step": 7769 + }, + { + "epoch": 0.48717787949087715, + "grad_norm": 3.2866036891937256, + "learning_rate": 1.090043819343092e-05, + "loss": 1.2033, + "step": 7770 + }, + { + "epoch": 0.4872405793466675, + "grad_norm": 3.060844659805298, + "learning_rate": 1.089841565975798e-05, + "loss": 0.9737, + "step": 7771 + }, + { + "epoch": 0.48730327920245786, + "grad_norm": 3.1528003215789795, + "learning_rate": 1.0896393089034336e-05, + "loss": 1.1012, + "step": 7772 + }, + { + "epoch": 0.4873659790582482, + "grad_norm": 2.971306324005127, + "learning_rate": 1.0894370481343406e-05, + "loss": 1.1014, + "step": 7773 + }, + { + "epoch": 0.4874286789140385, + "grad_norm": 3.204011917114258, + "learning_rate": 1.0892347836768594e-05, + "loss": 1.0004, + "step": 7774 + }, + { + "epoch": 0.48749137876982884, + "grad_norm": 2.8119571208953857, + "learning_rate": 1.089032515539332e-05, + "loss": 1.058, + "step": 7775 + }, + { + "epoch": 0.48755407862561917, + "grad_norm": 3.21614408493042, + "learning_rate": 1.0888302437300999e-05, + "loss": 1.0053, + "step": 7776 + }, + { + "epoch": 0.4876167784814095, + "grad_norm": 3.373741388320923, + "learning_rate": 1.0886279682575045e-05, + "loss": 1.2699, + "step": 7777 + }, + { + "epoch": 0.4876794783371998, + "grad_norm": 2.8911385536193848, + "learning_rate": 1.0884256891298879e-05, + "loss": 1.2429, + "step": 7778 + }, + { + "epoch": 0.48774217819299015, + "grad_norm": 3.2246735095977783, + "learning_rate": 1.0882234063555918e-05, + "loss": 1.1095, + "step": 7779 + }, + { + "epoch": 0.4878048780487805, + "grad_norm": 3.1708004474639893, + "learning_rate": 1.088021119942959e-05, + "loss": 1.3787, + "step": 7780 + }, + { + "epoch": 0.4878675779045708, + "grad_norm": 2.8348097801208496, + "learning_rate": 1.0878188299003309e-05, + "loss": 1.1004, + "step": 7781 + }, + { + "epoch": 0.48793027776036113, + "grad_norm": 2.941654920578003, + "learning_rate": 
1.0876165362360509e-05, + "loss": 1.0721, + "step": 7782 + }, + { + "epoch": 0.48799297761615146, + "grad_norm": 3.1448097229003906, + "learning_rate": 1.0874142389584608e-05, + "loss": 1.09, + "step": 7783 + }, + { + "epoch": 0.48805567747194184, + "grad_norm": 3.2803916931152344, + "learning_rate": 1.0872119380759039e-05, + "loss": 0.9916, + "step": 7784 + }, + { + "epoch": 0.48811837732773217, + "grad_norm": 2.81382417678833, + "learning_rate": 1.0870096335967226e-05, + "loss": 1.1675, + "step": 7785 + }, + { + "epoch": 0.4881810771835225, + "grad_norm": 2.9191460609436035, + "learning_rate": 1.0868073255292601e-05, + "loss": 1.0773, + "step": 7786 + }, + { + "epoch": 0.4882437770393128, + "grad_norm": 3.1125998497009277, + "learning_rate": 1.0866050138818602e-05, + "loss": 1.134, + "step": 7787 + }, + { + "epoch": 0.48830647689510315, + "grad_norm": 3.1240172386169434, + "learning_rate": 1.0864026986628653e-05, + "loss": 1.0475, + "step": 7788 + }, + { + "epoch": 0.4883691767508935, + "grad_norm": 3.2224669456481934, + "learning_rate": 1.0862003798806195e-05, + "loss": 1.1116, + "step": 7789 + }, + { + "epoch": 0.4884318766066838, + "grad_norm": 2.8994300365448, + "learning_rate": 1.0859980575434663e-05, + "loss": 1.0837, + "step": 7790 + }, + { + "epoch": 0.48849457646247413, + "grad_norm": 2.7823944091796875, + "learning_rate": 1.0857957316597496e-05, + "loss": 1.0871, + "step": 7791 + }, + { + "epoch": 0.48855727631826446, + "grad_norm": 2.917055368423462, + "learning_rate": 1.085593402237813e-05, + "loss": 1.0846, + "step": 7792 + }, + { + "epoch": 0.4886199761740548, + "grad_norm": 3.142906427383423, + "learning_rate": 1.0853910692860008e-05, + "loss": 1.0197, + "step": 7793 + }, + { + "epoch": 0.4886826760298451, + "grad_norm": 3.1320064067840576, + "learning_rate": 1.0851887328126569e-05, + "loss": 1.0813, + "step": 7794 + }, + { + "epoch": 0.48874537588563544, + "grad_norm": 2.790195941925049, + "learning_rate": 1.0849863928261264e-05, + "loss": 1.1004, + "step": 7795 + }, + { + "epoch": 0.4888080757414258, + "grad_norm": 3.0192131996154785, + "learning_rate": 1.084784049334753e-05, + "loss": 1.1186, + "step": 7796 + }, + { + "epoch": 0.48887077559721615, + "grad_norm": 2.7859537601470947, + "learning_rate": 1.0845817023468817e-05, + "loss": 1.0838, + "step": 7797 + }, + { + "epoch": 0.48893347545300647, + "grad_norm": 3.111041784286499, + "learning_rate": 1.0843793518708576e-05, + "loss": 1.1818, + "step": 7798 + }, + { + "epoch": 0.4889961753087968, + "grad_norm": 3.047562599182129, + "learning_rate": 1.084176997915025e-05, + "loss": 1.0055, + "step": 7799 + }, + { + "epoch": 0.4890588751645871, + "grad_norm": 3.1269967555999756, + "learning_rate": 1.0839746404877297e-05, + "loss": 1.0774, + "step": 7800 + }, + { + "epoch": 0.48912157502037745, + "grad_norm": 3.0557405948638916, + "learning_rate": 1.0837722795973162e-05, + "loss": 1.1343, + "step": 7801 + }, + { + "epoch": 0.4891842748761678, + "grad_norm": 3.2692675590515137, + "learning_rate": 1.0835699152521304e-05, + "loss": 1.0942, + "step": 7802 + }, + { + "epoch": 0.4892469747319581, + "grad_norm": 3.3101654052734375, + "learning_rate": 1.0833675474605177e-05, + "loss": 1.1743, + "step": 7803 + }, + { + "epoch": 0.48930967458774843, + "grad_norm": 3.1600427627563477, + "learning_rate": 1.0831651762308237e-05, + "loss": 1.0781, + "step": 7804 + }, + { + "epoch": 0.48937237444353876, + "grad_norm": 3.679231882095337, + "learning_rate": 1.082962801571394e-05, + "loss": 0.9861, + "step": 7805 + }, + { + "epoch": 
0.4894350742993291, + "grad_norm": 3.121166944503784, + "learning_rate": 1.0827604234905749e-05, + "loss": 1.0716, + "step": 7806 + }, + { + "epoch": 0.48949777415511947, + "grad_norm": 3.1355836391448975, + "learning_rate": 1.0825580419967123e-05, + "loss": 1.2311, + "step": 7807 + }, + { + "epoch": 0.4895604740109098, + "grad_norm": 3.140127658843994, + "learning_rate": 1.0823556570981524e-05, + "loss": 1.0838, + "step": 7808 + }, + { + "epoch": 0.4896231738667001, + "grad_norm": 3.0411465167999268, + "learning_rate": 1.0821532688032417e-05, + "loss": 1.0047, + "step": 7809 + }, + { + "epoch": 0.48968587372249045, + "grad_norm": 3.2531938552856445, + "learning_rate": 1.0819508771203263e-05, + "loss": 0.981, + "step": 7810 + }, + { + "epoch": 0.4897485735782808, + "grad_norm": 3.6457595825195312, + "learning_rate": 1.0817484820577537e-05, + "loss": 1.2914, + "step": 7811 + }, + { + "epoch": 0.4898112734340711, + "grad_norm": 3.102670431137085, + "learning_rate": 1.0815460836238695e-05, + "loss": 1.1244, + "step": 7812 + }, + { + "epoch": 0.48987397328986143, + "grad_norm": 3.3179450035095215, + "learning_rate": 1.081343681827022e-05, + "loss": 1.041, + "step": 7813 + }, + { + "epoch": 0.48993667314565176, + "grad_norm": 3.1325247287750244, + "learning_rate": 1.081141276675557e-05, + "loss": 1.0099, + "step": 7814 + }, + { + "epoch": 0.4899993730014421, + "grad_norm": 2.9010512828826904, + "learning_rate": 1.0809388681778223e-05, + "loss": 1.0606, + "step": 7815 + }, + { + "epoch": 0.4900620728572324, + "grad_norm": 2.7071211338043213, + "learning_rate": 1.080736456342165e-05, + "loss": 1.2113, + "step": 7816 + }, + { + "epoch": 0.49012477271302274, + "grad_norm": 3.212958574295044, + "learning_rate": 1.0805340411769327e-05, + "loss": 1.0995, + "step": 7817 + }, + { + "epoch": 0.49018747256881307, + "grad_norm": 2.836733818054199, + "learning_rate": 1.0803316226904733e-05, + "loss": 1.2228, + "step": 7818 + }, + { + "epoch": 0.49025017242460345, + "grad_norm": 2.8573858737945557, + "learning_rate": 1.0801292008911341e-05, + "loss": 1.1614, + "step": 7819 + }, + { + "epoch": 0.4903128722803938, + "grad_norm": 2.9323954582214355, + "learning_rate": 1.0799267757872633e-05, + "loss": 1.2135, + "step": 7820 + }, + { + "epoch": 0.4903755721361841, + "grad_norm": 2.8875250816345215, + "learning_rate": 1.0797243473872086e-05, + "loss": 1.357, + "step": 7821 + }, + { + "epoch": 0.49043827199197443, + "grad_norm": 3.6355032920837402, + "learning_rate": 1.0795219156993186e-05, + "loss": 1.1728, + "step": 7822 + }, + { + "epoch": 0.49050097184776476, + "grad_norm": 2.664679527282715, + "learning_rate": 1.079319480731941e-05, + "loss": 1.1105, + "step": 7823 + }, + { + "epoch": 0.4905636717035551, + "grad_norm": 3.170039176940918, + "learning_rate": 1.0791170424934248e-05, + "loss": 1.1056, + "step": 7824 + }, + { + "epoch": 0.4906263715593454, + "grad_norm": 3.4819281101226807, + "learning_rate": 1.078914600992118e-05, + "loss": 1.1717, + "step": 7825 + }, + { + "epoch": 0.49068907141513574, + "grad_norm": 3.022080659866333, + "learning_rate": 1.0787121562363701e-05, + "loss": 1.0124, + "step": 7826 + }, + { + "epoch": 0.49075177127092606, + "grad_norm": 3.3261520862579346, + "learning_rate": 1.078509708234529e-05, + "loss": 1.0616, + "step": 7827 + }, + { + "epoch": 0.4908144711267164, + "grad_norm": 3.280787467956543, + "learning_rate": 1.0783072569949443e-05, + "loss": 1.0607, + "step": 7828 + }, + { + "epoch": 0.4908771709825067, + "grad_norm": 3.324333429336548, + "learning_rate": 
1.0781048025259648e-05, + "loss": 1.2608, + "step": 7829 + }, + { + "epoch": 0.4909398708382971, + "grad_norm": 3.2859113216400146, + "learning_rate": 1.0779023448359399e-05, + "loss": 0.9882, + "step": 7830 + }, + { + "epoch": 0.4910025706940874, + "grad_norm": 3.0452587604522705, + "learning_rate": 1.077699883933219e-05, + "loss": 1.0883, + "step": 7831 + }, + { + "epoch": 0.49106527054987775, + "grad_norm": 2.7149927616119385, + "learning_rate": 1.0774974198261512e-05, + "loss": 1.0645, + "step": 7832 + }, + { + "epoch": 0.4911279704056681, + "grad_norm": 3.4188859462738037, + "learning_rate": 1.0772949525230867e-05, + "loss": 1.1978, + "step": 7833 + }, + { + "epoch": 0.4911906702614584, + "grad_norm": 3.3292765617370605, + "learning_rate": 1.0770924820323746e-05, + "loss": 1.1528, + "step": 7834 + }, + { + "epoch": 0.49125337011724873, + "grad_norm": 2.92388916015625, + "learning_rate": 1.0768900083623658e-05, + "loss": 1.0309, + "step": 7835 + }, + { + "epoch": 0.49131606997303906, + "grad_norm": 3.47278094291687, + "learning_rate": 1.076687531521409e-05, + "loss": 1.1866, + "step": 7836 + }, + { + "epoch": 0.4913787698288294, + "grad_norm": 3.3729782104492188, + "learning_rate": 1.0764850515178555e-05, + "loss": 1.1245, + "step": 7837 + }, + { + "epoch": 0.4914414696846197, + "grad_norm": 3.358417510986328, + "learning_rate": 1.0762825683600552e-05, + "loss": 1.1958, + "step": 7838 + }, + { + "epoch": 0.49150416954041004, + "grad_norm": 3.2158193588256836, + "learning_rate": 1.0760800820563581e-05, + "loss": 1.1982, + "step": 7839 + }, + { + "epoch": 0.49156686939620037, + "grad_norm": 2.6393368244171143, + "learning_rate": 1.0758775926151155e-05, + "loss": 1.251, + "step": 7840 + }, + { + "epoch": 0.4916295692519907, + "grad_norm": 2.905656099319458, + "learning_rate": 1.0756751000446775e-05, + "loss": 1.2007, + "step": 7841 + }, + { + "epoch": 0.4916922691077811, + "grad_norm": 3.107475996017456, + "learning_rate": 1.075472604353395e-05, + "loss": 1.1481, + "step": 7842 + }, + { + "epoch": 0.4917549689635714, + "grad_norm": 3.103966474533081, + "learning_rate": 1.0752701055496188e-05, + "loss": 1.0652, + "step": 7843 + }, + { + "epoch": 0.49181766881936173, + "grad_norm": 2.969116449356079, + "learning_rate": 1.0750676036417007e-05, + "loss": 1.0353, + "step": 7844 + }, + { + "epoch": 0.49188036867515206, + "grad_norm": 3.2381176948547363, + "learning_rate": 1.0748650986379909e-05, + "loss": 1.1362, + "step": 7845 + }, + { + "epoch": 0.4919430685309424, + "grad_norm": 3.098466157913208, + "learning_rate": 1.0746625905468413e-05, + "loss": 1.094, + "step": 7846 + }, + { + "epoch": 0.4920057683867327, + "grad_norm": 2.9506771564483643, + "learning_rate": 1.074460079376603e-05, + "loss": 1.0801, + "step": 7847 + }, + { + "epoch": 0.49206846824252304, + "grad_norm": 2.963610887527466, + "learning_rate": 1.0742575651356279e-05, + "loss": 1.2314, + "step": 7848 + }, + { + "epoch": 0.49213116809831337, + "grad_norm": 2.9132068157196045, + "learning_rate": 1.074055047832268e-05, + "loss": 1.0047, + "step": 7849 + }, + { + "epoch": 0.4921938679541037, + "grad_norm": 2.9057419300079346, + "learning_rate": 1.073852527474874e-05, + "loss": 1.1883, + "step": 7850 + }, + { + "epoch": 0.492256567809894, + "grad_norm": 3.0495009422302246, + "learning_rate": 1.0736500040717991e-05, + "loss": 1.1205, + "step": 7851 + }, + { + "epoch": 0.49231926766568435, + "grad_norm": 3.2519516944885254, + "learning_rate": 1.0734474776313944e-05, + "loss": 1.1504, + "step": 7852 + }, + { + "epoch": 
0.49238196752147473, + "grad_norm": 2.9269356727600098, + "learning_rate": 1.0732449481620127e-05, + "loss": 1.1062, + "step": 7853 + }, + { + "epoch": 0.49244466737726506, + "grad_norm": 3.1181693077087402, + "learning_rate": 1.073042415672006e-05, + "loss": 1.1641, + "step": 7854 + }, + { + "epoch": 0.4925073672330554, + "grad_norm": 2.8669466972351074, + "learning_rate": 1.0728398801697272e-05, + "loss": 1.0963, + "step": 7855 + }, + { + "epoch": 0.4925700670888457, + "grad_norm": 2.9480724334716797, + "learning_rate": 1.0726373416635284e-05, + "loss": 0.971, + "step": 7856 + }, + { + "epoch": 0.49263276694463604, + "grad_norm": 2.6686253547668457, + "learning_rate": 1.0724348001617626e-05, + "loss": 1.1374, + "step": 7857 + }, + { + "epoch": 0.49269546680042636, + "grad_norm": 3.1856980323791504, + "learning_rate": 1.0722322556727821e-05, + "loss": 0.9432, + "step": 7858 + }, + { + "epoch": 0.4927581666562167, + "grad_norm": 2.7823994159698486, + "learning_rate": 1.0720297082049406e-05, + "loss": 1.0482, + "step": 7859 + }, + { + "epoch": 0.492820866512007, + "grad_norm": 2.7753686904907227, + "learning_rate": 1.0718271577665907e-05, + "loss": 1.1971, + "step": 7860 + }, + { + "epoch": 0.49288356636779734, + "grad_norm": 3.3997251987457275, + "learning_rate": 1.0716246043660855e-05, + "loss": 1.3547, + "step": 7861 + }, + { + "epoch": 0.49294626622358767, + "grad_norm": 3.057718515396118, + "learning_rate": 1.0714220480117785e-05, + "loss": 1.1898, + "step": 7862 + }, + { + "epoch": 0.493008966079378, + "grad_norm": 3.152099609375, + "learning_rate": 1.0712194887120232e-05, + "loss": 1.0378, + "step": 7863 + }, + { + "epoch": 0.4930716659351683, + "grad_norm": 3.045632839202881, + "learning_rate": 1.0710169264751733e-05, + "loss": 1.2885, + "step": 7864 + }, + { + "epoch": 0.4931343657909587, + "grad_norm": 2.7151782512664795, + "learning_rate": 1.0708143613095818e-05, + "loss": 1.1971, + "step": 7865 + }, + { + "epoch": 0.49319706564674903, + "grad_norm": 2.609997034072876, + "learning_rate": 1.0706117932236033e-05, + "loss": 1.2296, + "step": 7866 + }, + { + "epoch": 0.49325976550253936, + "grad_norm": 3.631758689880371, + "learning_rate": 1.0704092222255914e-05, + "loss": 1.0833, + "step": 7867 + }, + { + "epoch": 0.4933224653583297, + "grad_norm": 3.0807037353515625, + "learning_rate": 1.0702066483239e-05, + "loss": 1.0869, + "step": 7868 + }, + { + "epoch": 0.49338516521412, + "grad_norm": 2.942897319793701, + "learning_rate": 1.0700040715268831e-05, + "loss": 1.1237, + "step": 7869 + }, + { + "epoch": 0.49344786506991034, + "grad_norm": 2.9472129344940186, + "learning_rate": 1.0698014918428956e-05, + "loss": 1.1311, + "step": 7870 + }, + { + "epoch": 0.49351056492570067, + "grad_norm": 3.242145538330078, + "learning_rate": 1.0695989092802914e-05, + "loss": 1.2076, + "step": 7871 + }, + { + "epoch": 0.493573264781491, + "grad_norm": 3.3200201988220215, + "learning_rate": 1.0693963238474249e-05, + "loss": 0.8682, + "step": 7872 + }, + { + "epoch": 0.4936359646372813, + "grad_norm": 2.8752148151397705, + "learning_rate": 1.0691937355526514e-05, + "loss": 1.2027, + "step": 7873 + }, + { + "epoch": 0.49369866449307165, + "grad_norm": 3.3346309661865234, + "learning_rate": 1.0689911444043249e-05, + "loss": 0.9852, + "step": 7874 + }, + { + "epoch": 0.493761364348862, + "grad_norm": 2.908409595489502, + "learning_rate": 1.0687885504108008e-05, + "loss": 1.2616, + "step": 7875 + }, + { + "epoch": 0.4938240642046523, + "grad_norm": 3.2152223587036133, + "learning_rate": 
1.0685859535804336e-05, + "loss": 1.2204, + "step": 7876 + }, + { + "epoch": 0.4938867640604427, + "grad_norm": 3.500920057296753, + "learning_rate": 1.0683833539215792e-05, + "loss": 1.138, + "step": 7877 + }, + { + "epoch": 0.493949463916233, + "grad_norm": 2.972515344619751, + "learning_rate": 1.0681807514425917e-05, + "loss": 1.1987, + "step": 7878 + }, + { + "epoch": 0.49401216377202334, + "grad_norm": 3.1774823665618896, + "learning_rate": 1.0679781461518275e-05, + "loss": 1.1186, + "step": 7879 + }, + { + "epoch": 0.49407486362781367, + "grad_norm": 3.2116384506225586, + "learning_rate": 1.0677755380576413e-05, + "loss": 1.1413, + "step": 7880 + }, + { + "epoch": 0.494137563483604, + "grad_norm": 3.0685973167419434, + "learning_rate": 1.0675729271683893e-05, + "loss": 1.1528, + "step": 7881 + }, + { + "epoch": 0.4942002633393943, + "grad_norm": 3.2574737071990967, + "learning_rate": 1.0673703134924263e-05, + "loss": 1.0441, + "step": 7882 + }, + { + "epoch": 0.49426296319518465, + "grad_norm": 2.80472993850708, + "learning_rate": 1.0671676970381088e-05, + "loss": 1.289, + "step": 7883 + }, + { + "epoch": 0.494325663050975, + "grad_norm": 2.9990906715393066, + "learning_rate": 1.0669650778137927e-05, + "loss": 1.1356, + "step": 7884 + }, + { + "epoch": 0.4943883629067653, + "grad_norm": 2.9582877159118652, + "learning_rate": 1.0667624558278338e-05, + "loss": 1.1733, + "step": 7885 + }, + { + "epoch": 0.4944510627625556, + "grad_norm": 3.034208059310913, + "learning_rate": 1.0665598310885886e-05, + "loss": 1.1474, + "step": 7886 + }, + { + "epoch": 0.49451376261834595, + "grad_norm": 3.2539162635803223, + "learning_rate": 1.0663572036044126e-05, + "loss": 1.0055, + "step": 7887 + }, + { + "epoch": 0.49457646247413634, + "grad_norm": 3.109266996383667, + "learning_rate": 1.0661545733836629e-05, + "loss": 0.9604, + "step": 7888 + }, + { + "epoch": 0.49463916232992666, + "grad_norm": 3.0859177112579346, + "learning_rate": 1.0659519404346955e-05, + "loss": 1.1959, + "step": 7889 + }, + { + "epoch": 0.494701862185717, + "grad_norm": 3.2308242321014404, + "learning_rate": 1.0657493047658673e-05, + "loss": 0.9693, + "step": 7890 + }, + { + "epoch": 0.4947645620415073, + "grad_norm": 3.1137092113494873, + "learning_rate": 1.0655466663855349e-05, + "loss": 1.1153, + "step": 7891 + }, + { + "epoch": 0.49482726189729764, + "grad_norm": 3.2407968044281006, + "learning_rate": 1.065344025302055e-05, + "loss": 1.1162, + "step": 7892 + }, + { + "epoch": 0.49488996175308797, + "grad_norm": 3.141775608062744, + "learning_rate": 1.0651413815237848e-05, + "loss": 1.0943, + "step": 7893 + }, + { + "epoch": 0.4949526616088783, + "grad_norm": 3.2822041511535645, + "learning_rate": 1.0649387350590812e-05, + "loss": 1.0716, + "step": 7894 + }, + { + "epoch": 0.4950153614646686, + "grad_norm": 3.100492477416992, + "learning_rate": 1.0647360859163011e-05, + "loss": 1.145, + "step": 7895 + }, + { + "epoch": 0.49507806132045895, + "grad_norm": 3.472259759902954, + "learning_rate": 1.0645334341038022e-05, + "loss": 1.0245, + "step": 7896 + }, + { + "epoch": 0.4951407611762493, + "grad_norm": 2.886634588241577, + "learning_rate": 1.0643307796299419e-05, + "loss": 1.2356, + "step": 7897 + }, + { + "epoch": 0.4952034610320396, + "grad_norm": 3.2329463958740234, + "learning_rate": 1.0641281225030768e-05, + "loss": 1.0403, + "step": 7898 + }, + { + "epoch": 0.49526616088782993, + "grad_norm": 3.092994451522827, + "learning_rate": 1.0639254627315658e-05, + "loss": 1.2142, + "step": 7899 + }, + { + "epoch": 
0.4953288607436203, + "grad_norm": 3.0202271938323975, + "learning_rate": 1.0637228003237653e-05, + "loss": 1.1153, + "step": 7900 + }, + { + "epoch": 0.49539156059941064, + "grad_norm": 3.0373618602752686, + "learning_rate": 1.0635201352880342e-05, + "loss": 0.97, + "step": 7901 + }, + { + "epoch": 0.49545426045520097, + "grad_norm": 3.3110692501068115, + "learning_rate": 1.0633174676327298e-05, + "loss": 1.0071, + "step": 7902 + }, + { + "epoch": 0.4955169603109913, + "grad_norm": 3.363330364227295, + "learning_rate": 1.0631147973662102e-05, + "loss": 1.0746, + "step": 7903 + }, + { + "epoch": 0.4955796601667816, + "grad_norm": 3.0159237384796143, + "learning_rate": 1.0629121244968337e-05, + "loss": 1.1412, + "step": 7904 + }, + { + "epoch": 0.49564236002257195, + "grad_norm": 2.977071523666382, + "learning_rate": 1.0627094490329584e-05, + "loss": 1.0506, + "step": 7905 + }, + { + "epoch": 0.4957050598783623, + "grad_norm": 2.9395196437835693, + "learning_rate": 1.0625067709829428e-05, + "loss": 1.1412, + "step": 7906 + }, + { + "epoch": 0.4957677597341526, + "grad_norm": 2.965139150619507, + "learning_rate": 1.0623040903551449e-05, + "loss": 1.0825, + "step": 7907 + }, + { + "epoch": 0.49583045958994293, + "grad_norm": 2.8765065670013428, + "learning_rate": 1.0621014071579241e-05, + "loss": 1.157, + "step": 7908 + }, + { + "epoch": 0.49589315944573326, + "grad_norm": 3.4060020446777344, + "learning_rate": 1.0618987213996381e-05, + "loss": 1.1446, + "step": 7909 + }, + { + "epoch": 0.4959558593015236, + "grad_norm": 3.1346209049224854, + "learning_rate": 1.0616960330886464e-05, + "loss": 1.1408, + "step": 7910 + }, + { + "epoch": 0.49601855915731397, + "grad_norm": 3.3008615970611572, + "learning_rate": 1.0614933422333073e-05, + "loss": 1.1536, + "step": 7911 + }, + { + "epoch": 0.4960812590131043, + "grad_norm": 2.9914674758911133, + "learning_rate": 1.0612906488419805e-05, + "loss": 1.1087, + "step": 7912 + }, + { + "epoch": 0.4961439588688946, + "grad_norm": 2.974302291870117, + "learning_rate": 1.0610879529230242e-05, + "loss": 1.1646, + "step": 7913 + }, + { + "epoch": 0.49620665872468495, + "grad_norm": 2.9677207469940186, + "learning_rate": 1.0608852544847983e-05, + "loss": 0.9658, + "step": 7914 + }, + { + "epoch": 0.4962693585804753, + "grad_norm": 3.1466221809387207, + "learning_rate": 1.060682553535662e-05, + "loss": 1.0057, + "step": 7915 + }, + { + "epoch": 0.4963320584362656, + "grad_norm": 3.186671018600464, + "learning_rate": 1.0604798500839741e-05, + "loss": 1.0432, + "step": 7916 + }, + { + "epoch": 0.4963947582920559, + "grad_norm": 3.2540998458862305, + "learning_rate": 1.0602771441380951e-05, + "loss": 1.0319, + "step": 7917 + }, + { + "epoch": 0.49645745814784625, + "grad_norm": 2.9170050621032715, + "learning_rate": 1.0600744357063837e-05, + "loss": 1.0728, + "step": 7918 + }, + { + "epoch": 0.4965201580036366, + "grad_norm": 3.0639500617980957, + "learning_rate": 1.0598717247972001e-05, + "loss": 1.098, + "step": 7919 + }, + { + "epoch": 0.4965828578594269, + "grad_norm": 3.0659968852996826, + "learning_rate": 1.0596690114189036e-05, + "loss": 1.1062, + "step": 7920 + }, + { + "epoch": 0.49664555771521723, + "grad_norm": 3.3851304054260254, + "learning_rate": 1.0594662955798549e-05, + "loss": 1.2612, + "step": 7921 + }, + { + "epoch": 0.49670825757100756, + "grad_norm": 3.023550510406494, + "learning_rate": 1.0592635772884135e-05, + "loss": 1.0745, + "step": 7922 + }, + { + "epoch": 0.49677095742679794, + "grad_norm": 3.2724876403808594, + "learning_rate": 
1.0590608565529393e-05, + "loss": 1.2135, + "step": 7923 + }, + { + "epoch": 0.49683365728258827, + "grad_norm": 3.2148842811584473, + "learning_rate": 1.0588581333817931e-05, + "loss": 0.9974, + "step": 7924 + }, + { + "epoch": 0.4968963571383786, + "grad_norm": 3.2049543857574463, + "learning_rate": 1.0586554077833346e-05, + "loss": 1.0934, + "step": 7925 + }, + { + "epoch": 0.4969590569941689, + "grad_norm": 2.806262493133545, + "learning_rate": 1.0584526797659249e-05, + "loss": 1.1571, + "step": 7926 + }, + { + "epoch": 0.49702175684995925, + "grad_norm": 2.818232774734497, + "learning_rate": 1.058249949337924e-05, + "loss": 1.1789, + "step": 7927 + }, + { + "epoch": 0.4970844567057496, + "grad_norm": 3.318767786026001, + "learning_rate": 1.0580472165076928e-05, + "loss": 1.1567, + "step": 7928 + }, + { + "epoch": 0.4971471565615399, + "grad_norm": 3.3121042251586914, + "learning_rate": 1.0578444812835914e-05, + "loss": 1.0716, + "step": 7929 + }, + { + "epoch": 0.49720985641733023, + "grad_norm": 3.2052018642425537, + "learning_rate": 1.0576417436739814e-05, + "loss": 1.1495, + "step": 7930 + }, + { + "epoch": 0.49727255627312056, + "grad_norm": 3.382112741470337, + "learning_rate": 1.0574390036872233e-05, + "loss": 1.1422, + "step": 7931 + }, + { + "epoch": 0.4973352561289109, + "grad_norm": 3.2026543617248535, + "learning_rate": 1.0572362613316782e-05, + "loss": 1.035, + "step": 7932 + }, + { + "epoch": 0.4973979559847012, + "grad_norm": 2.8079376220703125, + "learning_rate": 1.0570335166157071e-05, + "loss": 1.2343, + "step": 7933 + }, + { + "epoch": 0.4974606558404916, + "grad_norm": 2.957953691482544, + "learning_rate": 1.0568307695476712e-05, + "loss": 1.1058, + "step": 7934 + }, + { + "epoch": 0.4975233556962819, + "grad_norm": 3.3936240673065186, + "learning_rate": 1.056628020135932e-05, + "loss": 1.0556, + "step": 7935 + }, + { + "epoch": 0.49758605555207225, + "grad_norm": 3.0077006816864014, + "learning_rate": 1.0564252683888508e-05, + "loss": 0.915, + "step": 7936 + }, + { + "epoch": 0.4976487554078626, + "grad_norm": 3.0397560596466064, + "learning_rate": 1.0562225143147892e-05, + "loss": 1.1645, + "step": 7937 + }, + { + "epoch": 0.4977114552636529, + "grad_norm": 3.807051420211792, + "learning_rate": 1.0560197579221085e-05, + "loss": 0.9918, + "step": 7938 + }, + { + "epoch": 0.49777415511944323, + "grad_norm": 3.1526522636413574, + "learning_rate": 1.0558169992191708e-05, + "loss": 1.0442, + "step": 7939 + }, + { + "epoch": 0.49783685497523356, + "grad_norm": 3.260592460632324, + "learning_rate": 1.055614238214337e-05, + "loss": 1.2551, + "step": 7940 + }, + { + "epoch": 0.4978995548310239, + "grad_norm": 3.1793925762176514, + "learning_rate": 1.05541147491597e-05, + "loss": 0.9676, + "step": 7941 + }, + { + "epoch": 0.4979622546868142, + "grad_norm": 2.944709539413452, + "learning_rate": 1.0552087093324314e-05, + "loss": 1.1999, + "step": 7942 + }, + { + "epoch": 0.49802495454260454, + "grad_norm": 2.7713327407836914, + "learning_rate": 1.0550059414720833e-05, + "loss": 1.1522, + "step": 7943 + }, + { + "epoch": 0.49808765439839486, + "grad_norm": 3.1558361053466797, + "learning_rate": 1.0548031713432874e-05, + "loss": 1.0779, + "step": 7944 + }, + { + "epoch": 0.4981503542541852, + "grad_norm": 3.165053129196167, + "learning_rate": 1.0546003989544064e-05, + "loss": 1.01, + "step": 7945 + }, + { + "epoch": 0.4982130541099756, + "grad_norm": 3.345611095428467, + "learning_rate": 1.0543976243138028e-05, + "loss": 1.2962, + "step": 7946 + }, + { + "epoch": 
0.4982757539657659, + "grad_norm": 3.3544633388519287, + "learning_rate": 1.0541948474298387e-05, + "loss": 1.1041, + "step": 7947 + }, + { + "epoch": 0.4983384538215562, + "grad_norm": 2.794351816177368, + "learning_rate": 1.0539920683108769e-05, + "loss": 1.0869, + "step": 7948 + }, + { + "epoch": 0.49840115367734655, + "grad_norm": 3.159285306930542, + "learning_rate": 1.0537892869652795e-05, + "loss": 1.1384, + "step": 7949 + }, + { + "epoch": 0.4984638535331369, + "grad_norm": 3.3569717407226562, + "learning_rate": 1.0535865034014097e-05, + "loss": 1.0269, + "step": 7950 + }, + { + "epoch": 0.4985265533889272, + "grad_norm": 3.096529483795166, + "learning_rate": 1.05338371762763e-05, + "loss": 0.9756, + "step": 7951 + }, + { + "epoch": 0.49858925324471753, + "grad_norm": 3.0744729042053223, + "learning_rate": 1.0531809296523039e-05, + "loss": 1.0848, + "step": 7952 + }, + { + "epoch": 0.49865195310050786, + "grad_norm": 3.237750768661499, + "learning_rate": 1.0529781394837935e-05, + "loss": 0.9123, + "step": 7953 + }, + { + "epoch": 0.4987146529562982, + "grad_norm": 3.008416175842285, + "learning_rate": 1.0527753471304625e-05, + "loss": 1.0925, + "step": 7954 + }, + { + "epoch": 0.4987773528120885, + "grad_norm": 3.339799165725708, + "learning_rate": 1.0525725526006738e-05, + "loss": 0.9813, + "step": 7955 + }, + { + "epoch": 0.49884005266787884, + "grad_norm": 3.1456856727600098, + "learning_rate": 1.0523697559027907e-05, + "loss": 1.2094, + "step": 7956 + }, + { + "epoch": 0.49890275252366917, + "grad_norm": 3.0534722805023193, + "learning_rate": 1.0521669570451768e-05, + "loss": 1.1088, + "step": 7957 + }, + { + "epoch": 0.49896545237945955, + "grad_norm": 3.315077066421509, + "learning_rate": 1.051964156036195e-05, + "loss": 1.0159, + "step": 7958 + }, + { + "epoch": 0.4990281522352499, + "grad_norm": 3.5130906105041504, + "learning_rate": 1.0517613528842096e-05, + "loss": 1.1136, + "step": 7959 + }, + { + "epoch": 0.4990908520910402, + "grad_norm": 3.099605083465576, + "learning_rate": 1.0515585475975833e-05, + "loss": 1.2387, + "step": 7960 + }, + { + "epoch": 0.49915355194683053, + "grad_norm": 3.6862330436706543, + "learning_rate": 1.0513557401846805e-05, + "loss": 1.0873, + "step": 7961 + }, + { + "epoch": 0.49921625180262086, + "grad_norm": 3.0801048278808594, + "learning_rate": 1.0511529306538645e-05, + "loss": 1.1363, + "step": 7962 + }, + { + "epoch": 0.4992789516584112, + "grad_norm": 3.0513367652893066, + "learning_rate": 1.0509501190134998e-05, + "loss": 1.1462, + "step": 7963 + }, + { + "epoch": 0.4993416515142015, + "grad_norm": 3.055629014968872, + "learning_rate": 1.0507473052719497e-05, + "loss": 1.0775, + "step": 7964 + }, + { + "epoch": 0.49940435136999184, + "grad_norm": 3.1883370876312256, + "learning_rate": 1.0505444894375782e-05, + "loss": 1.1618, + "step": 7965 + }, + { + "epoch": 0.49946705122578217, + "grad_norm": 3.2034695148468018, + "learning_rate": 1.0503416715187504e-05, + "loss": 1.1694, + "step": 7966 + }, + { + "epoch": 0.4995297510815725, + "grad_norm": 3.176190137863159, + "learning_rate": 1.0501388515238293e-05, + "loss": 0.9474, + "step": 7967 + }, + { + "epoch": 0.4995924509373628, + "grad_norm": 2.763256311416626, + "learning_rate": 1.0499360294611802e-05, + "loss": 1.0172, + "step": 7968 + }, + { + "epoch": 0.4996551507931532, + "grad_norm": 2.7859227657318115, + "learning_rate": 1.049733205339167e-05, + "loss": 1.186, + "step": 7969 + }, + { + "epoch": 0.49971785064894353, + "grad_norm": 3.0144762992858887, + "learning_rate": 
1.049530379166154e-05, + "loss": 1.085, + "step": 7970 + }, + { + "epoch": 0.49978055050473386, + "grad_norm": 2.7248854637145996, + "learning_rate": 1.0493275509505061e-05, + "loss": 1.2147, + "step": 7971 + }, + { + "epoch": 0.4998432503605242, + "grad_norm": 2.884373903274536, + "learning_rate": 1.049124720700588e-05, + "loss": 1.1205, + "step": 7972 + }, + { + "epoch": 0.4999059502163145, + "grad_norm": 2.9111735820770264, + "learning_rate": 1.048921888424764e-05, + "loss": 1.1533, + "step": 7973 + }, + { + "epoch": 0.49996865007210484, + "grad_norm": 3.517615556716919, + "learning_rate": 1.0487190541313992e-05, + "loss": 1.0723, + "step": 7974 + }, + { + "epoch": 0.5000313499278952, + "grad_norm": 3.280893564224243, + "learning_rate": 1.0485162178288589e-05, + "loss": 0.983, + "step": 7975 + }, + { + "epoch": 0.5000940497836855, + "grad_norm": 2.9179177284240723, + "learning_rate": 1.0483133795255072e-05, + "loss": 1.1784, + "step": 7976 + }, + { + "epoch": 0.5001567496394759, + "grad_norm": 2.9976649284362793, + "learning_rate": 1.0481105392297098e-05, + "loss": 1.1972, + "step": 7977 + }, + { + "epoch": 0.5002194494952662, + "grad_norm": 3.1516709327697754, + "learning_rate": 1.0479076969498316e-05, + "loss": 1.066, + "step": 7978 + }, + { + "epoch": 0.5002821493510565, + "grad_norm": 2.923825740814209, + "learning_rate": 1.0477048526942379e-05, + "loss": 1.2829, + "step": 7979 + }, + { + "epoch": 0.5003448492068469, + "grad_norm": 3.1889872550964355, + "learning_rate": 1.047502006471294e-05, + "loss": 0.9619, + "step": 7980 + }, + { + "epoch": 0.5004075490626372, + "grad_norm": 3.1116936206817627, + "learning_rate": 1.0472991582893654e-05, + "loss": 1.2687, + "step": 7981 + }, + { + "epoch": 0.5004702489184275, + "grad_norm": 3.2242870330810547, + "learning_rate": 1.0470963081568172e-05, + "loss": 1.0221, + "step": 7982 + }, + { + "epoch": 0.5005329487742178, + "grad_norm": 2.8524794578552246, + "learning_rate": 1.0468934560820157e-05, + "loss": 1.1049, + "step": 7983 + }, + { + "epoch": 0.5005956486300082, + "grad_norm": 3.2122652530670166, + "learning_rate": 1.0466906020733253e-05, + "loss": 0.9367, + "step": 7984 + }, + { + "epoch": 0.5006583484857985, + "grad_norm": 2.965608596801758, + "learning_rate": 1.0464877461391127e-05, + "loss": 0.9152, + "step": 7985 + }, + { + "epoch": 0.5007210483415888, + "grad_norm": 3.5643422603607178, + "learning_rate": 1.0462848882877439e-05, + "loss": 0.9496, + "step": 7986 + }, + { + "epoch": 0.5007837481973791, + "grad_norm": 2.9401369094848633, + "learning_rate": 1.0460820285275836e-05, + "loss": 1.2729, + "step": 7987 + }, + { + "epoch": 0.5008464480531695, + "grad_norm": 3.311225175857544, + "learning_rate": 1.045879166866999e-05, + "loss": 1.0782, + "step": 7988 + }, + { + "epoch": 0.5009091479089598, + "grad_norm": 3.3010289669036865, + "learning_rate": 1.045676303314355e-05, + "loss": 1.1554, + "step": 7989 + }, + { + "epoch": 0.5009718477647501, + "grad_norm": 3.4795689582824707, + "learning_rate": 1.0454734378780188e-05, + "loss": 1.0588, + "step": 7990 + }, + { + "epoch": 0.5010345476205404, + "grad_norm": 3.326082706451416, + "learning_rate": 1.0452705705663558e-05, + "loss": 1.0954, + "step": 7991 + }, + { + "epoch": 0.5010972474763308, + "grad_norm": 3.44716215133667, + "learning_rate": 1.0450677013877324e-05, + "loss": 1.0266, + "step": 7992 + }, + { + "epoch": 0.5011599473321211, + "grad_norm": 2.8192801475524902, + "learning_rate": 1.044864830350515e-05, + "loss": 1.04, + "step": 7993 + }, + { + "epoch": 0.5012226471879114, + 
"grad_norm": 3.0325286388397217, + "learning_rate": 1.0446619574630703e-05, + "loss": 1.1731, + "step": 7994 + }, + { + "epoch": 0.5012853470437018, + "grad_norm": 3.2384791374206543, + "learning_rate": 1.0444590827337643e-05, + "loss": 1.102, + "step": 7995 + }, + { + "epoch": 0.5013480468994921, + "grad_norm": 3.484673261642456, + "learning_rate": 1.0442562061709638e-05, + "loss": 0.9116, + "step": 7996 + }, + { + "epoch": 0.5014107467552824, + "grad_norm": 3.164747714996338, + "learning_rate": 1.0440533277830355e-05, + "loss": 1.2068, + "step": 7997 + }, + { + "epoch": 0.5014734466110728, + "grad_norm": 3.0652239322662354, + "learning_rate": 1.043850447578346e-05, + "loss": 0.9715, + "step": 7998 + }, + { + "epoch": 0.5015361464668632, + "grad_norm": 3.3995485305786133, + "learning_rate": 1.0436475655652623e-05, + "loss": 1.0646, + "step": 7999 + }, + { + "epoch": 0.5015988463226535, + "grad_norm": 2.9345431327819824, + "learning_rate": 1.0434446817521507e-05, + "loss": 1.1787, + "step": 8000 + }, + { + "epoch": 0.5015988463226535, + "eval_loss": 1.1247245073318481, + "eval_runtime": 144.1637, + "eval_samples_per_second": 4.37, + "eval_steps_per_second": 1.096, + "step": 8000 + }, + { + "epoch": 0.5016615461784438, + "grad_norm": 3.0741889476776123, + "learning_rate": 1.0432417961473788e-05, + "loss": 1.0928, + "step": 8001 + }, + { + "epoch": 0.5017242460342342, + "grad_norm": 2.869981527328491, + "learning_rate": 1.0430389087593133e-05, + "loss": 1.1037, + "step": 8002 + }, + { + "epoch": 0.5017869458900245, + "grad_norm": 3.1132943630218506, + "learning_rate": 1.0428360195963214e-05, + "loss": 1.1249, + "step": 8003 + }, + { + "epoch": 0.5018496457458148, + "grad_norm": 3.410399913787842, + "learning_rate": 1.0426331286667701e-05, + "loss": 1.1571, + "step": 8004 + }, + { + "epoch": 0.5019123456016051, + "grad_norm": 2.9881787300109863, + "learning_rate": 1.0424302359790267e-05, + "loss": 1.0853, + "step": 8005 + }, + { + "epoch": 0.5019750454573955, + "grad_norm": 2.7841973304748535, + "learning_rate": 1.0422273415414586e-05, + "loss": 1.2319, + "step": 8006 + }, + { + "epoch": 0.5020377453131858, + "grad_norm": 3.4328136444091797, + "learning_rate": 1.0420244453624328e-05, + "loss": 1.144, + "step": 8007 + }, + { + "epoch": 0.5021004451689761, + "grad_norm": 2.8653790950775146, + "learning_rate": 1.0418215474503172e-05, + "loss": 1.2087, + "step": 8008 + }, + { + "epoch": 0.5021631450247664, + "grad_norm": 3.229492664337158, + "learning_rate": 1.0416186478134793e-05, + "loss": 1.0983, + "step": 8009 + }, + { + "epoch": 0.5022258448805568, + "grad_norm": 3.038813829421997, + "learning_rate": 1.0414157464602866e-05, + "loss": 1.0662, + "step": 8010 + }, + { + "epoch": 0.5022885447363471, + "grad_norm": 3.2251975536346436, + "learning_rate": 1.0412128433991064e-05, + "loss": 1.0152, + "step": 8011 + }, + { + "epoch": 0.5023512445921374, + "grad_norm": 2.9718096256256104, + "learning_rate": 1.041009938638307e-05, + "loss": 1.2284, + "step": 8012 + }, + { + "epoch": 0.5024139444479278, + "grad_norm": 2.919016122817993, + "learning_rate": 1.0408070321862558e-05, + "loss": 1.111, + "step": 8013 + }, + { + "epoch": 0.5024766443037181, + "grad_norm": 3.157529592514038, + "learning_rate": 1.040604124051321e-05, + "loss": 1.0809, + "step": 8014 + }, + { + "epoch": 0.5025393441595084, + "grad_norm": 2.974626302719116, + "learning_rate": 1.04040121424187e-05, + "loss": 1.0801, + "step": 8015 + }, + { + "epoch": 0.5026020440152987, + "grad_norm": 3.0426907539367676, + "learning_rate": 
1.0401983027662714e-05, + "loss": 0.9533, + "step": 8016 + }, + { + "epoch": 0.5026647438710891, + "grad_norm": 2.8992936611175537, + "learning_rate": 1.0399953896328932e-05, + "loss": 1.1683, + "step": 8017 + }, + { + "epoch": 0.5027274437268794, + "grad_norm": 3.265922784805298, + "learning_rate": 1.039792474850103e-05, + "loss": 1.0551, + "step": 8018 + }, + { + "epoch": 0.5027901435826697, + "grad_norm": 2.9561386108398438, + "learning_rate": 1.0395895584262696e-05, + "loss": 1.2179, + "step": 8019 + }, + { + "epoch": 0.50285284343846, + "grad_norm": 3.129359722137451, + "learning_rate": 1.039386640369761e-05, + "loss": 0.9884, + "step": 8020 + }, + { + "epoch": 0.5029155432942505, + "grad_norm": 2.9844672679901123, + "learning_rate": 1.039183720688946e-05, + "loss": 1.0401, + "step": 8021 + }, + { + "epoch": 0.5029782431500408, + "grad_norm": 2.985915422439575, + "learning_rate": 1.038980799392192e-05, + "loss": 0.8883, + "step": 8022 + }, + { + "epoch": 0.5030409430058311, + "grad_norm": 3.1711177825927734, + "learning_rate": 1.0387778764878687e-05, + "loss": 1.042, + "step": 8023 + }, + { + "epoch": 0.5031036428616215, + "grad_norm": 3.165659189224243, + "learning_rate": 1.0385749519843437e-05, + "loss": 1.0059, + "step": 8024 + }, + { + "epoch": 0.5031663427174118, + "grad_norm": 2.871058464050293, + "learning_rate": 1.0383720258899864e-05, + "loss": 1.0876, + "step": 8025 + }, + { + "epoch": 0.5032290425732021, + "grad_norm": 2.970308542251587, + "learning_rate": 1.0381690982131646e-05, + "loss": 1.1719, + "step": 8026 + }, + { + "epoch": 0.5032917424289924, + "grad_norm": 3.1967642307281494, + "learning_rate": 1.0379661689622477e-05, + "loss": 1.2481, + "step": 8027 + }, + { + "epoch": 0.5033544422847828, + "grad_norm": 3.1594271659851074, + "learning_rate": 1.0377632381456046e-05, + "loss": 0.9962, + "step": 8028 + }, + { + "epoch": 0.5034171421405731, + "grad_norm": 3.0660080909729004, + "learning_rate": 1.0375603057716035e-05, + "loss": 1.1151, + "step": 8029 + }, + { + "epoch": 0.5034798419963634, + "grad_norm": 3.0339696407318115, + "learning_rate": 1.037357371848614e-05, + "loss": 1.1763, + "step": 8030 + }, + { + "epoch": 0.5035425418521537, + "grad_norm": 2.9274091720581055, + "learning_rate": 1.0371544363850047e-05, + "loss": 1.1655, + "step": 8031 + }, + { + "epoch": 0.5036052417079441, + "grad_norm": 3.4276788234710693, + "learning_rate": 1.0369514993891451e-05, + "loss": 1.0432, + "step": 8032 + }, + { + "epoch": 0.5036679415637344, + "grad_norm": 3.1509790420532227, + "learning_rate": 1.0367485608694038e-05, + "loss": 1.0684, + "step": 8033 + }, + { + "epoch": 0.5037306414195247, + "grad_norm": 3.335385799407959, + "learning_rate": 1.0365456208341505e-05, + "loss": 1.0745, + "step": 8034 + }, + { + "epoch": 0.503793341275315, + "grad_norm": 3.134765863418579, + "learning_rate": 1.036342679291754e-05, + "loss": 1.0704, + "step": 8035 + }, + { + "epoch": 0.5038560411311054, + "grad_norm": 3.780078411102295, + "learning_rate": 1.0361397362505836e-05, + "loss": 1.1377, + "step": 8036 + }, + { + "epoch": 0.5039187409868957, + "grad_norm": 3.222750425338745, + "learning_rate": 1.0359367917190094e-05, + "loss": 1.1569, + "step": 8037 + }, + { + "epoch": 0.503981440842686, + "grad_norm": 2.965888261795044, + "learning_rate": 1.0357338457053998e-05, + "loss": 1.1512, + "step": 8038 + }, + { + "epoch": 0.5040441406984764, + "grad_norm": 2.955193519592285, + "learning_rate": 1.0355308982181254e-05, + "loss": 1.0957, + "step": 8039 + }, + { + "epoch": 0.5041068405542667, + 
"grad_norm": 2.9122910499572754, + "learning_rate": 1.0353279492655549e-05, + "loss": 1.2175, + "step": 8040 + }, + { + "epoch": 0.504169540410057, + "grad_norm": 2.7125022411346436, + "learning_rate": 1.0351249988560581e-05, + "loss": 1.0563, + "step": 8041 + }, + { + "epoch": 0.5042322402658473, + "grad_norm": 3.0568995475769043, + "learning_rate": 1.034922046998005e-05, + "loss": 1.0866, + "step": 8042 + }, + { + "epoch": 0.5042949401216377, + "grad_norm": 2.837599515914917, + "learning_rate": 1.0347190936997652e-05, + "loss": 1.0405, + "step": 8043 + }, + { + "epoch": 0.5043576399774281, + "grad_norm": 3.347506046295166, + "learning_rate": 1.0345161389697083e-05, + "loss": 1.02, + "step": 8044 + }, + { + "epoch": 0.5044203398332184, + "grad_norm": 2.8083336353302, + "learning_rate": 1.0343131828162045e-05, + "loss": 1.1845, + "step": 8045 + }, + { + "epoch": 0.5044830396890088, + "grad_norm": 3.0127811431884766, + "learning_rate": 1.0341102252476234e-05, + "loss": 1.1548, + "step": 8046 + }, + { + "epoch": 0.5045457395447991, + "grad_norm": 3.4329261779785156, + "learning_rate": 1.0339072662723351e-05, + "loss": 1.0608, + "step": 8047 + }, + { + "epoch": 0.5046084394005894, + "grad_norm": 3.347639560699463, + "learning_rate": 1.0337043058987097e-05, + "loss": 1.0572, + "step": 8048 + }, + { + "epoch": 0.5046711392563797, + "grad_norm": 3.102030038833618, + "learning_rate": 1.0335013441351173e-05, + "loss": 1.0667, + "step": 8049 + }, + { + "epoch": 0.5047338391121701, + "grad_norm": 3.1245152950286865, + "learning_rate": 1.0332983809899281e-05, + "loss": 1.1022, + "step": 8050 + }, + { + "epoch": 0.5047965389679604, + "grad_norm": 3.1917052268981934, + "learning_rate": 1.0330954164715119e-05, + "loss": 1.1651, + "step": 8051 + }, + { + "epoch": 0.5048592388237507, + "grad_norm": 3.113370656967163, + "learning_rate": 1.0328924505882395e-05, + "loss": 1.0282, + "step": 8052 + }, + { + "epoch": 0.504921938679541, + "grad_norm": 3.1179919242858887, + "learning_rate": 1.032689483348481e-05, + "loss": 1.0012, + "step": 8053 + }, + { + "epoch": 0.5049846385353314, + "grad_norm": 2.807656764984131, + "learning_rate": 1.032486514760607e-05, + "loss": 1.1712, + "step": 8054 + }, + { + "epoch": 0.5050473383911217, + "grad_norm": 3.1391329765319824, + "learning_rate": 1.0322835448329873e-05, + "loss": 0.9764, + "step": 8055 + }, + { + "epoch": 0.505110038246912, + "grad_norm": 3.2050087451934814, + "learning_rate": 1.032080573573993e-05, + "loss": 0.9364, + "step": 8056 + }, + { + "epoch": 0.5051727381027024, + "grad_norm": 3.1775002479553223, + "learning_rate": 1.0318776009919945e-05, + "loss": 0.9909, + "step": 8057 + }, + { + "epoch": 0.5052354379584927, + "grad_norm": 3.20876145362854, + "learning_rate": 1.0316746270953622e-05, + "loss": 1.2732, + "step": 8058 + }, + { + "epoch": 0.505298137814283, + "grad_norm": 3.1638083457946777, + "learning_rate": 1.031471651892467e-05, + "loss": 1.1282, + "step": 8059 + }, + { + "epoch": 0.5053608376700733, + "grad_norm": 2.7970457077026367, + "learning_rate": 1.0312686753916795e-05, + "loss": 1.0158, + "step": 8060 + }, + { + "epoch": 0.5054235375258637, + "grad_norm": 3.176100254058838, + "learning_rate": 1.0310656976013704e-05, + "loss": 1.0265, + "step": 8061 + }, + { + "epoch": 0.505486237381654, + "grad_norm": 3.340578317642212, + "learning_rate": 1.0308627185299105e-05, + "loss": 1.0629, + "step": 8062 + }, + { + "epoch": 0.5055489372374443, + "grad_norm": 3.1246438026428223, + "learning_rate": 1.0306597381856709e-05, + "loss": 1.2066, + "step": 
8063 + }, + { + "epoch": 0.5056116370932346, + "grad_norm": 3.111114263534546, + "learning_rate": 1.0304567565770222e-05, + "loss": 0.9562, + "step": 8064 + }, + { + "epoch": 0.505674336949025, + "grad_norm": 3.3743255138397217, + "learning_rate": 1.0302537737123358e-05, + "loss": 1.0267, + "step": 8065 + }, + { + "epoch": 0.5057370368048153, + "grad_norm": 3.3607778549194336, + "learning_rate": 1.030050789599982e-05, + "loss": 1.1299, + "step": 8066 + }, + { + "epoch": 0.5057997366606056, + "grad_norm": 2.5440196990966797, + "learning_rate": 1.0298478042483327e-05, + "loss": 1.0833, + "step": 8067 + }, + { + "epoch": 0.5058624365163961, + "grad_norm": 2.967095136642456, + "learning_rate": 1.0296448176657586e-05, + "loss": 1.0518, + "step": 8068 + }, + { + "epoch": 0.5059251363721864, + "grad_norm": 3.2400336265563965, + "learning_rate": 1.0294418298606307e-05, + "loss": 1.2132, + "step": 8069 + }, + { + "epoch": 0.5059878362279767, + "grad_norm": 3.230551242828369, + "learning_rate": 1.0292388408413207e-05, + "loss": 1.1547, + "step": 8070 + }, + { + "epoch": 0.506050536083767, + "grad_norm": 3.0525734424591064, + "learning_rate": 1.0290358506161994e-05, + "loss": 1.327, + "step": 8071 + }, + { + "epoch": 0.5061132359395574, + "grad_norm": 3.086571216583252, + "learning_rate": 1.0288328591936388e-05, + "loss": 1.2376, + "step": 8072 + }, + { + "epoch": 0.5061759357953477, + "grad_norm": 2.8325767517089844, + "learning_rate": 1.0286298665820094e-05, + "loss": 1.1573, + "step": 8073 + }, + { + "epoch": 0.506238635651138, + "grad_norm": 2.936654806137085, + "learning_rate": 1.0284268727896833e-05, + "loss": 1.0924, + "step": 8074 + }, + { + "epoch": 0.5063013355069284, + "grad_norm": 3.2727668285369873, + "learning_rate": 1.0282238778250315e-05, + "loss": 1.0279, + "step": 8075 + }, + { + "epoch": 0.5063640353627187, + "grad_norm": 2.8766825199127197, + "learning_rate": 1.0280208816964259e-05, + "loss": 1.2137, + "step": 8076 + }, + { + "epoch": 0.506426735218509, + "grad_norm": 3.0688254833221436, + "learning_rate": 1.027817884412238e-05, + "loss": 1.1326, + "step": 8077 + }, + { + "epoch": 0.5064894350742993, + "grad_norm": 2.7508435249328613, + "learning_rate": 1.027614885980839e-05, + "loss": 1.1315, + "step": 8078 + }, + { + "epoch": 0.5065521349300897, + "grad_norm": 2.881148099899292, + "learning_rate": 1.027411886410601e-05, + "loss": 1.2068, + "step": 8079 + }, + { + "epoch": 0.50661483478588, + "grad_norm": 2.857328176498413, + "learning_rate": 1.0272088857098958e-05, + "loss": 1.1012, + "step": 8080 + }, + { + "epoch": 0.5066775346416703, + "grad_norm": 2.9957618713378906, + "learning_rate": 1.0270058838870948e-05, + "loss": 0.9802, + "step": 8081 + }, + { + "epoch": 0.5067402344974606, + "grad_norm": 2.9717750549316406, + "learning_rate": 1.0268028809505702e-05, + "loss": 1.2121, + "step": 8082 + }, + { + "epoch": 0.506802934353251, + "grad_norm": 2.7182998657226562, + "learning_rate": 1.0265998769086935e-05, + "loss": 1.2219, + "step": 8083 + }, + { + "epoch": 0.5068656342090413, + "grad_norm": 3.222167491912842, + "learning_rate": 1.0263968717698365e-05, + "loss": 1.1342, + "step": 8084 + }, + { + "epoch": 0.5069283340648316, + "grad_norm": 2.9138343334198, + "learning_rate": 1.0261938655423718e-05, + "loss": 1.1698, + "step": 8085 + }, + { + "epoch": 0.506991033920622, + "grad_norm": 2.9762885570526123, + "learning_rate": 1.0259908582346705e-05, + "loss": 1.1244, + "step": 8086 + }, + { + "epoch": 0.5070537337764123, + "grad_norm": 3.037397861480713, + "learning_rate": 
1.025787849855105e-05, + "loss": 1.1057, + "step": 8087 + }, + { + "epoch": 0.5071164336322026, + "grad_norm": 2.8938632011413574, + "learning_rate": 1.0255848404120479e-05, + "loss": 1.1655, + "step": 8088 + }, + { + "epoch": 0.5071791334879929, + "grad_norm": 3.3265624046325684, + "learning_rate": 1.0253818299138705e-05, + "loss": 1.094, + "step": 8089 + }, + { + "epoch": 0.5072418333437833, + "grad_norm": 3.2240962982177734, + "learning_rate": 1.0251788183689457e-05, + "loss": 1.0791, + "step": 8090 + }, + { + "epoch": 0.5073045331995737, + "grad_norm": 2.957923650741577, + "learning_rate": 1.024975805785645e-05, + "loss": 0.9837, + "step": 8091 + }, + { + "epoch": 0.507367233055364, + "grad_norm": 2.9544143676757812, + "learning_rate": 1.0247727921723412e-05, + "loss": 1.1285, + "step": 8092 + }, + { + "epoch": 0.5074299329111543, + "grad_norm": 3.3899710178375244, + "learning_rate": 1.0245697775374061e-05, + "loss": 0.9696, + "step": 8093 + }, + { + "epoch": 0.5074926327669447, + "grad_norm": 2.977795362472534, + "learning_rate": 1.0243667618892124e-05, + "loss": 1.1785, + "step": 8094 + }, + { + "epoch": 0.507555332622735, + "grad_norm": 2.8282418251037598, + "learning_rate": 1.0241637452361323e-05, + "loss": 1.0821, + "step": 8095 + }, + { + "epoch": 0.5076180324785253, + "grad_norm": 3.0376877784729004, + "learning_rate": 1.0239607275865383e-05, + "loss": 1.0514, + "step": 8096 + }, + { + "epoch": 0.5076807323343157, + "grad_norm": 2.794109582901001, + "learning_rate": 1.0237577089488032e-05, + "loss": 1.2658, + "step": 8097 + }, + { + "epoch": 0.507743432190106, + "grad_norm": 2.9318740367889404, + "learning_rate": 1.0235546893312989e-05, + "loss": 1.1595, + "step": 8098 + }, + { + "epoch": 0.5078061320458963, + "grad_norm": 3.304901361465454, + "learning_rate": 1.0233516687423981e-05, + "loss": 1.0091, + "step": 8099 + }, + { + "epoch": 0.5078688319016866, + "grad_norm": 3.0703701972961426, + "learning_rate": 1.0231486471904737e-05, + "loss": 1.1284, + "step": 8100 + }, + { + "epoch": 0.507931531757477, + "grad_norm": 3.4066667556762695, + "learning_rate": 1.0229456246838978e-05, + "loss": 1.0674, + "step": 8101 + }, + { + "epoch": 0.5079942316132673, + "grad_norm": 3.4312756061553955, + "learning_rate": 1.0227426012310435e-05, + "loss": 1.3007, + "step": 8102 + }, + { + "epoch": 0.5080569314690576, + "grad_norm": 3.1070687770843506, + "learning_rate": 1.0225395768402833e-05, + "loss": 1.2062, + "step": 8103 + }, + { + "epoch": 0.5081196313248479, + "grad_norm": 3.21260929107666, + "learning_rate": 1.02233655151999e-05, + "loss": 0.9947, + "step": 8104 + }, + { + "epoch": 0.5081823311806383, + "grad_norm": 2.991726875305176, + "learning_rate": 1.0221335252785362e-05, + "loss": 1.0956, + "step": 8105 + }, + { + "epoch": 0.5082450310364286, + "grad_norm": 3.31485915184021, + "learning_rate": 1.021930498124295e-05, + "loss": 1.1773, + "step": 8106 + }, + { + "epoch": 0.5083077308922189, + "grad_norm": 3.279430627822876, + "learning_rate": 1.0217274700656393e-05, + "loss": 1.1042, + "step": 8107 + }, + { + "epoch": 0.5083704307480092, + "grad_norm": 3.420144557952881, + "learning_rate": 1.0215244411109413e-05, + "loss": 1.1051, + "step": 8108 + }, + { + "epoch": 0.5084331306037996, + "grad_norm": 3.2529678344726562, + "learning_rate": 1.0213214112685747e-05, + "loss": 1.1621, + "step": 8109 + }, + { + "epoch": 0.5084958304595899, + "grad_norm": 3.087344169616699, + "learning_rate": 1.0211183805469122e-05, + "loss": 1.0962, + "step": 8110 + }, + { + "epoch": 0.5085585303153802, + 
"grad_norm": 3.044154167175293, + "learning_rate": 1.0209153489543268e-05, + "loss": 0.9712, + "step": 8111 + }, + { + "epoch": 0.5086212301711706, + "grad_norm": 3.108684778213501, + "learning_rate": 1.0207123164991912e-05, + "loss": 0.999, + "step": 8112 + }, + { + "epoch": 0.5086839300269609, + "grad_norm": 3.128533124923706, + "learning_rate": 1.0205092831898789e-05, + "loss": 1.1323, + "step": 8113 + }, + { + "epoch": 0.5087466298827513, + "grad_norm": 3.4661083221435547, + "learning_rate": 1.020306249034763e-05, + "loss": 0.9638, + "step": 8114 + }, + { + "epoch": 0.5088093297385416, + "grad_norm": 2.755542516708374, + "learning_rate": 1.0201032140422162e-05, + "loss": 1.1634, + "step": 8115 + }, + { + "epoch": 0.508872029594332, + "grad_norm": 3.0148141384124756, + "learning_rate": 1.0199001782206124e-05, + "loss": 1.1391, + "step": 8116 + }, + { + "epoch": 0.5089347294501223, + "grad_norm": 3.372931718826294, + "learning_rate": 1.019697141578324e-05, + "loss": 1.1413, + "step": 8117 + }, + { + "epoch": 0.5089974293059126, + "grad_norm": 3.3116211891174316, + "learning_rate": 1.019494104123725e-05, + "loss": 1.1076, + "step": 8118 + }, + { + "epoch": 0.509060129161703, + "grad_norm": 2.9527533054351807, + "learning_rate": 1.019291065865188e-05, + "loss": 0.9363, + "step": 8119 + }, + { + "epoch": 0.5091228290174933, + "grad_norm": 3.1561481952667236, + "learning_rate": 1.0190880268110867e-05, + "loss": 1.0697, + "step": 8120 + }, + { + "epoch": 0.5091855288732836, + "grad_norm": 2.9467647075653076, + "learning_rate": 1.0188849869697943e-05, + "loss": 1.1882, + "step": 8121 + }, + { + "epoch": 0.5092482287290739, + "grad_norm": 3.503365993499756, + "learning_rate": 1.0186819463496843e-05, + "loss": 1.0404, + "step": 8122 + }, + { + "epoch": 0.5093109285848643, + "grad_norm": 3.0347952842712402, + "learning_rate": 1.01847890495913e-05, + "loss": 1.0062, + "step": 8123 + }, + { + "epoch": 0.5093736284406546, + "grad_norm": 3.226093053817749, + "learning_rate": 1.0182758628065047e-05, + "loss": 0.9855, + "step": 8124 + }, + { + "epoch": 0.5094363282964449, + "grad_norm": 3.0786943435668945, + "learning_rate": 1.0180728199001823e-05, + "loss": 1.2207, + "step": 8125 + }, + { + "epoch": 0.5094990281522352, + "grad_norm": 3.5301570892333984, + "learning_rate": 1.0178697762485357e-05, + "loss": 1.1609, + "step": 8126 + }, + { + "epoch": 0.5095617280080256, + "grad_norm": 3.056755542755127, + "learning_rate": 1.0176667318599392e-05, + "loss": 1.2288, + "step": 8127 + }, + { + "epoch": 0.5096244278638159, + "grad_norm": 2.9833309650421143, + "learning_rate": 1.0174636867427653e-05, + "loss": 1.2994, + "step": 8128 + }, + { + "epoch": 0.5096871277196062, + "grad_norm": 3.2460644245147705, + "learning_rate": 1.0172606409053887e-05, + "loss": 1.055, + "step": 8129 + }, + { + "epoch": 0.5097498275753966, + "grad_norm": 3.27040958404541, + "learning_rate": 1.0170575943561822e-05, + "loss": 1.2555, + "step": 8130 + }, + { + "epoch": 0.5098125274311869, + "grad_norm": 3.2897396087646484, + "learning_rate": 1.01685454710352e-05, + "loss": 1.2357, + "step": 8131 + }, + { + "epoch": 0.5098752272869772, + "grad_norm": 3.059267282485962, + "learning_rate": 1.0166514991557752e-05, + "loss": 1.1838, + "step": 8132 + }, + { + "epoch": 0.5099379271427675, + "grad_norm": 2.837062358856201, + "learning_rate": 1.0164484505213222e-05, + "loss": 1.0267, + "step": 8133 + }, + { + "epoch": 0.5100006269985579, + "grad_norm": 3.1839210987091064, + "learning_rate": 1.0162454012085343e-05, + "loss": 1.1783, + "step": 
8134 + }, + { + "epoch": 0.5100633268543482, + "grad_norm": 3.106231927871704, + "learning_rate": 1.016042351225785e-05, + "loss": 1.1455, + "step": 8135 + }, + { + "epoch": 0.5101260267101385, + "grad_norm": 3.093334197998047, + "learning_rate": 1.0158393005814492e-05, + "loss": 1.0479, + "step": 8136 + }, + { + "epoch": 0.510188726565929, + "grad_norm": 3.0103254318237305, + "learning_rate": 1.0156362492838991e-05, + "loss": 1.2116, + "step": 8137 + }, + { + "epoch": 0.5102514264217193, + "grad_norm": 2.9321069717407227, + "learning_rate": 1.01543319734151e-05, + "loss": 1.0813, + "step": 8138 + }, + { + "epoch": 0.5103141262775096, + "grad_norm": 3.386855363845825, + "learning_rate": 1.0152301447626549e-05, + "loss": 1.1237, + "step": 8139 + }, + { + "epoch": 0.5103768261332999, + "grad_norm": 3.223217010498047, + "learning_rate": 1.015027091555708e-05, + "loss": 0.9506, + "step": 8140 + }, + { + "epoch": 0.5104395259890903, + "grad_norm": 3.2939422130584717, + "learning_rate": 1.014824037729043e-05, + "loss": 1.0533, + "step": 8141 + }, + { + "epoch": 0.5105022258448806, + "grad_norm": 2.9335341453552246, + "learning_rate": 1.0146209832910346e-05, + "loss": 1.0995, + "step": 8142 + }, + { + "epoch": 0.5105649257006709, + "grad_norm": 3.324681520462036, + "learning_rate": 1.0144179282500558e-05, + "loss": 1.1315, + "step": 8143 + }, + { + "epoch": 0.5106276255564612, + "grad_norm": 3.0845284461975098, + "learning_rate": 1.0142148726144807e-05, + "loss": 1.0786, + "step": 8144 + }, + { + "epoch": 0.5106903254122516, + "grad_norm": 3.643886089324951, + "learning_rate": 1.0140118163926843e-05, + "loss": 1.1515, + "step": 8145 + }, + { + "epoch": 0.5107530252680419, + "grad_norm": 3.068225860595703, + "learning_rate": 1.0138087595930394e-05, + "loss": 1.2276, + "step": 8146 + }, + { + "epoch": 0.5108157251238322, + "grad_norm": 3.156705856323242, + "learning_rate": 1.0136057022239213e-05, + "loss": 1.2418, + "step": 8147 + }, + { + "epoch": 0.5108784249796225, + "grad_norm": 3.2366645336151123, + "learning_rate": 1.0134026442937028e-05, + "loss": 1.1699, + "step": 8148 + }, + { + "epoch": 0.5109411248354129, + "grad_norm": 3.0309603214263916, + "learning_rate": 1.013199585810759e-05, + "loss": 1.2251, + "step": 8149 + }, + { + "epoch": 0.5110038246912032, + "grad_norm": 3.829688310623169, + "learning_rate": 1.0129965267834636e-05, + "loss": 1.1502, + "step": 8150 + }, + { + "epoch": 0.5110665245469935, + "grad_norm": 2.7224674224853516, + "learning_rate": 1.012793467220191e-05, + "loss": 1.1365, + "step": 8151 + }, + { + "epoch": 0.5111292244027839, + "grad_norm": 2.9372899532318115, + "learning_rate": 1.012590407129315e-05, + "loss": 1.1274, + "step": 8152 + }, + { + "epoch": 0.5111919242585742, + "grad_norm": 2.9031002521514893, + "learning_rate": 1.0123873465192102e-05, + "loss": 1.0902, + "step": 8153 + }, + { + "epoch": 0.5112546241143645, + "grad_norm": 3.188891649246216, + "learning_rate": 1.0121842853982509e-05, + "loss": 1.102, + "step": 8154 + }, + { + "epoch": 0.5113173239701548, + "grad_norm": 3.104243278503418, + "learning_rate": 1.0119812237748108e-05, + "loss": 0.9669, + "step": 8155 + }, + { + "epoch": 0.5113800238259452, + "grad_norm": 2.8396215438842773, + "learning_rate": 1.0117781616572646e-05, + "loss": 0.9965, + "step": 8156 + }, + { + "epoch": 0.5114427236817355, + "grad_norm": 3.0564472675323486, + "learning_rate": 1.0115750990539865e-05, + "loss": 1.2106, + "step": 8157 + }, + { + "epoch": 0.5115054235375258, + "grad_norm": 2.8958370685577393, + "learning_rate": 
1.011372035973351e-05, + "loss": 1.0469, + "step": 8158 + }, + { + "epoch": 0.5115681233933161, + "grad_norm": 3.093052387237549, + "learning_rate": 1.011168972423732e-05, + "loss": 1.0071, + "step": 8159 + }, + { + "epoch": 0.5116308232491066, + "grad_norm": 3.1884312629699707, + "learning_rate": 1.0109659084135044e-05, + "loss": 1.16, + "step": 8160 + }, + { + "epoch": 0.5116935231048969, + "grad_norm": 3.0902130603790283, + "learning_rate": 1.010762843951042e-05, + "loss": 1.0718, + "step": 8161 + }, + { + "epoch": 0.5117562229606872, + "grad_norm": 2.858924627304077, + "learning_rate": 1.0105597790447195e-05, + "loss": 1.1233, + "step": 8162 + }, + { + "epoch": 0.5118189228164776, + "grad_norm": 3.0710830688476562, + "learning_rate": 1.0103567137029111e-05, + "loss": 0.9732, + "step": 8163 + }, + { + "epoch": 0.5118816226722679, + "grad_norm": 2.963315486907959, + "learning_rate": 1.0101536479339916e-05, + "loss": 1.1858, + "step": 8164 + }, + { + "epoch": 0.5119443225280582, + "grad_norm": 2.7407119274139404, + "learning_rate": 1.0099505817463351e-05, + "loss": 1.2093, + "step": 8165 + }, + { + "epoch": 0.5120070223838485, + "grad_norm": 2.9080328941345215, + "learning_rate": 1.009747515148316e-05, + "loss": 1.1809, + "step": 8166 + }, + { + "epoch": 0.5120697222396389, + "grad_norm": 3.425471782684326, + "learning_rate": 1.0095444481483094e-05, + "loss": 1.0372, + "step": 8167 + }, + { + "epoch": 0.5121324220954292, + "grad_norm": 3.2253973484039307, + "learning_rate": 1.009341380754689e-05, + "loss": 1.1762, + "step": 8168 + }, + { + "epoch": 0.5121951219512195, + "grad_norm": 3.2944889068603516, + "learning_rate": 1.0091383129758298e-05, + "loss": 1.0621, + "step": 8169 + }, + { + "epoch": 0.5122578218070098, + "grad_norm": 2.895552158355713, + "learning_rate": 1.0089352448201063e-05, + "loss": 1.0799, + "step": 8170 + }, + { + "epoch": 0.5123205216628002, + "grad_norm": 3.4567463397979736, + "learning_rate": 1.0087321762958927e-05, + "loss": 1.0786, + "step": 8171 + }, + { + "epoch": 0.5123832215185905, + "grad_norm": 2.846987247467041, + "learning_rate": 1.0085291074115637e-05, + "loss": 1.2045, + "step": 8172 + }, + { + "epoch": 0.5124459213743808, + "grad_norm": 3.123962879180908, + "learning_rate": 1.0083260381754942e-05, + "loss": 1.1582, + "step": 8173 + }, + { + "epoch": 0.5125086212301712, + "grad_norm": 3.4024808406829834, + "learning_rate": 1.0081229685960583e-05, + "loss": 1.0998, + "step": 8174 + }, + { + "epoch": 0.5125713210859615, + "grad_norm": 3.0867979526519775, + "learning_rate": 1.0079198986816307e-05, + "loss": 0.9147, + "step": 8175 + }, + { + "epoch": 0.5126340209417518, + "grad_norm": 3.465381145477295, + "learning_rate": 1.0077168284405866e-05, + "loss": 1.1179, + "step": 8176 + }, + { + "epoch": 0.5126967207975421, + "grad_norm": 3.325409173965454, + "learning_rate": 1.0075137578812998e-05, + "loss": 0.9193, + "step": 8177 + }, + { + "epoch": 0.5127594206533325, + "grad_norm": 3.188750743865967, + "learning_rate": 1.0073106870121454e-05, + "loss": 1.0393, + "step": 8178 + }, + { + "epoch": 0.5128221205091228, + "grad_norm": 3.237926721572876, + "learning_rate": 1.0071076158414977e-05, + "loss": 1.1145, + "step": 8179 + }, + { + "epoch": 0.5128848203649131, + "grad_norm": 2.8058884143829346, + "learning_rate": 1.0069045443777318e-05, + "loss": 1.2161, + "step": 8180 + }, + { + "epoch": 0.5129475202207034, + "grad_norm": 3.0540761947631836, + "learning_rate": 1.0067014726292223e-05, + "loss": 1.0976, + "step": 8181 + }, + { + "epoch": 0.5130102200764938, + 
"grad_norm": 2.8503775596618652, + "learning_rate": 1.0064984006043438e-05, + "loss": 1.2688, + "step": 8182 + }, + { + "epoch": 0.5130729199322842, + "grad_norm": 2.9586615562438965, + "learning_rate": 1.0062953283114706e-05, + "loss": 1.1541, + "step": 8183 + }, + { + "epoch": 0.5131356197880745, + "grad_norm": 2.9321327209472656, + "learning_rate": 1.006092255758978e-05, + "loss": 1.1584, + "step": 8184 + }, + { + "epoch": 0.5131983196438649, + "grad_norm": 3.452897071838379, + "learning_rate": 1.0058891829552408e-05, + "loss": 1.062, + "step": 8185 + }, + { + "epoch": 0.5132610194996552, + "grad_norm": 3.446528196334839, + "learning_rate": 1.0056861099086333e-05, + "loss": 1.1653, + "step": 8186 + }, + { + "epoch": 0.5133237193554455, + "grad_norm": 2.9634077548980713, + "learning_rate": 1.0054830366275303e-05, + "loss": 1.1554, + "step": 8187 + }, + { + "epoch": 0.5133864192112358, + "grad_norm": 3.0987510681152344, + "learning_rate": 1.0052799631203066e-05, + "loss": 1.0292, + "step": 8188 + }, + { + "epoch": 0.5134491190670262, + "grad_norm": 2.987661123275757, + "learning_rate": 1.005076889395337e-05, + "loss": 1.083, + "step": 8189 + }, + { + "epoch": 0.5135118189228165, + "grad_norm": 3.135873794555664, + "learning_rate": 1.004873815460996e-05, + "loss": 1.1552, + "step": 8190 + }, + { + "epoch": 0.5135745187786068, + "grad_norm": 3.2270097732543945, + "learning_rate": 1.0046707413256591e-05, + "loss": 1.0781, + "step": 8191 + }, + { + "epoch": 0.5136372186343972, + "grad_norm": 3.2964415550231934, + "learning_rate": 1.0044676669977005e-05, + "loss": 1.1561, + "step": 8192 + }, + { + "epoch": 0.5136999184901875, + "grad_norm": 3.0754103660583496, + "learning_rate": 1.004264592485495e-05, + "loss": 1.018, + "step": 8193 + }, + { + "epoch": 0.5137626183459778, + "grad_norm": 2.904356002807617, + "learning_rate": 1.0040615177974176e-05, + "loss": 0.9638, + "step": 8194 + }, + { + "epoch": 0.5138253182017681, + "grad_norm": 3.545501947402954, + "learning_rate": 1.0038584429418428e-05, + "loss": 1.1797, + "step": 8195 + }, + { + "epoch": 0.5138880180575585, + "grad_norm": 2.9867911338806152, + "learning_rate": 1.003655367927146e-05, + "loss": 1.0482, + "step": 8196 + }, + { + "epoch": 0.5139507179133488, + "grad_norm": 3.10212779045105, + "learning_rate": 1.0034522927617014e-05, + "loss": 1.0295, + "step": 8197 + }, + { + "epoch": 0.5140134177691391, + "grad_norm": 3.0073792934417725, + "learning_rate": 1.0032492174538844e-05, + "loss": 1.0391, + "step": 8198 + }, + { + "epoch": 0.5140761176249294, + "grad_norm": 3.1542112827301025, + "learning_rate": 1.0030461420120691e-05, + "loss": 1.1473, + "step": 8199 + }, + { + "epoch": 0.5141388174807198, + "grad_norm": 3.117417573928833, + "learning_rate": 1.002843066444631e-05, + "loss": 0.9532, + "step": 8200 + }, + { + "epoch": 0.5142015173365101, + "grad_norm": 2.998634099960327, + "learning_rate": 1.0026399907599447e-05, + "loss": 0.9647, + "step": 8201 + }, + { + "epoch": 0.5142642171923004, + "grad_norm": 3.0678114891052246, + "learning_rate": 1.0024369149663853e-05, + "loss": 1.1395, + "step": 8202 + }, + { + "epoch": 0.5143269170480907, + "grad_norm": 3.4665050506591797, + "learning_rate": 1.0022338390723269e-05, + "loss": 0.9731, + "step": 8203 + }, + { + "epoch": 0.5143896169038811, + "grad_norm": 3.1210460662841797, + "learning_rate": 1.0020307630861452e-05, + "loss": 1.2438, + "step": 8204 + }, + { + "epoch": 0.5144523167596714, + "grad_norm": 3.1273932456970215, + "learning_rate": 1.0018276870162149e-05, + "loss": 1.2741, + 
"step": 8205 + }, + { + "epoch": 0.5145150166154618, + "grad_norm": 2.9292373657226562, + "learning_rate": 1.0016246108709105e-05, + "loss": 1.1106, + "step": 8206 + }, + { + "epoch": 0.5145777164712522, + "grad_norm": 3.4925501346588135, + "learning_rate": 1.001421534658607e-05, + "loss": 1.1091, + "step": 8207 + }, + { + "epoch": 0.5146404163270425, + "grad_norm": 3.077148199081421, + "learning_rate": 1.0012184583876794e-05, + "loss": 1.1295, + "step": 8208 + }, + { + "epoch": 0.5147031161828328, + "grad_norm": 3.1944503784179688, + "learning_rate": 1.0010153820665028e-05, + "loss": 1.1711, + "step": 8209 + }, + { + "epoch": 0.5147658160386231, + "grad_norm": 3.1091597080230713, + "learning_rate": 1.0008123057034514e-05, + "loss": 1.1355, + "step": 8210 + }, + { + "epoch": 0.5148285158944135, + "grad_norm": 2.7352511882781982, + "learning_rate": 1.0006092293069007e-05, + "loss": 1.1266, + "step": 8211 + }, + { + "epoch": 0.5148912157502038, + "grad_norm": 3.0000619888305664, + "learning_rate": 1.0004061528852254e-05, + "loss": 1.1383, + "step": 8212 + }, + { + "epoch": 0.5149539156059941, + "grad_norm": 2.931187152862549, + "learning_rate": 1.0002030764468003e-05, + "loss": 1.039, + "step": 8213 + }, + { + "epoch": 0.5150166154617845, + "grad_norm": 3.117969274520874, + "learning_rate": 1e-05, + "loss": 0.9743, + "step": 8214 + }, + { + "epoch": 0.5150793153175748, + "grad_norm": 3.0044569969177246, + "learning_rate": 9.997969235532e-06, + "loss": 1.087, + "step": 8215 + }, + { + "epoch": 0.5151420151733651, + "grad_norm": 2.869117259979248, + "learning_rate": 9.995938471147751e-06, + "loss": 1.1597, + "step": 8216 + }, + { + "epoch": 0.5152047150291554, + "grad_norm": 2.948371648788452, + "learning_rate": 9.993907706930996e-06, + "loss": 1.0942, + "step": 8217 + }, + { + "epoch": 0.5152674148849458, + "grad_norm": 3.2855255603790283, + "learning_rate": 9.991876942965488e-06, + "loss": 1.028, + "step": 8218 + }, + { + "epoch": 0.5153301147407361, + "grad_norm": 2.905437469482422, + "learning_rate": 9.989846179334976e-06, + "loss": 1.1815, + "step": 8219 + }, + { + "epoch": 0.5153928145965264, + "grad_norm": 2.9872021675109863, + "learning_rate": 9.987815416123208e-06, + "loss": 1.0355, + "step": 8220 + }, + { + "epoch": 0.5154555144523167, + "grad_norm": 3.0734310150146484, + "learning_rate": 9.985784653413931e-06, + "loss": 1.1848, + "step": 8221 + }, + { + "epoch": 0.5155182143081071, + "grad_norm": 2.886183977127075, + "learning_rate": 9.983753891290899e-06, + "loss": 1.0983, + "step": 8222 + }, + { + "epoch": 0.5155809141638974, + "grad_norm": 3.0323779582977295, + "learning_rate": 9.981723129837855e-06, + "loss": 0.9922, + "step": 8223 + }, + { + "epoch": 0.5156436140196877, + "grad_norm": 3.154545783996582, + "learning_rate": 9.979692369138551e-06, + "loss": 0.9928, + "step": 8224 + }, + { + "epoch": 0.515706313875478, + "grad_norm": 3.0583245754241943, + "learning_rate": 9.977661609276733e-06, + "loss": 1.0543, + "step": 8225 + }, + { + "epoch": 0.5157690137312684, + "grad_norm": 2.7974088191986084, + "learning_rate": 9.97563085033615e-06, + "loss": 1.1683, + "step": 8226 + }, + { + "epoch": 0.5158317135870587, + "grad_norm": 2.8510031700134277, + "learning_rate": 9.973600092400556e-06, + "loss": 1.0544, + "step": 8227 + }, + { + "epoch": 0.515894413442849, + "grad_norm": 3.072143316268921, + "learning_rate": 9.97156933555369e-06, + "loss": 1.0065, + "step": 8228 + }, + { + "epoch": 0.5159571132986394, + "grad_norm": 3.199946403503418, + "learning_rate": 9.969538579879312e-06, + 
"loss": 0.9976, + "step": 8229 + }, + { + "epoch": 0.5160198131544298, + "grad_norm": 2.845902442932129, + "learning_rate": 9.967507825461161e-06, + "loss": 1.0875, + "step": 8230 + }, + { + "epoch": 0.5160825130102201, + "grad_norm": 3.192927122116089, + "learning_rate": 9.965477072382989e-06, + "loss": 1.2291, + "step": 8231 + }, + { + "epoch": 0.5161452128660104, + "grad_norm": 3.6229777336120605, + "learning_rate": 9.963446320728544e-06, + "loss": 0.9932, + "step": 8232 + }, + { + "epoch": 0.5162079127218008, + "grad_norm": 3.4820103645324707, + "learning_rate": 9.961415570581575e-06, + "loss": 1.1802, + "step": 8233 + }, + { + "epoch": 0.5162706125775911, + "grad_norm": 2.838735342025757, + "learning_rate": 9.959384822025829e-06, + "loss": 1.203, + "step": 8234 + }, + { + "epoch": 0.5163333124333814, + "grad_norm": 3.1762568950653076, + "learning_rate": 9.957354075145053e-06, + "loss": 1.1561, + "step": 8235 + }, + { + "epoch": 0.5163960122891718, + "grad_norm": 3.3974316120147705, + "learning_rate": 9.955323330022999e-06, + "loss": 1.0592, + "step": 8236 + }, + { + "epoch": 0.5164587121449621, + "grad_norm": 3.23239803314209, + "learning_rate": 9.953292586743412e-06, + "loss": 1.2175, + "step": 8237 + }, + { + "epoch": 0.5165214120007524, + "grad_norm": 3.1875641345977783, + "learning_rate": 9.951261845390041e-06, + "loss": 1.0479, + "step": 8238 + }, + { + "epoch": 0.5165841118565427, + "grad_norm": 2.8283262252807617, + "learning_rate": 9.949231106046632e-06, + "loss": 1.0583, + "step": 8239 + }, + { + "epoch": 0.5166468117123331, + "grad_norm": 2.9097938537597656, + "learning_rate": 9.94720036879694e-06, + "loss": 1.137, + "step": 8240 + }, + { + "epoch": 0.5167095115681234, + "grad_norm": 3.060451030731201, + "learning_rate": 9.9451696337247e-06, + "loss": 1.0545, + "step": 8241 + }, + { + "epoch": 0.5167722114239137, + "grad_norm": 3.2001731395721436, + "learning_rate": 9.943138900913672e-06, + "loss": 1.2032, + "step": 8242 + }, + { + "epoch": 0.516834911279704, + "grad_norm": 3.1278493404388428, + "learning_rate": 9.941108170447594e-06, + "loss": 0.9866, + "step": 8243 + }, + { + "epoch": 0.5168976111354944, + "grad_norm": 2.964472770690918, + "learning_rate": 9.939077442410221e-06, + "loss": 1.2389, + "step": 8244 + }, + { + "epoch": 0.5169603109912847, + "grad_norm": 3.3007795810699463, + "learning_rate": 9.937046716885295e-06, + "loss": 1.0722, + "step": 8245 + }, + { + "epoch": 0.517023010847075, + "grad_norm": 2.8989458084106445, + "learning_rate": 9.935015993956565e-06, + "loss": 1.0798, + "step": 8246 + }, + { + "epoch": 0.5170857107028654, + "grad_norm": 3.132176399230957, + "learning_rate": 9.93298527370778e-06, + "loss": 1.0172, + "step": 8247 + }, + { + "epoch": 0.5171484105586557, + "grad_norm": 3.1882219314575195, + "learning_rate": 9.930954556222683e-06, + "loss": 1.1082, + "step": 8248 + }, + { + "epoch": 0.517211110414446, + "grad_norm": 3.0428216457366943, + "learning_rate": 9.928923841585025e-06, + "loss": 1.0809, + "step": 8249 + }, + { + "epoch": 0.5172738102702363, + "grad_norm": 2.9191243648529053, + "learning_rate": 9.926893129878549e-06, + "loss": 1.1439, + "step": 8250 + }, + { + "epoch": 0.5173365101260267, + "grad_norm": 2.894558906555176, + "learning_rate": 9.924862421187008e-06, + "loss": 1.2378, + "step": 8251 + }, + { + "epoch": 0.517399209981817, + "grad_norm": 2.96362042427063, + "learning_rate": 9.92283171559414e-06, + "loss": 1.1825, + "step": 8252 + }, + { + "epoch": 0.5174619098376074, + "grad_norm": 3.2430129051208496, + "learning_rate": 
9.920801013183696e-06, + "loss": 1.0623, + "step": 8253 + }, + { + "epoch": 0.5175246096933978, + "grad_norm": 2.947166681289673, + "learning_rate": 9.91877031403942e-06, + "loss": 1.2122, + "step": 8254 + }, + { + "epoch": 0.5175873095491881, + "grad_norm": 3.1756374835968018, + "learning_rate": 9.916739618245062e-06, + "loss": 1.0506, + "step": 8255 + }, + { + "epoch": 0.5176500094049784, + "grad_norm": 3.035597085952759, + "learning_rate": 9.914708925884366e-06, + "loss": 1.1019, + "step": 8256 + }, + { + "epoch": 0.5177127092607687, + "grad_norm": 2.8215816020965576, + "learning_rate": 9.912678237041075e-06, + "loss": 1.1059, + "step": 8257 + }, + { + "epoch": 0.5177754091165591, + "grad_norm": 3.2411320209503174, + "learning_rate": 9.910647551798942e-06, + "loss": 1.1761, + "step": 8258 + }, + { + "epoch": 0.5178381089723494, + "grad_norm": 3.6020853519439697, + "learning_rate": 9.908616870241703e-06, + "loss": 1.1606, + "step": 8259 + }, + { + "epoch": 0.5179008088281397, + "grad_norm": 2.922968626022339, + "learning_rate": 9.906586192453112e-06, + "loss": 1.2781, + "step": 8260 + }, + { + "epoch": 0.51796350868393, + "grad_norm": 3.0035600662231445, + "learning_rate": 9.90455551851691e-06, + "loss": 1.0699, + "step": 8261 + }, + { + "epoch": 0.5180262085397204, + "grad_norm": 3.0342886447906494, + "learning_rate": 9.902524848516842e-06, + "loss": 0.9776, + "step": 8262 + }, + { + "epoch": 0.5180889083955107, + "grad_norm": 3.3748764991760254, + "learning_rate": 9.900494182536652e-06, + "loss": 1.1515, + "step": 8263 + }, + { + "epoch": 0.518151608251301, + "grad_norm": 3.118067502975464, + "learning_rate": 9.898463520660088e-06, + "loss": 1.1697, + "step": 8264 + }, + { + "epoch": 0.5182143081070913, + "grad_norm": 2.921452522277832, + "learning_rate": 9.896432862970892e-06, + "loss": 1.1925, + "step": 8265 + }, + { + "epoch": 0.5182770079628817, + "grad_norm": 2.7468318939208984, + "learning_rate": 9.894402209552809e-06, + "loss": 1.0947, + "step": 8266 + }, + { + "epoch": 0.518339707818672, + "grad_norm": 3.4613709449768066, + "learning_rate": 9.892371560489585e-06, + "loss": 1.0513, + "step": 8267 + }, + { + "epoch": 0.5184024076744623, + "grad_norm": 2.8924400806427, + "learning_rate": 9.890340915864959e-06, + "loss": 1.0871, + "step": 8268 + }, + { + "epoch": 0.5184651075302527, + "grad_norm": 3.2263503074645996, + "learning_rate": 9.888310275762683e-06, + "loss": 1.0089, + "step": 8269 + }, + { + "epoch": 0.518527807386043, + "grad_norm": 2.9559667110443115, + "learning_rate": 9.886279640266492e-06, + "loss": 1.0685, + "step": 8270 + }, + { + "epoch": 0.5185905072418333, + "grad_norm": 3.233158826828003, + "learning_rate": 9.884249009460137e-06, + "loss": 0.9266, + "step": 8271 + }, + { + "epoch": 0.5186532070976236, + "grad_norm": 3.224656343460083, + "learning_rate": 9.882218383427356e-06, + "loss": 1.3394, + "step": 8272 + }, + { + "epoch": 0.518715906953414, + "grad_norm": 3.333197593688965, + "learning_rate": 9.880187762251895e-06, + "loss": 1.0652, + "step": 8273 + }, + { + "epoch": 0.5187786068092043, + "grad_norm": 3.275439739227295, + "learning_rate": 9.878157146017495e-06, + "loss": 0.9758, + "step": 8274 + }, + { + "epoch": 0.5188413066649946, + "grad_norm": 3.19234561920166, + "learning_rate": 9.876126534807902e-06, + "loss": 1.0809, + "step": 8275 + }, + { + "epoch": 0.518904006520785, + "grad_norm": 3.0009398460388184, + "learning_rate": 9.874095928706853e-06, + "loss": 1.1335, + "step": 8276 + }, + { + "epoch": 0.5189667063765754, + "grad_norm": 
3.306591272354126, + "learning_rate": 9.872065327798092e-06, + "loss": 0.9644, + "step": 8277 + }, + { + "epoch": 0.5190294062323657, + "grad_norm": 3.11580228805542, + "learning_rate": 9.870034732165366e-06, + "loss": 1.1476, + "step": 8278 + }, + { + "epoch": 0.519092106088156, + "grad_norm": 3.369381904602051, + "learning_rate": 9.868004141892412e-06, + "loss": 1.0899, + "step": 8279 + }, + { + "epoch": 0.5191548059439464, + "grad_norm": 3.126821517944336, + "learning_rate": 9.865973557062974e-06, + "loss": 1.1098, + "step": 8280 + }, + { + "epoch": 0.5192175057997367, + "grad_norm": 3.477429151535034, + "learning_rate": 9.863942977760792e-06, + "loss": 1.3328, + "step": 8281 + }, + { + "epoch": 0.519280205655527, + "grad_norm": 3.1178901195526123, + "learning_rate": 9.861912404069608e-06, + "loss": 1.3105, + "step": 8282 + }, + { + "epoch": 0.5193429055113173, + "grad_norm": 2.80423641204834, + "learning_rate": 9.85988183607316e-06, + "loss": 1.0336, + "step": 8283 + }, + { + "epoch": 0.5194056053671077, + "grad_norm": 3.592484951019287, + "learning_rate": 9.857851273855195e-06, + "loss": 1.0955, + "step": 8284 + }, + { + "epoch": 0.519468305222898, + "grad_norm": 3.174903631210327, + "learning_rate": 9.855820717499447e-06, + "loss": 1.3566, + "step": 8285 + }, + { + "epoch": 0.5195310050786883, + "grad_norm": 3.22186541557312, + "learning_rate": 9.853790167089658e-06, + "loss": 1.095, + "step": 8286 + }, + { + "epoch": 0.5195937049344787, + "grad_norm": 3.0474483966827393, + "learning_rate": 9.851759622709572e-06, + "loss": 1.0248, + "step": 8287 + }, + { + "epoch": 0.519656404790269, + "grad_norm": 3.251039505004883, + "learning_rate": 9.849729084442923e-06, + "loss": 0.9656, + "step": 8288 + }, + { + "epoch": 0.5197191046460593, + "grad_norm": 3.3126368522644043, + "learning_rate": 9.847698552373455e-06, + "loss": 1.0453, + "step": 8289 + }, + { + "epoch": 0.5197818045018496, + "grad_norm": 3.010993003845215, + "learning_rate": 9.845668026584904e-06, + "loss": 1.1354, + "step": 8290 + }, + { + "epoch": 0.51984450435764, + "grad_norm": 3.309410572052002, + "learning_rate": 9.84363750716101e-06, + "loss": 1.0407, + "step": 8291 + }, + { + "epoch": 0.5199072042134303, + "grad_norm": 3.004120111465454, + "learning_rate": 9.841606994185513e-06, + "loss": 1.1587, + "step": 8292 + }, + { + "epoch": 0.5199699040692206, + "grad_norm": 2.9214348793029785, + "learning_rate": 9.839576487742151e-06, + "loss": 1.1809, + "step": 8293 + }, + { + "epoch": 0.5200326039250109, + "grad_norm": 3.067058563232422, + "learning_rate": 9.83754598791466e-06, + "loss": 1.0745, + "step": 8294 + }, + { + "epoch": 0.5200953037808013, + "grad_norm": 2.8423991203308105, + "learning_rate": 9.835515494786783e-06, + "loss": 1.0174, + "step": 8295 + }, + { + "epoch": 0.5201580036365916, + "grad_norm": 3.6544876098632812, + "learning_rate": 9.83348500844225e-06, + "loss": 1.0595, + "step": 8296 + }, + { + "epoch": 0.5202207034923819, + "grad_norm": 3.356647253036499, + "learning_rate": 9.831454528964803e-06, + "loss": 1.0626, + "step": 8297 + }, + { + "epoch": 0.5202834033481722, + "grad_norm": 3.7586758136749268, + "learning_rate": 9.829424056438181e-06, + "loss": 1.1927, + "step": 8298 + }, + { + "epoch": 0.5203461032039627, + "grad_norm": 3.174746513366699, + "learning_rate": 9.827393590946116e-06, + "loss": 0.9801, + "step": 8299 + }, + { + "epoch": 0.520408803059753, + "grad_norm": 3.2346861362457275, + "learning_rate": 9.82536313257235e-06, + "loss": 1.1504, + "step": 8300 + }, + { + "epoch": 0.5204715029155433, + 
"grad_norm": 3.245770215988159, + "learning_rate": 9.823332681400612e-06, + "loss": 0.9482, + "step": 8301 + }, + { + "epoch": 0.5205342027713337, + "grad_norm": 3.1178994178771973, + "learning_rate": 9.821302237514646e-06, + "loss": 1.0582, + "step": 8302 + }, + { + "epoch": 0.520596902627124, + "grad_norm": 3.0601861476898193, + "learning_rate": 9.81927180099818e-06, + "loss": 1.0749, + "step": 8303 + }, + { + "epoch": 0.5206596024829143, + "grad_norm": 2.9196932315826416, + "learning_rate": 9.817241371934956e-06, + "loss": 1.2022, + "step": 8304 + }, + { + "epoch": 0.5207223023387046, + "grad_norm": 3.1723196506500244, + "learning_rate": 9.815210950408703e-06, + "loss": 1.1755, + "step": 8305 + }, + { + "epoch": 0.520785002194495, + "grad_norm": 3.1193840503692627, + "learning_rate": 9.81318053650316e-06, + "loss": 1.0443, + "step": 8306 + }, + { + "epoch": 0.5208477020502853, + "grad_norm": 3.16583514213562, + "learning_rate": 9.81115013030206e-06, + "loss": 1.0397, + "step": 8307 + }, + { + "epoch": 0.5209104019060756, + "grad_norm": 3.3930892944335938, + "learning_rate": 9.809119731889134e-06, + "loss": 1.0464, + "step": 8308 + }, + { + "epoch": 0.520973101761866, + "grad_norm": 3.0296783447265625, + "learning_rate": 9.807089341348123e-06, + "loss": 1.2504, + "step": 8309 + }, + { + "epoch": 0.5210358016176563, + "grad_norm": 3.370619297027588, + "learning_rate": 9.805058958762751e-06, + "loss": 1.0169, + "step": 8310 + }, + { + "epoch": 0.5210985014734466, + "grad_norm": 2.9186854362487793, + "learning_rate": 9.803028584216761e-06, + "loss": 0.9599, + "step": 8311 + }, + { + "epoch": 0.5211612013292369, + "grad_norm": 2.958070755004883, + "learning_rate": 9.800998217793878e-06, + "loss": 1.1907, + "step": 8312 + }, + { + "epoch": 0.5212239011850273, + "grad_norm": 2.8350839614868164, + "learning_rate": 9.79896785957784e-06, + "loss": 1.1508, + "step": 8313 + }, + { + "epoch": 0.5212866010408176, + "grad_norm": 3.23047137260437, + "learning_rate": 9.796937509652373e-06, + "loss": 1.206, + "step": 8314 + }, + { + "epoch": 0.5213493008966079, + "grad_norm": 2.9462475776672363, + "learning_rate": 9.794907168101214e-06, + "loss": 1.1094, + "step": 8315 + }, + { + "epoch": 0.5214120007523982, + "grad_norm": 3.6873559951782227, + "learning_rate": 9.79287683500809e-06, + "loss": 0.7694, + "step": 8316 + }, + { + "epoch": 0.5214747006081886, + "grad_norm": 2.7443315982818604, + "learning_rate": 9.790846510456735e-06, + "loss": 1.0742, + "step": 8317 + }, + { + "epoch": 0.5215374004639789, + "grad_norm": 2.935253620147705, + "learning_rate": 9.788816194530882e-06, + "loss": 1.0828, + "step": 8318 + }, + { + "epoch": 0.5216001003197692, + "grad_norm": 2.8972396850585938, + "learning_rate": 9.786785887314255e-06, + "loss": 1.2957, + "step": 8319 + }, + { + "epoch": 0.5216628001755595, + "grad_norm": 3.1764919757843018, + "learning_rate": 9.78475558889059e-06, + "loss": 1.2011, + "step": 8320 + }, + { + "epoch": 0.5217255000313499, + "grad_norm": 3.1741371154785156, + "learning_rate": 9.78272529934361e-06, + "loss": 1.1085, + "step": 8321 + }, + { + "epoch": 0.5217881998871403, + "grad_norm": 3.0104920864105225, + "learning_rate": 9.780695018757052e-06, + "loss": 1.0787, + "step": 8322 + }, + { + "epoch": 0.5218508997429306, + "grad_norm": 2.9289395809173584, + "learning_rate": 9.77866474721464e-06, + "loss": 0.9794, + "step": 8323 + }, + { + "epoch": 0.521913599598721, + "grad_norm": 3.1687264442443848, + "learning_rate": 9.776634484800104e-06, + "loss": 1.0871, + "step": 8324 + }, + { + 
"epoch": 0.5219762994545113, + "grad_norm": 3.3649580478668213, + "learning_rate": 9.774604231597169e-06, + "loss": 1.1884, + "step": 8325 + }, + { + "epoch": 0.5220389993103016, + "grad_norm": 3.2367990016937256, + "learning_rate": 9.772573987689568e-06, + "loss": 1.0165, + "step": 8326 + }, + { + "epoch": 0.522101699166092, + "grad_norm": 3.13930344581604, + "learning_rate": 9.770543753161024e-06, + "loss": 1.0773, + "step": 8327 + }, + { + "epoch": 0.5221643990218823, + "grad_norm": 3.2073023319244385, + "learning_rate": 9.768513528095266e-06, + "loss": 1.2583, + "step": 8328 + }, + { + "epoch": 0.5222270988776726, + "grad_norm": 3.128121852874756, + "learning_rate": 9.766483312576022e-06, + "loss": 0.8477, + "step": 8329 + }, + { + "epoch": 0.5222897987334629, + "grad_norm": 3.227346181869507, + "learning_rate": 9.764453106687014e-06, + "loss": 1.0585, + "step": 8330 + }, + { + "epoch": 0.5223524985892533, + "grad_norm": 3.4145901203155518, + "learning_rate": 9.762422910511971e-06, + "loss": 1.0504, + "step": 8331 + }, + { + "epoch": 0.5224151984450436, + "grad_norm": 3.1923160552978516, + "learning_rate": 9.760392724134618e-06, + "loss": 1.1934, + "step": 8332 + }, + { + "epoch": 0.5224778983008339, + "grad_norm": 3.0066537857055664, + "learning_rate": 9.75836254763868e-06, + "loss": 1.2255, + "step": 8333 + }, + { + "epoch": 0.5225405981566242, + "grad_norm": 3.0194621086120605, + "learning_rate": 9.75633238110788e-06, + "loss": 1.3054, + "step": 8334 + }, + { + "epoch": 0.5226032980124146, + "grad_norm": 2.824223041534424, + "learning_rate": 9.754302224625944e-06, + "loss": 1.1829, + "step": 8335 + }, + { + "epoch": 0.5226659978682049, + "grad_norm": 3.167104721069336, + "learning_rate": 9.752272078276593e-06, + "loss": 1.1421, + "step": 8336 + }, + { + "epoch": 0.5227286977239952, + "grad_norm": 3.0908238887786865, + "learning_rate": 9.750241942143554e-06, + "loss": 0.9687, + "step": 8337 + }, + { + "epoch": 0.5227913975797855, + "grad_norm": 3.2106707096099854, + "learning_rate": 9.748211816310548e-06, + "loss": 1.3851, + "step": 8338 + }, + { + "epoch": 0.5228540974355759, + "grad_norm": 3.1485049724578857, + "learning_rate": 9.746181700861296e-06, + "loss": 1.2771, + "step": 8339 + }, + { + "epoch": 0.5229167972913662, + "grad_norm": 3.0711097717285156, + "learning_rate": 9.744151595879526e-06, + "loss": 1.2058, + "step": 8340 + }, + { + "epoch": 0.5229794971471565, + "grad_norm": 3.267967939376831, + "learning_rate": 9.742121501448953e-06, + "loss": 1.0526, + "step": 8341 + }, + { + "epoch": 0.5230421970029469, + "grad_norm": 2.7409164905548096, + "learning_rate": 9.7400914176533e-06, + "loss": 1.2593, + "step": 8342 + }, + { + "epoch": 0.5231048968587372, + "grad_norm": 3.311467170715332, + "learning_rate": 9.738061344576289e-06, + "loss": 1.2863, + "step": 8343 + }, + { + "epoch": 0.5231675967145275, + "grad_norm": 3.101511240005493, + "learning_rate": 9.73603128230164e-06, + "loss": 1.142, + "step": 8344 + }, + { + "epoch": 0.5232302965703179, + "grad_norm": 2.9892873764038086, + "learning_rate": 9.73400123091307e-06, + "loss": 1.2395, + "step": 8345 + }, + { + "epoch": 0.5232929964261083, + "grad_norm": 3.1501011848449707, + "learning_rate": 9.731971190494303e-06, + "loss": 1.0848, + "step": 8346 + }, + { + "epoch": 0.5233556962818986, + "grad_norm": 2.7545297145843506, + "learning_rate": 9.729941161129055e-06, + "loss": 1.2037, + "step": 8347 + }, + { + "epoch": 0.5234183961376889, + "grad_norm": 3.1771976947784424, + "learning_rate": 9.727911142901045e-06, + "loss": 
1.1717, + "step": 8348 + }, + { + "epoch": 0.5234810959934793, + "grad_norm": 3.034961700439453, + "learning_rate": 9.725881135893993e-06, + "loss": 1.1505, + "step": 8349 + }, + { + "epoch": 0.5235437958492696, + "grad_norm": 3.0126051902770996, + "learning_rate": 9.723851140191613e-06, + "loss": 1.13, + "step": 8350 + }, + { + "epoch": 0.5236064957050599, + "grad_norm": 2.9034061431884766, + "learning_rate": 9.721821155877627e-06, + "loss": 1.2539, + "step": 8351 + }, + { + "epoch": 0.5236691955608502, + "grad_norm": 3.4420783519744873, + "learning_rate": 9.719791183035744e-06, + "loss": 1.1052, + "step": 8352 + }, + { + "epoch": 0.5237318954166406, + "grad_norm": 3.0911378860473633, + "learning_rate": 9.71776122174969e-06, + "loss": 1.0604, + "step": 8353 + }, + { + "epoch": 0.5237945952724309, + "grad_norm": 3.1912221908569336, + "learning_rate": 9.715731272103172e-06, + "loss": 1.2096, + "step": 8354 + }, + { + "epoch": 0.5238572951282212, + "grad_norm": 3.2490646839141846, + "learning_rate": 9.713701334179911e-06, + "loss": 1.1486, + "step": 8355 + }, + { + "epoch": 0.5239199949840115, + "grad_norm": 3.2565667629241943, + "learning_rate": 9.711671408063617e-06, + "loss": 0.9993, + "step": 8356 + }, + { + "epoch": 0.5239826948398019, + "grad_norm": 2.9830322265625, + "learning_rate": 9.70964149383801e-06, + "loss": 1.1046, + "step": 8357 + }, + { + "epoch": 0.5240453946955922, + "grad_norm": 2.790419816970825, + "learning_rate": 9.707611591586798e-06, + "loss": 1.1762, + "step": 8358 + }, + { + "epoch": 0.5241080945513825, + "grad_norm": 3.1529462337493896, + "learning_rate": 9.705581701393694e-06, + "loss": 1.1362, + "step": 8359 + }, + { + "epoch": 0.5241707944071728, + "grad_norm": 2.896533966064453, + "learning_rate": 9.703551823342419e-06, + "loss": 1.2537, + "step": 8360 + }, + { + "epoch": 0.5242334942629632, + "grad_norm": 3.1813127994537354, + "learning_rate": 9.701521957516677e-06, + "loss": 1.0881, + "step": 8361 + }, + { + "epoch": 0.5242961941187535, + "grad_norm": 3.4918177127838135, + "learning_rate": 9.699492104000183e-06, + "loss": 1.3083, + "step": 8362 + }, + { + "epoch": 0.5243588939745438, + "grad_norm": 2.947525978088379, + "learning_rate": 9.697462262876647e-06, + "loss": 1.1163, + "step": 8363 + }, + { + "epoch": 0.5244215938303342, + "grad_norm": 3.038443088531494, + "learning_rate": 9.695432434229783e-06, + "loss": 1.3502, + "step": 8364 + }, + { + "epoch": 0.5244842936861245, + "grad_norm": 3.23891282081604, + "learning_rate": 9.693402618143296e-06, + "loss": 0.9971, + "step": 8365 + }, + { + "epoch": 0.5245469935419148, + "grad_norm": 3.261460781097412, + "learning_rate": 9.6913728147009e-06, + "loss": 1.1782, + "step": 8366 + }, + { + "epoch": 0.5246096933977051, + "grad_norm": 2.9583563804626465, + "learning_rate": 9.689343023986303e-06, + "loss": 1.0752, + "step": 8367 + }, + { + "epoch": 0.5246723932534955, + "grad_norm": 3.20973539352417, + "learning_rate": 9.68731324608321e-06, + "loss": 1.0376, + "step": 8368 + }, + { + "epoch": 0.5247350931092859, + "grad_norm": 2.890734910964966, + "learning_rate": 9.685283481075334e-06, + "loss": 0.9965, + "step": 8369 + }, + { + "epoch": 0.5247977929650762, + "grad_norm": 3.1595191955566406, + "learning_rate": 9.683253729046383e-06, + "loss": 1.1463, + "step": 8370 + }, + { + "epoch": 0.5248604928208666, + "grad_norm": 2.922365427017212, + "learning_rate": 9.68122399008006e-06, + "loss": 0.9876, + "step": 8371 + }, + { + "epoch": 0.5249231926766569, + "grad_norm": 3.024862051010132, + "learning_rate": 
9.679194264260074e-06, + "loss": 1.3093, + "step": 8372 + }, + { + "epoch": 0.5249858925324472, + "grad_norm": 3.176596164703369, + "learning_rate": 9.677164551670132e-06, + "loss": 1.062, + "step": 8373 + }, + { + "epoch": 0.5250485923882375, + "grad_norm": 3.267538547515869, + "learning_rate": 9.675134852393935e-06, + "loss": 0.9927, + "step": 8374 + }, + { + "epoch": 0.5251112922440279, + "grad_norm": 3.20623517036438, + "learning_rate": 9.673105166515194e-06, + "loss": 1.1933, + "step": 8375 + }, + { + "epoch": 0.5251739920998182, + "grad_norm": 3.2233214378356934, + "learning_rate": 9.671075494117608e-06, + "loss": 1.0871, + "step": 8376 + }, + { + "epoch": 0.5252366919556085, + "grad_norm": 3.2560219764709473, + "learning_rate": 9.669045835284885e-06, + "loss": 1.1146, + "step": 8377 + }, + { + "epoch": 0.5252993918113988, + "grad_norm": 3.7655203342437744, + "learning_rate": 9.667016190100724e-06, + "loss": 1.1678, + "step": 8378 + }, + { + "epoch": 0.5253620916671892, + "grad_norm": 3.1229045391082764, + "learning_rate": 9.66498655864883e-06, + "loss": 1.2083, + "step": 8379 + }, + { + "epoch": 0.5254247915229795, + "grad_norm": 3.3492133617401123, + "learning_rate": 9.662956941012906e-06, + "loss": 1.2196, + "step": 8380 + }, + { + "epoch": 0.5254874913787698, + "grad_norm": 3.0256142616271973, + "learning_rate": 9.660927337276652e-06, + "loss": 1.1262, + "step": 8381 + }, + { + "epoch": 0.5255501912345601, + "grad_norm": 3.244018077850342, + "learning_rate": 9.65889774752377e-06, + "loss": 1.1699, + "step": 8382 + }, + { + "epoch": 0.5256128910903505, + "grad_norm": 2.865508556365967, + "learning_rate": 9.65686817183796e-06, + "loss": 1.1598, + "step": 8383 + }, + { + "epoch": 0.5256755909461408, + "grad_norm": 2.909773111343384, + "learning_rate": 9.654838610302922e-06, + "loss": 1.0723, + "step": 8384 + }, + { + "epoch": 0.5257382908019311, + "grad_norm": 3.0766685009002686, + "learning_rate": 9.652809063002353e-06, + "loss": 1.0339, + "step": 8385 + }, + { + "epoch": 0.5258009906577215, + "grad_norm": 3.1377999782562256, + "learning_rate": 9.650779530019955e-06, + "loss": 1.0069, + "step": 8386 + }, + { + "epoch": 0.5258636905135118, + "grad_norm": 2.921680450439453, + "learning_rate": 9.648750011439424e-06, + "loss": 1.0206, + "step": 8387 + }, + { + "epoch": 0.5259263903693021, + "grad_norm": 3.317789316177368, + "learning_rate": 9.646720507344458e-06, + "loss": 1.3394, + "step": 8388 + }, + { + "epoch": 0.5259890902250924, + "grad_norm": 3.2346363067626953, + "learning_rate": 9.644691017818752e-06, + "loss": 1.1119, + "step": 8389 + }, + { + "epoch": 0.5260517900808828, + "grad_norm": 2.9409661293029785, + "learning_rate": 9.642661542946005e-06, + "loss": 1.083, + "step": 8390 + }, + { + "epoch": 0.5261144899366731, + "grad_norm": 3.2285826206207275, + "learning_rate": 9.640632082809912e-06, + "loss": 1.1663, + "step": 8391 + }, + { + "epoch": 0.5261771897924635, + "grad_norm": 3.3873517513275146, + "learning_rate": 9.638602637494167e-06, + "loss": 1.1248, + "step": 8392 + }, + { + "epoch": 0.5262398896482539, + "grad_norm": 3.133429765701294, + "learning_rate": 9.636573207082466e-06, + "loss": 1.1451, + "step": 8393 + }, + { + "epoch": 0.5263025895040442, + "grad_norm": 2.9256300926208496, + "learning_rate": 9.6345437916585e-06, + "loss": 1.0098, + "step": 8394 + }, + { + "epoch": 0.5263652893598345, + "grad_norm": 3.1798808574676514, + "learning_rate": 9.632514391305967e-06, + "loss": 1.1219, + "step": 8395 + }, + { + "epoch": 0.5264279892156248, + "grad_norm": 
3.1623504161834717, + "learning_rate": 9.630485006108554e-06, + "loss": 0.9968, + "step": 8396 + }, + { + "epoch": 0.5264906890714152, + "grad_norm": 3.154564619064331, + "learning_rate": 9.628455636149958e-06, + "loss": 1.1602, + "step": 8397 + }, + { + "epoch": 0.5265533889272055, + "grad_norm": 2.9413747787475586, + "learning_rate": 9.626426281513864e-06, + "loss": 1.2413, + "step": 8398 + }, + { + "epoch": 0.5266160887829958, + "grad_norm": 3.5455777645111084, + "learning_rate": 9.624396942283968e-06, + "loss": 1.1323, + "step": 8399 + }, + { + "epoch": 0.5266787886387861, + "grad_norm": 3.356123447418213, + "learning_rate": 9.622367618543959e-06, + "loss": 1.1798, + "step": 8400 + }, + { + "epoch": 0.5267414884945765, + "grad_norm": 3.184760570526123, + "learning_rate": 9.620338310377526e-06, + "loss": 1.1347, + "step": 8401 + }, + { + "epoch": 0.5268041883503668, + "grad_norm": 3.222902774810791, + "learning_rate": 9.618309017868359e-06, + "loss": 1.2701, + "step": 8402 + }, + { + "epoch": 0.5268668882061571, + "grad_norm": 2.7406365871429443, + "learning_rate": 9.616279741100141e-06, + "loss": 1.1273, + "step": 8403 + }, + { + "epoch": 0.5269295880619475, + "grad_norm": 2.843336582183838, + "learning_rate": 9.614250480156568e-06, + "loss": 0.8746, + "step": 8404 + }, + { + "epoch": 0.5269922879177378, + "grad_norm": 3.2583932876586914, + "learning_rate": 9.612221235121318e-06, + "loss": 1.136, + "step": 8405 + }, + { + "epoch": 0.5270549877735281, + "grad_norm": 2.9614527225494385, + "learning_rate": 9.610192006078085e-06, + "loss": 1.0996, + "step": 8406 + }, + { + "epoch": 0.5271176876293184, + "grad_norm": 2.9965617656707764, + "learning_rate": 9.608162793110547e-06, + "loss": 1.0651, + "step": 8407 + }, + { + "epoch": 0.5271803874851088, + "grad_norm": 2.854513168334961, + "learning_rate": 9.606133596302396e-06, + "loss": 1.154, + "step": 8408 + }, + { + "epoch": 0.5272430873408991, + "grad_norm": 2.9269707202911377, + "learning_rate": 9.604104415737309e-06, + "loss": 0.9306, + "step": 8409 + }, + { + "epoch": 0.5273057871966894, + "grad_norm": 3.2202324867248535, + "learning_rate": 9.602075251498974e-06, + "loss": 1.264, + "step": 8410 + }, + { + "epoch": 0.5273684870524797, + "grad_norm": 2.8949241638183594, + "learning_rate": 9.600046103671075e-06, + "loss": 1.0363, + "step": 8411 + }, + { + "epoch": 0.5274311869082701, + "grad_norm": 3.1254074573516846, + "learning_rate": 9.59801697233729e-06, + "loss": 0.9711, + "step": 8412 + }, + { + "epoch": 0.5274938867640604, + "grad_norm": 3.079241991043091, + "learning_rate": 9.595987857581303e-06, + "loss": 1.1073, + "step": 8413 + }, + { + "epoch": 0.5275565866198507, + "grad_norm": 3.098176956176758, + "learning_rate": 9.593958759486795e-06, + "loss": 1.0478, + "step": 8414 + }, + { + "epoch": 0.5276192864756412, + "grad_norm": 3.028304100036621, + "learning_rate": 9.591929678137447e-06, + "loss": 1.0284, + "step": 8415 + }, + { + "epoch": 0.5276819863314315, + "grad_norm": 3.1161961555480957, + "learning_rate": 9.589900613616935e-06, + "loss": 1.0964, + "step": 8416 + }, + { + "epoch": 0.5277446861872218, + "grad_norm": 3.068453788757324, + "learning_rate": 9.587871566008941e-06, + "loss": 1.092, + "step": 8417 + }, + { + "epoch": 0.5278073860430121, + "grad_norm": 3.4280683994293213, + "learning_rate": 9.58584253539714e-06, + "loss": 1.2473, + "step": 8418 + }, + { + "epoch": 0.5278700858988025, + "grad_norm": 2.9098398685455322, + "learning_rate": 9.58381352186521e-06, + "loss": 1.1837, + "step": 8419 + }, + { + "epoch": 
0.5279327857545928, + "grad_norm": 3.034099578857422, + "learning_rate": 9.581784525496831e-06, + "loss": 1.115, + "step": 8420 + }, + { + "epoch": 0.5279954856103831, + "grad_norm": 2.92668080329895, + "learning_rate": 9.579755546375675e-06, + "loss": 1.1069, + "step": 8421 + }, + { + "epoch": 0.5280581854661734, + "grad_norm": 2.879272937774658, + "learning_rate": 9.57772658458542e-06, + "loss": 1.1173, + "step": 8422 + }, + { + "epoch": 0.5281208853219638, + "grad_norm": 2.707829475402832, + "learning_rate": 9.575697640209738e-06, + "loss": 1.1369, + "step": 8423 + }, + { + "epoch": 0.5281835851777541, + "grad_norm": 3.1325900554656982, + "learning_rate": 9.573668713332305e-06, + "loss": 1.0782, + "step": 8424 + }, + { + "epoch": 0.5282462850335444, + "grad_norm": 2.986302614212036, + "learning_rate": 9.57163980403679e-06, + "loss": 1.0806, + "step": 8425 + }, + { + "epoch": 0.5283089848893348, + "grad_norm": 2.9299569129943848, + "learning_rate": 9.569610912406872e-06, + "loss": 1.1232, + "step": 8426 + }, + { + "epoch": 0.5283716847451251, + "grad_norm": 3.24458384513855, + "learning_rate": 9.567582038526217e-06, + "loss": 1.1506, + "step": 8427 + }, + { + "epoch": 0.5284343846009154, + "grad_norm": 3.173520803451538, + "learning_rate": 9.565553182478497e-06, + "loss": 1.0561, + "step": 8428 + }, + { + "epoch": 0.5284970844567057, + "grad_norm": 3.1131551265716553, + "learning_rate": 9.563524344347384e-06, + "loss": 1.1196, + "step": 8429 + }, + { + "epoch": 0.5285597843124961, + "grad_norm": 3.3130340576171875, + "learning_rate": 9.561495524216542e-06, + "loss": 1.1153, + "step": 8430 + }, + { + "epoch": 0.5286224841682864, + "grad_norm": 3.1041953563690186, + "learning_rate": 9.55946672216965e-06, + "loss": 1.1944, + "step": 8431 + }, + { + "epoch": 0.5286851840240767, + "grad_norm": 3.235179901123047, + "learning_rate": 9.557437938290367e-06, + "loss": 1.0703, + "step": 8432 + }, + { + "epoch": 0.528747883879867, + "grad_norm": 3.010749578475952, + "learning_rate": 9.555409172662362e-06, + "loss": 1.0019, + "step": 8433 + }, + { + "epoch": 0.5288105837356574, + "grad_norm": 3.6157608032226562, + "learning_rate": 9.553380425369302e-06, + "loss": 1.0814, + "step": 8434 + }, + { + "epoch": 0.5288732835914477, + "grad_norm": 3.318657875061035, + "learning_rate": 9.551351696494854e-06, + "loss": 1.1882, + "step": 8435 + }, + { + "epoch": 0.528935983447238, + "grad_norm": 3.3623766899108887, + "learning_rate": 9.54932298612268e-06, + "loss": 1.1401, + "step": 8436 + }, + { + "epoch": 0.5289986833030283, + "grad_norm": 3.4663188457489014, + "learning_rate": 9.547294294336449e-06, + "loss": 1.0513, + "step": 8437 + }, + { + "epoch": 0.5290613831588188, + "grad_norm": 2.9834766387939453, + "learning_rate": 9.545265621219817e-06, + "loss": 1.0711, + "step": 8438 + }, + { + "epoch": 0.5291240830146091, + "grad_norm": 3.2590479850769043, + "learning_rate": 9.543236966856455e-06, + "loss": 1.1067, + "step": 8439 + }, + { + "epoch": 0.5291867828703994, + "grad_norm": 2.9963700771331787, + "learning_rate": 9.541208331330015e-06, + "loss": 1.0742, + "step": 8440 + }, + { + "epoch": 0.5292494827261898, + "grad_norm": 3.0368051528930664, + "learning_rate": 9.539179714724166e-06, + "loss": 1.0512, + "step": 8441 + }, + { + "epoch": 0.5293121825819801, + "grad_norm": 3.115575075149536, + "learning_rate": 9.537151117122568e-06, + "loss": 1.191, + "step": 8442 + }, + { + "epoch": 0.5293748824377704, + "grad_norm": 3.3319199085235596, + "learning_rate": 9.535122538608874e-06, + "loss": 1.24, + "step": 
8443 + }, + { + "epoch": 0.5294375822935607, + "grad_norm": 2.9369616508483887, + "learning_rate": 9.533093979266752e-06, + "loss": 1.0925, + "step": 8444 + }, + { + "epoch": 0.5295002821493511, + "grad_norm": 2.9029836654663086, + "learning_rate": 9.53106543917985e-06, + "loss": 1.0821, + "step": 8445 + }, + { + "epoch": 0.5295629820051414, + "grad_norm": 2.7552402019500732, + "learning_rate": 9.529036918431833e-06, + "loss": 1.1118, + "step": 8446 + }, + { + "epoch": 0.5296256818609317, + "grad_norm": 3.0830447673797607, + "learning_rate": 9.527008417106351e-06, + "loss": 1.0464, + "step": 8447 + }, + { + "epoch": 0.5296883817167221, + "grad_norm": 2.8990347385406494, + "learning_rate": 9.524979935287064e-06, + "loss": 1.1774, + "step": 8448 + }, + { + "epoch": 0.5297510815725124, + "grad_norm": 3.180037260055542, + "learning_rate": 9.522951473057624e-06, + "loss": 1.0454, + "step": 8449 + }, + { + "epoch": 0.5298137814283027, + "grad_norm": 2.8320600986480713, + "learning_rate": 9.520923030501687e-06, + "loss": 1.065, + "step": 8450 + }, + { + "epoch": 0.529876481284093, + "grad_norm": 2.974385976791382, + "learning_rate": 9.518894607702907e-06, + "loss": 1.1701, + "step": 8451 + }, + { + "epoch": 0.5299391811398834, + "grad_norm": 3.301265239715576, + "learning_rate": 9.516866204744932e-06, + "loss": 1.0517, + "step": 8452 + }, + { + "epoch": 0.5300018809956737, + "grad_norm": 2.9187228679656982, + "learning_rate": 9.514837821711418e-06, + "loss": 1.2165, + "step": 8453 + }, + { + "epoch": 0.530064580851464, + "grad_norm": 3.248201847076416, + "learning_rate": 9.512809458686011e-06, + "loss": 1.0712, + "step": 8454 + }, + { + "epoch": 0.5301272807072543, + "grad_norm": 2.995893955230713, + "learning_rate": 9.510781115752365e-06, + "loss": 1.0892, + "step": 8455 + }, + { + "epoch": 0.5301899805630447, + "grad_norm": 2.893266201019287, + "learning_rate": 9.508752792994124e-06, + "loss": 1.1629, + "step": 8456 + }, + { + "epoch": 0.530252680418835, + "grad_norm": 3.056887149810791, + "learning_rate": 9.506724490494939e-06, + "loss": 0.9962, + "step": 8457 + }, + { + "epoch": 0.5303153802746253, + "grad_norm": 3.0601766109466553, + "learning_rate": 9.504696208338461e-06, + "loss": 1.0551, + "step": 8458 + }, + { + "epoch": 0.5303780801304157, + "grad_norm": 3.377394199371338, + "learning_rate": 9.502667946608332e-06, + "loss": 1.1679, + "step": 8459 + }, + { + "epoch": 0.530440779986206, + "grad_norm": 2.9078807830810547, + "learning_rate": 9.500639705388198e-06, + "loss": 1.0347, + "step": 8460 + }, + { + "epoch": 0.5305034798419964, + "grad_norm": 3.0900697708129883, + "learning_rate": 9.498611484761707e-06, + "loss": 1.1788, + "step": 8461 + }, + { + "epoch": 0.5305661796977867, + "grad_norm": 3.157651662826538, + "learning_rate": 9.496583284812497e-06, + "loss": 1.1194, + "step": 8462 + }, + { + "epoch": 0.5306288795535771, + "grad_norm": 3.0628244876861572, + "learning_rate": 9.494555105624216e-06, + "loss": 0.937, + "step": 8463 + }, + { + "epoch": 0.5306915794093674, + "grad_norm": 3.06693434715271, + "learning_rate": 9.492526947280505e-06, + "loss": 1.1429, + "step": 8464 + }, + { + "epoch": 0.5307542792651577, + "grad_norm": 3.2913753986358643, + "learning_rate": 9.490498809865006e-06, + "loss": 1.1369, + "step": 8465 + }, + { + "epoch": 0.530816979120948, + "grad_norm": 3.3207242488861084, + "learning_rate": 9.488470693461354e-06, + "loss": 1.2153, + "step": 8466 + }, + { + "epoch": 0.5308796789767384, + "grad_norm": 2.8994123935699463, + "learning_rate": 9.486442598153198e-06, + 
"loss": 1.222, + "step": 8467 + }, + { + "epoch": 0.5309423788325287, + "grad_norm": 3.1777944564819336, + "learning_rate": 9.484414524024167e-06, + "loss": 0.9613, + "step": 8468 + }, + { + "epoch": 0.531005078688319, + "grad_norm": 3.1225674152374268, + "learning_rate": 9.482386471157905e-06, + "loss": 1.153, + "step": 8469 + }, + { + "epoch": 0.5310677785441094, + "grad_norm": 3.155057668685913, + "learning_rate": 9.480358439638048e-06, + "loss": 1.1096, + "step": 8470 + }, + { + "epoch": 0.5311304783998997, + "grad_norm": 3.1192986965179443, + "learning_rate": 9.478330429548233e-06, + "loss": 0.9835, + "step": 8471 + }, + { + "epoch": 0.53119317825569, + "grad_norm": 3.102903366088867, + "learning_rate": 9.476302440972093e-06, + "loss": 1.1954, + "step": 8472 + }, + { + "epoch": 0.5312558781114803, + "grad_norm": 2.9260811805725098, + "learning_rate": 9.474274473993263e-06, + "loss": 1.2104, + "step": 8473 + }, + { + "epoch": 0.5313185779672707, + "grad_norm": 2.9582836627960205, + "learning_rate": 9.472246528695377e-06, + "loss": 1.0788, + "step": 8474 + }, + { + "epoch": 0.531381277823061, + "grad_norm": 3.001225233078003, + "learning_rate": 9.470218605162066e-06, + "loss": 1.0972, + "step": 8475 + }, + { + "epoch": 0.5314439776788513, + "grad_norm": 3.4500136375427246, + "learning_rate": 9.468190703476964e-06, + "loss": 1.1512, + "step": 8476 + }, + { + "epoch": 0.5315066775346416, + "grad_norm": 2.9190752506256104, + "learning_rate": 9.466162823723699e-06, + "loss": 1.0848, + "step": 8477 + }, + { + "epoch": 0.531569377390432, + "grad_norm": 3.356555461883545, + "learning_rate": 9.464134965985905e-06, + "loss": 1.2416, + "step": 8478 + }, + { + "epoch": 0.5316320772462223, + "grad_norm": 3.135192632675171, + "learning_rate": 9.462107130347206e-06, + "loss": 1.2176, + "step": 8479 + }, + { + "epoch": 0.5316947771020126, + "grad_norm": 3.119572639465332, + "learning_rate": 9.460079316891233e-06, + "loss": 1.3105, + "step": 8480 + }, + { + "epoch": 0.531757476957803, + "grad_norm": 3.1746020317077637, + "learning_rate": 9.458051525701615e-06, + "loss": 1.1488, + "step": 8481 + }, + { + "epoch": 0.5318201768135933, + "grad_norm": 3.0748822689056396, + "learning_rate": 9.456023756861972e-06, + "loss": 1.1888, + "step": 8482 + }, + { + "epoch": 0.5318828766693836, + "grad_norm": 2.851980447769165, + "learning_rate": 9.453996010455936e-06, + "loss": 1.0338, + "step": 8483 + }, + { + "epoch": 0.531945576525174, + "grad_norm": 3.1767332553863525, + "learning_rate": 9.451968286567126e-06, + "loss": 1.1815, + "step": 8484 + }, + { + "epoch": 0.5320082763809644, + "grad_norm": 3.007988214492798, + "learning_rate": 9.44994058527917e-06, + "loss": 1.116, + "step": 8485 + }, + { + "epoch": 0.5320709762367547, + "grad_norm": 2.6000213623046875, + "learning_rate": 9.447912906675687e-06, + "loss": 1.1925, + "step": 8486 + }, + { + "epoch": 0.532133676092545, + "grad_norm": 3.1427228450775146, + "learning_rate": 9.445885250840301e-06, + "loss": 1.1426, + "step": 8487 + }, + { + "epoch": 0.5321963759483354, + "grad_norm": 3.2599258422851562, + "learning_rate": 9.44385761785663e-06, + "loss": 1.2944, + "step": 8488 + }, + { + "epoch": 0.5322590758041257, + "grad_norm": 2.649136543273926, + "learning_rate": 9.441830007808297e-06, + "loss": 1.1825, + "step": 8489 + }, + { + "epoch": 0.532321775659916, + "grad_norm": 3.3589894771575928, + "learning_rate": 9.439802420778917e-06, + "loss": 1.1109, + "step": 8490 + }, + { + "epoch": 0.5323844755157063, + "grad_norm": 3.0326852798461914, + "learning_rate": 
9.437774856852109e-06, + "loss": 1.0831, + "step": 8491 + }, + { + "epoch": 0.5324471753714967, + "grad_norm": 3.255596399307251, + "learning_rate": 9.435747316111492e-06, + "loss": 1.0563, + "step": 8492 + }, + { + "epoch": 0.532509875227287, + "grad_norm": 3.1870005130767822, + "learning_rate": 9.433719798640679e-06, + "loss": 1.0086, + "step": 8493 + }, + { + "epoch": 0.5325725750830773, + "grad_norm": 3.1396291255950928, + "learning_rate": 9.43169230452329e-06, + "loss": 1.1849, + "step": 8494 + }, + { + "epoch": 0.5326352749388676, + "grad_norm": 2.987535238265991, + "learning_rate": 9.42966483384293e-06, + "loss": 0.9157, + "step": 8495 + }, + { + "epoch": 0.532697974794658, + "grad_norm": 3.239182233810425, + "learning_rate": 9.427637386683221e-06, + "loss": 1.2193, + "step": 8496 + }, + { + "epoch": 0.5327606746504483, + "grad_norm": 3.0841057300567627, + "learning_rate": 9.425609963127769e-06, + "loss": 1.1441, + "step": 8497 + }, + { + "epoch": 0.5328233745062386, + "grad_norm": 3.032076597213745, + "learning_rate": 9.42358256326019e-06, + "loss": 1.0702, + "step": 8498 + }, + { + "epoch": 0.532886074362029, + "grad_norm": 2.8164656162261963, + "learning_rate": 9.421555187164086e-06, + "loss": 1.1498, + "step": 8499 + }, + { + "epoch": 0.5329487742178193, + "grad_norm": 2.7766640186309814, + "learning_rate": 9.419527834923074e-06, + "loss": 1.1488, + "step": 8500 + }, + { + "epoch": 0.5330114740736096, + "grad_norm": 2.888688087463379, + "learning_rate": 9.41750050662076e-06, + "loss": 0.993, + "step": 8501 + }, + { + "epoch": 0.5330741739293999, + "grad_norm": 3.381824493408203, + "learning_rate": 9.415473202340751e-06, + "loss": 0.9708, + "step": 8502 + }, + { + "epoch": 0.5331368737851903, + "grad_norm": 3.2760305404663086, + "learning_rate": 9.413445922166654e-06, + "loss": 1.0522, + "step": 8503 + }, + { + "epoch": 0.5331995736409806, + "grad_norm": 3.269479990005493, + "learning_rate": 9.411418666182069e-06, + "loss": 1.13, + "step": 8504 + }, + { + "epoch": 0.5332622734967709, + "grad_norm": 3.331921339035034, + "learning_rate": 9.409391434470607e-06, + "loss": 0.9976, + "step": 8505 + }, + { + "epoch": 0.5333249733525612, + "grad_norm": 3.021087884902954, + "learning_rate": 9.407364227115866e-06, + "loss": 1.3032, + "step": 8506 + }, + { + "epoch": 0.5333876732083517, + "grad_norm": 3.410922050476074, + "learning_rate": 9.405337044201453e-06, + "loss": 1.1184, + "step": 8507 + }, + { + "epoch": 0.533450373064142, + "grad_norm": 3.3778374195098877, + "learning_rate": 9.403309885810962e-06, + "loss": 1.0666, + "step": 8508 + }, + { + "epoch": 0.5335130729199323, + "grad_norm": 3.2092835903167725, + "learning_rate": 9.401282752028002e-06, + "loss": 1.0443, + "step": 8509 + }, + { + "epoch": 0.5335757727757227, + "grad_norm": 3.042078971862793, + "learning_rate": 9.399255642936164e-06, + "loss": 1.1556, + "step": 8510 + }, + { + "epoch": 0.533638472631513, + "grad_norm": 3.088630437850952, + "learning_rate": 9.397228558619049e-06, + "loss": 1.2084, + "step": 8511 + }, + { + "epoch": 0.5337011724873033, + "grad_norm": 3.2323555946350098, + "learning_rate": 9.395201499160259e-06, + "loss": 1.2173, + "step": 8512 + }, + { + "epoch": 0.5337638723430936, + "grad_norm": 3.312417984008789, + "learning_rate": 9.393174464643381e-06, + "loss": 1.0837, + "step": 8513 + }, + { + "epoch": 0.533826572198884, + "grad_norm": 3.3287482261657715, + "learning_rate": 9.391147455152018e-06, + "loss": 1.2559, + "step": 8514 + }, + { + "epoch": 0.5338892720546743, + "grad_norm": 3.3334872722625732, 
+ "learning_rate": 9.389120470769758e-06, + "loss": 1.0329, + "step": 8515 + }, + { + "epoch": 0.5339519719104646, + "grad_norm": 2.87841796875, + "learning_rate": 9.387093511580198e-06, + "loss": 1.1442, + "step": 8516 + }, + { + "epoch": 0.534014671766255, + "grad_norm": 3.0349903106689453, + "learning_rate": 9.385066577666927e-06, + "loss": 1.0681, + "step": 8517 + }, + { + "epoch": 0.5340773716220453, + "grad_norm": 2.983604669570923, + "learning_rate": 9.38303966911354e-06, + "loss": 1.2108, + "step": 8518 + }, + { + "epoch": 0.5341400714778356, + "grad_norm": 3.266577959060669, + "learning_rate": 9.38101278600362e-06, + "loss": 1.0953, + "step": 8519 + }, + { + "epoch": 0.5342027713336259, + "grad_norm": 3.1389150619506836, + "learning_rate": 9.378985928420764e-06, + "loss": 0.9392, + "step": 8520 + }, + { + "epoch": 0.5342654711894163, + "grad_norm": 3.239581823348999, + "learning_rate": 9.376959096448551e-06, + "loss": 1.0177, + "step": 8521 + }, + { + "epoch": 0.5343281710452066, + "grad_norm": 2.962841272354126, + "learning_rate": 9.374932290170574e-06, + "loss": 1.1771, + "step": 8522 + }, + { + "epoch": 0.5343908709009969, + "grad_norm": 3.1685733795166016, + "learning_rate": 9.372905509670417e-06, + "loss": 1.276, + "step": 8523 + }, + { + "epoch": 0.5344535707567872, + "grad_norm": 3.3446531295776367, + "learning_rate": 9.370878755031663e-06, + "loss": 1.0896, + "step": 8524 + }, + { + "epoch": 0.5345162706125776, + "grad_norm": 3.3512380123138428, + "learning_rate": 9.3688520263379e-06, + "loss": 1.1177, + "step": 8525 + }, + { + "epoch": 0.5345789704683679, + "grad_norm": 3.6045408248901367, + "learning_rate": 9.366825323672702e-06, + "loss": 1.1579, + "step": 8526 + }, + { + "epoch": 0.5346416703241582, + "grad_norm": 3.0115671157836914, + "learning_rate": 9.36479864711966e-06, + "loss": 1.1514, + "step": 8527 + }, + { + "epoch": 0.5347043701799485, + "grad_norm": 2.925445318222046, + "learning_rate": 9.362771996762347e-06, + "loss": 1.052, + "step": 8528 + }, + { + "epoch": 0.5347670700357389, + "grad_norm": 3.0127460956573486, + "learning_rate": 9.360745372684346e-06, + "loss": 1.0501, + "step": 8529 + }, + { + "epoch": 0.5348297698915292, + "grad_norm": 3.058748245239258, + "learning_rate": 9.35871877496923e-06, + "loss": 0.9915, + "step": 8530 + }, + { + "epoch": 0.5348924697473196, + "grad_norm": 2.821361780166626, + "learning_rate": 9.356692203700583e-06, + "loss": 1.2019, + "step": 8531 + }, + { + "epoch": 0.53495516960311, + "grad_norm": 3.082117795944214, + "learning_rate": 9.354665658961978e-06, + "loss": 1.073, + "step": 8532 + }, + { + "epoch": 0.5350178694589003, + "grad_norm": 3.420041799545288, + "learning_rate": 9.352639140836987e-06, + "loss": 1.1614, + "step": 8533 + }, + { + "epoch": 0.5350805693146906, + "grad_norm": 3.4023265838623047, + "learning_rate": 9.35061264940919e-06, + "loss": 1.0507, + "step": 8534 + }, + { + "epoch": 0.5351432691704809, + "grad_norm": 3.013861656188965, + "learning_rate": 9.348586184762152e-06, + "loss": 1.1492, + "step": 8535 + }, + { + "epoch": 0.5352059690262713, + "grad_norm": 3.0948503017425537, + "learning_rate": 9.346559746979451e-06, + "loss": 1.1646, + "step": 8536 + }, + { + "epoch": 0.5352686688820616, + "grad_norm": 2.9439315795898438, + "learning_rate": 9.344533336144653e-06, + "loss": 1.1386, + "step": 8537 + }, + { + "epoch": 0.5353313687378519, + "grad_norm": 3.196892261505127, + "learning_rate": 9.342506952341329e-06, + "loss": 1.2898, + "step": 8538 + }, + { + "epoch": 0.5353940685936422, + "grad_norm": 
3.1283035278320312, + "learning_rate": 9.340480595653047e-06, + "loss": 1.2222, + "step": 8539 + }, + { + "epoch": 0.5354567684494326, + "grad_norm": 3.1294381618499756, + "learning_rate": 9.338454266163374e-06, + "loss": 1.0713, + "step": 8540 + }, + { + "epoch": 0.5355194683052229, + "grad_norm": 2.962702751159668, + "learning_rate": 9.336427963955876e-06, + "loss": 1.0405, + "step": 8541 + }, + { + "epoch": 0.5355821681610132, + "grad_norm": 3.2064383029937744, + "learning_rate": 9.334401689114116e-06, + "loss": 0.944, + "step": 8542 + }, + { + "epoch": 0.5356448680168036, + "grad_norm": 3.123068332672119, + "learning_rate": 9.332375441721664e-06, + "loss": 1.0467, + "step": 8543 + }, + { + "epoch": 0.5357075678725939, + "grad_norm": 3.1044676303863525, + "learning_rate": 9.330349221862073e-06, + "loss": 1.2164, + "step": 8544 + }, + { + "epoch": 0.5357702677283842, + "grad_norm": 3.4488444328308105, + "learning_rate": 9.328323029618912e-06, + "loss": 1.1566, + "step": 8545 + }, + { + "epoch": 0.5358329675841745, + "grad_norm": 3.0910933017730713, + "learning_rate": 9.326296865075737e-06, + "loss": 1.0047, + "step": 8546 + }, + { + "epoch": 0.5358956674399649, + "grad_norm": 3.1073896884918213, + "learning_rate": 9.324270728316112e-06, + "loss": 1.1752, + "step": 8547 + }, + { + "epoch": 0.5359583672957552, + "grad_norm": 2.885617971420288, + "learning_rate": 9.322244619423587e-06, + "loss": 1.0991, + "step": 8548 + }, + { + "epoch": 0.5360210671515455, + "grad_norm": 2.8836755752563477, + "learning_rate": 9.320218538481728e-06, + "loss": 1.1217, + "step": 8549 + }, + { + "epoch": 0.5360837670073358, + "grad_norm": 2.9014134407043457, + "learning_rate": 9.318192485574083e-06, + "loss": 1.1402, + "step": 8550 + }, + { + "epoch": 0.5361464668631262, + "grad_norm": 2.9920661449432373, + "learning_rate": 9.31616646078421e-06, + "loss": 1.0546, + "step": 8551 + }, + { + "epoch": 0.5362091667189165, + "grad_norm": 3.130145788192749, + "learning_rate": 9.314140464195663e-06, + "loss": 1.0642, + "step": 8552 + }, + { + "epoch": 0.5362718665747068, + "grad_norm": 2.9286229610443115, + "learning_rate": 9.312114495891992e-06, + "loss": 1.0015, + "step": 8553 + }, + { + "epoch": 0.5363345664304973, + "grad_norm": 2.702244758605957, + "learning_rate": 9.310088555956751e-06, + "loss": 1.1102, + "step": 8554 + }, + { + "epoch": 0.5363972662862876, + "grad_norm": 3.525806188583374, + "learning_rate": 9.308062644473487e-06, + "loss": 1.1093, + "step": 8555 + }, + { + "epoch": 0.5364599661420779, + "grad_norm": 2.9781429767608643, + "learning_rate": 9.306036761525751e-06, + "loss": 1.0153, + "step": 8556 + }, + { + "epoch": 0.5365226659978682, + "grad_norm": 3.1107335090637207, + "learning_rate": 9.304010907197088e-06, + "loss": 1.1211, + "step": 8557 + }, + { + "epoch": 0.5365853658536586, + "grad_norm": 3.070545196533203, + "learning_rate": 9.301985081571047e-06, + "loss": 1.1359, + "step": 8558 + }, + { + "epoch": 0.5366480657094489, + "grad_norm": 3.301013469696045, + "learning_rate": 9.299959284731169e-06, + "loss": 1.0549, + "step": 8559 + }, + { + "epoch": 0.5367107655652392, + "grad_norm": 3.1015427112579346, + "learning_rate": 9.297933516761003e-06, + "loss": 1.1263, + "step": 8560 + }, + { + "epoch": 0.5367734654210295, + "grad_norm": 2.7272024154663086, + "learning_rate": 9.295907777744088e-06, + "loss": 0.9377, + "step": 8561 + }, + { + "epoch": 0.5368361652768199, + "grad_norm": 3.168773651123047, + "learning_rate": 9.293882067763966e-06, + "loss": 0.9779, + "step": 8562 + }, + { + "epoch": 
0.5368988651326102, + "grad_norm": 2.9617197513580322, + "learning_rate": 9.291856386904182e-06, + "loss": 1.1901, + "step": 8563 + }, + { + "epoch": 0.5369615649884005, + "grad_norm": 3.143247127532959, + "learning_rate": 9.289830735248269e-06, + "loss": 1.2248, + "step": 8564 + }, + { + "epoch": 0.5370242648441909, + "grad_norm": 3.4148571491241455, + "learning_rate": 9.28780511287977e-06, + "loss": 1.0873, + "step": 8565 + }, + { + "epoch": 0.5370869646999812, + "grad_norm": 3.6021316051483154, + "learning_rate": 9.285779519882214e-06, + "loss": 0.9738, + "step": 8566 + }, + { + "epoch": 0.5371496645557715, + "grad_norm": 3.885265588760376, + "learning_rate": 9.283753956339148e-06, + "loss": 0.9898, + "step": 8567 + }, + { + "epoch": 0.5372123644115618, + "grad_norm": 3.391580104827881, + "learning_rate": 9.281728422334095e-06, + "loss": 1.1889, + "step": 8568 + }, + { + "epoch": 0.5372750642673522, + "grad_norm": 2.9206302165985107, + "learning_rate": 9.279702917950597e-06, + "loss": 1.1141, + "step": 8569 + }, + { + "epoch": 0.5373377641231425, + "grad_norm": 3.4022433757781982, + "learning_rate": 9.27767744327218e-06, + "loss": 1.0549, + "step": 8570 + }, + { + "epoch": 0.5374004639789328, + "grad_norm": 3.132762908935547, + "learning_rate": 9.275651998382377e-06, + "loss": 1.0794, + "step": 8571 + }, + { + "epoch": 0.5374631638347231, + "grad_norm": 2.9952964782714844, + "learning_rate": 9.273626583364718e-06, + "loss": 1.1411, + "step": 8572 + }, + { + "epoch": 0.5375258636905135, + "grad_norm": 2.938760995864868, + "learning_rate": 9.27160119830273e-06, + "loss": 1.2416, + "step": 8573 + }, + { + "epoch": 0.5375885635463038, + "grad_norm": 2.8897881507873535, + "learning_rate": 9.26957584327994e-06, + "loss": 1.0537, + "step": 8574 + }, + { + "epoch": 0.5376512634020941, + "grad_norm": 2.9223740100860596, + "learning_rate": 9.267550518379873e-06, + "loss": 1.1026, + "step": 8575 + }, + { + "epoch": 0.5377139632578845, + "grad_norm": 2.9861769676208496, + "learning_rate": 9.265525223686058e-06, + "loss": 1.3554, + "step": 8576 + }, + { + "epoch": 0.5377766631136749, + "grad_norm": 2.925347089767456, + "learning_rate": 9.263499959282012e-06, + "loss": 1.2419, + "step": 8577 + }, + { + "epoch": 0.5378393629694652, + "grad_norm": 3.183289051055908, + "learning_rate": 9.261474725251261e-06, + "loss": 1.0241, + "step": 8578 + }, + { + "epoch": 0.5379020628252555, + "grad_norm": 3.3863584995269775, + "learning_rate": 9.259449521677323e-06, + "loss": 0.9547, + "step": 8579 + }, + { + "epoch": 0.5379647626810459, + "grad_norm": 3.4802908897399902, + "learning_rate": 9.257424348643722e-06, + "loss": 0.9437, + "step": 8580 + }, + { + "epoch": 0.5380274625368362, + "grad_norm": 3.277961492538452, + "learning_rate": 9.255399206233971e-06, + "loss": 1.3117, + "step": 8581 + }, + { + "epoch": 0.5380901623926265, + "grad_norm": 3.1101303100585938, + "learning_rate": 9.25337409453159e-06, + "loss": 1.0902, + "step": 8582 + }, + { + "epoch": 0.5381528622484169, + "grad_norm": 2.919391632080078, + "learning_rate": 9.251349013620095e-06, + "loss": 1.3019, + "step": 8583 + }, + { + "epoch": 0.5382155621042072, + "grad_norm": 3.056436061859131, + "learning_rate": 9.249323963582996e-06, + "loss": 1.0218, + "step": 8584 + }, + { + "epoch": 0.5382782619599975, + "grad_norm": 3.317735433578491, + "learning_rate": 9.247298944503813e-06, + "loss": 1.1052, + "step": 8585 + }, + { + "epoch": 0.5383409618157878, + "grad_norm": 3.044806718826294, + "learning_rate": 9.245273956466054e-06, + "loss": 1.0925, + 
"step": 8586 + }, + { + "epoch": 0.5384036616715782, + "grad_norm": 3.4391517639160156, + "learning_rate": 9.24324899955323e-06, + "loss": 1.1486, + "step": 8587 + }, + { + "epoch": 0.5384663615273685, + "grad_norm": 3.179734230041504, + "learning_rate": 9.241224073848848e-06, + "loss": 1.2251, + "step": 8588 + }, + { + "epoch": 0.5385290613831588, + "grad_norm": 2.9148671627044678, + "learning_rate": 9.23919917943642e-06, + "loss": 1.0504, + "step": 8589 + }, + { + "epoch": 0.5385917612389491, + "grad_norm": 2.9704184532165527, + "learning_rate": 9.237174316399451e-06, + "loss": 1.1397, + "step": 8590 + }, + { + "epoch": 0.5386544610947395, + "grad_norm": 3.1296136379241943, + "learning_rate": 9.235149484821447e-06, + "loss": 1.1203, + "step": 8591 + }, + { + "epoch": 0.5387171609505298, + "grad_norm": 2.906792402267456, + "learning_rate": 9.233124684785911e-06, + "loss": 1.2269, + "step": 8592 + }, + { + "epoch": 0.5387798608063201, + "grad_norm": 3.075791835784912, + "learning_rate": 9.231099916376344e-06, + "loss": 1.2484, + "step": 8593 + }, + { + "epoch": 0.5388425606621104, + "grad_norm": 3.323524236679077, + "learning_rate": 9.229075179676255e-06, + "loss": 1.0407, + "step": 8594 + }, + { + "epoch": 0.5389052605179008, + "grad_norm": 3.150299310684204, + "learning_rate": 9.227050474769136e-06, + "loss": 1.0987, + "step": 8595 + }, + { + "epoch": 0.5389679603736911, + "grad_norm": 3.3040285110473633, + "learning_rate": 9.225025801738491e-06, + "loss": 0.9338, + "step": 8596 + }, + { + "epoch": 0.5390306602294814, + "grad_norm": 2.7948145866394043, + "learning_rate": 9.223001160667814e-06, + "loss": 1.3186, + "step": 8597 + }, + { + "epoch": 0.5390933600852718, + "grad_norm": 3.10762357711792, + "learning_rate": 9.220976551640605e-06, + "loss": 1.0368, + "step": 8598 + }, + { + "epoch": 0.5391560599410621, + "grad_norm": 3.0277822017669678, + "learning_rate": 9.218951974740354e-06, + "loss": 1.1145, + "step": 8599 + }, + { + "epoch": 0.5392187597968525, + "grad_norm": 3.196352243423462, + "learning_rate": 9.216927430050562e-06, + "loss": 1.092, + "step": 8600 + }, + { + "epoch": 0.5392814596526428, + "grad_norm": 3.0888328552246094, + "learning_rate": 9.214902917654713e-06, + "loss": 1.097, + "step": 8601 + }, + { + "epoch": 0.5393441595084332, + "grad_norm": 3.0335941314697266, + "learning_rate": 9.212878437636302e-06, + "loss": 1.07, + "step": 8602 + }, + { + "epoch": 0.5394068593642235, + "grad_norm": 2.950798511505127, + "learning_rate": 9.210853990078821e-06, + "loss": 1.2429, + "step": 8603 + }, + { + "epoch": 0.5394695592200138, + "grad_norm": 2.8929224014282227, + "learning_rate": 9.208829575065754e-06, + "loss": 0.9204, + "step": 8604 + }, + { + "epoch": 0.5395322590758042, + "grad_norm": 3.356966018676758, + "learning_rate": 9.206805192680592e-06, + "loss": 1.0628, + "step": 8605 + }, + { + "epoch": 0.5395949589315945, + "grad_norm": 2.922437906265259, + "learning_rate": 9.204780843006817e-06, + "loss": 1.1059, + "step": 8606 + }, + { + "epoch": 0.5396576587873848, + "grad_norm": 3.341503143310547, + "learning_rate": 9.202756526127916e-06, + "loss": 1.071, + "step": 8607 + }, + { + "epoch": 0.5397203586431751, + "grad_norm": 3.327796697616577, + "learning_rate": 9.200732242127369e-06, + "loss": 1.2728, + "step": 8608 + }, + { + "epoch": 0.5397830584989655, + "grad_norm": 3.3146989345550537, + "learning_rate": 9.198707991088662e-06, + "loss": 1.0711, + "step": 8609 + }, + { + "epoch": 0.5398457583547558, + "grad_norm": 3.033224105834961, + "learning_rate": 
9.196683773095268e-06, + "loss": 1.2781, + "step": 8610 + }, + { + "epoch": 0.5399084582105461, + "grad_norm": 3.113186836242676, + "learning_rate": 9.194659588230675e-06, + "loss": 1.0848, + "step": 8611 + }, + { + "epoch": 0.5399711580663364, + "grad_norm": 3.0456838607788086, + "learning_rate": 9.192635436578352e-06, + "loss": 1.045, + "step": 8612 + }, + { + "epoch": 0.5400338579221268, + "grad_norm": 3.246561050415039, + "learning_rate": 9.190611318221779e-06, + "loss": 1.1113, + "step": 8613 + }, + { + "epoch": 0.5400965577779171, + "grad_norm": 3.1379470825195312, + "learning_rate": 9.188587233244434e-06, + "loss": 1.0619, + "step": 8614 + }, + { + "epoch": 0.5401592576337074, + "grad_norm": 3.0125796794891357, + "learning_rate": 9.186563181729784e-06, + "loss": 1.1483, + "step": 8615 + }, + { + "epoch": 0.5402219574894978, + "grad_norm": 3.0145132541656494, + "learning_rate": 9.184539163761306e-06, + "loss": 0.9758, + "step": 8616 + }, + { + "epoch": 0.5402846573452881, + "grad_norm": 3.051687240600586, + "learning_rate": 9.182515179422466e-06, + "loss": 1.0943, + "step": 8617 + }, + { + "epoch": 0.5403473572010784, + "grad_norm": 3.1850147247314453, + "learning_rate": 9.180491228796738e-06, + "loss": 1.0358, + "step": 8618 + }, + { + "epoch": 0.5404100570568687, + "grad_norm": 2.926557779312134, + "learning_rate": 9.178467311967586e-06, + "loss": 0.9875, + "step": 8619 + }, + { + "epoch": 0.5404727569126591, + "grad_norm": 2.828812837600708, + "learning_rate": 9.17644342901848e-06, + "loss": 1.0786, + "step": 8620 + }, + { + "epoch": 0.5405354567684494, + "grad_norm": 3.9117541313171387, + "learning_rate": 9.17441958003288e-06, + "loss": 1.1007, + "step": 8621 + }, + { + "epoch": 0.5405981566242397, + "grad_norm": 3.080411911010742, + "learning_rate": 9.172395765094255e-06, + "loss": 1.0385, + "step": 8622 + }, + { + "epoch": 0.5406608564800301, + "grad_norm": 3.2517850399017334, + "learning_rate": 9.170371984286063e-06, + "loss": 1.0951, + "step": 8623 + }, + { + "epoch": 0.5407235563358205, + "grad_norm": 2.932229995727539, + "learning_rate": 9.168348237691766e-06, + "loss": 1.0862, + "step": 8624 + }, + { + "epoch": 0.5407862561916108, + "grad_norm": 2.92201828956604, + "learning_rate": 9.166324525394826e-06, + "loss": 1.0144, + "step": 8625 + }, + { + "epoch": 0.5408489560474011, + "grad_norm": 3.5258865356445312, + "learning_rate": 9.164300847478698e-06, + "loss": 0.9594, + "step": 8626 + }, + { + "epoch": 0.5409116559031915, + "grad_norm": 3.1914827823638916, + "learning_rate": 9.162277204026841e-06, + "loss": 1.0594, + "step": 8627 + }, + { + "epoch": 0.5409743557589818, + "grad_norm": 2.98583984375, + "learning_rate": 9.160253595122707e-06, + "loss": 0.9946, + "step": 8628 + }, + { + "epoch": 0.5410370556147721, + "grad_norm": 3.1753106117248535, + "learning_rate": 9.158230020849754e-06, + "loss": 1.1748, + "step": 8629 + }, + { + "epoch": 0.5410997554705624, + "grad_norm": 3.2646102905273438, + "learning_rate": 9.156206481291427e-06, + "loss": 0.9948, + "step": 8630 + }, + { + "epoch": 0.5411624553263528, + "grad_norm": 2.9670708179473877, + "learning_rate": 9.154182976531187e-06, + "loss": 1.1116, + "step": 8631 + }, + { + "epoch": 0.5412251551821431, + "grad_norm": 2.898989200592041, + "learning_rate": 9.152159506652473e-06, + "loss": 1.2445, + "step": 8632 + }, + { + "epoch": 0.5412878550379334, + "grad_norm": 3.5344607830047607, + "learning_rate": 9.150136071738738e-06, + "loss": 0.9864, + "step": 8633 + }, + { + "epoch": 0.5413505548937237, + "grad_norm": 
3.0837242603302, + "learning_rate": 9.148112671873433e-06, + "loss": 1.0054, + "step": 8634 + }, + { + "epoch": 0.5414132547495141, + "grad_norm": 3.1177141666412354, + "learning_rate": 9.146089307139997e-06, + "loss": 1.0, + "step": 8635 + }, + { + "epoch": 0.5414759546053044, + "grad_norm": 2.88808012008667, + "learning_rate": 9.144065977621874e-06, + "loss": 1.1964, + "step": 8636 + }, + { + "epoch": 0.5415386544610947, + "grad_norm": 2.7830464839935303, + "learning_rate": 9.142042683402507e-06, + "loss": 1.1833, + "step": 8637 + }, + { + "epoch": 0.541601354316885, + "grad_norm": 3.259209394454956, + "learning_rate": 9.14001942456534e-06, + "loss": 1.1659, + "step": 8638 + }, + { + "epoch": 0.5416640541726754, + "grad_norm": 3.129565715789795, + "learning_rate": 9.137996201193807e-06, + "loss": 0.9591, + "step": 8639 + }, + { + "epoch": 0.5417267540284657, + "grad_norm": 3.229414939880371, + "learning_rate": 9.13597301337135e-06, + "loss": 1.1733, + "step": 8640 + }, + { + "epoch": 0.541789453884256, + "grad_norm": 3.1280198097229004, + "learning_rate": 9.1339498611814e-06, + "loss": 1.1101, + "step": 8641 + }, + { + "epoch": 0.5418521537400464, + "grad_norm": 2.522782802581787, + "learning_rate": 9.1319267447074e-06, + "loss": 1.141, + "step": 8642 + }, + { + "epoch": 0.5419148535958367, + "grad_norm": 3.2432188987731934, + "learning_rate": 9.129903664032777e-06, + "loss": 1.1581, + "step": 8643 + }, + { + "epoch": 0.541977553451627, + "grad_norm": 2.905735492706299, + "learning_rate": 9.127880619240964e-06, + "loss": 1.1865, + "step": 8644 + }, + { + "epoch": 0.5420402533074173, + "grad_norm": 2.865494728088379, + "learning_rate": 9.125857610415395e-06, + "loss": 1.0715, + "step": 8645 + }, + { + "epoch": 0.5421029531632078, + "grad_norm": 2.734285354614258, + "learning_rate": 9.123834637639493e-06, + "loss": 1.1349, + "step": 8646 + }, + { + "epoch": 0.5421656530189981, + "grad_norm": 3.0312745571136475, + "learning_rate": 9.121811700996693e-06, + "loss": 1.1052, + "step": 8647 + }, + { + "epoch": 0.5422283528747884, + "grad_norm": 3.215275287628174, + "learning_rate": 9.119788800570413e-06, + "loss": 1.1209, + "step": 8648 + }, + { + "epoch": 0.5422910527305788, + "grad_norm": 3.2802722454071045, + "learning_rate": 9.117765936444084e-06, + "loss": 1.0595, + "step": 8649 + }, + { + "epoch": 0.5423537525863691, + "grad_norm": 2.9708542823791504, + "learning_rate": 9.115743108701125e-06, + "loss": 1.214, + "step": 8650 + }, + { + "epoch": 0.5424164524421594, + "grad_norm": 2.8196592330932617, + "learning_rate": 9.113720317424959e-06, + "loss": 1.1683, + "step": 8651 + }, + { + "epoch": 0.5424791522979497, + "grad_norm": 2.5783894062042236, + "learning_rate": 9.111697562699004e-06, + "loss": 1.0668, + "step": 8652 + }, + { + "epoch": 0.5425418521537401, + "grad_norm": 3.1545181274414062, + "learning_rate": 9.109674844606683e-06, + "loss": 1.1954, + "step": 8653 + }, + { + "epoch": 0.5426045520095304, + "grad_norm": 3.001370429992676, + "learning_rate": 9.107652163231409e-06, + "loss": 1.3026, + "step": 8654 + }, + { + "epoch": 0.5426672518653207, + "grad_norm": 3.3602187633514404, + "learning_rate": 9.105629518656597e-06, + "loss": 1.1508, + "step": 8655 + }, + { + "epoch": 0.542729951721111, + "grad_norm": 3.5807979106903076, + "learning_rate": 9.103606910965666e-06, + "loss": 1.1696, + "step": 8656 + }, + { + "epoch": 0.5427926515769014, + "grad_norm": 3.2463786602020264, + "learning_rate": 9.101584340242024e-06, + "loss": 0.9999, + "step": 8657 + }, + { + "epoch": 
0.5428553514326917, + "grad_norm": 3.3297717571258545, + "learning_rate": 9.099561806569084e-06, + "loss": 1.0575, + "step": 8658 + }, + { + "epoch": 0.542918051288482, + "grad_norm": 2.8962061405181885, + "learning_rate": 9.097539310030251e-06, + "loss": 0.9705, + "step": 8659 + }, + { + "epoch": 0.5429807511442724, + "grad_norm": 2.7787821292877197, + "learning_rate": 9.095516850708942e-06, + "loss": 0.9873, + "step": 8660 + }, + { + "epoch": 0.5430434510000627, + "grad_norm": 3.0082221031188965, + "learning_rate": 9.093494428688552e-06, + "loss": 1.1153, + "step": 8661 + }, + { + "epoch": 0.543106150855853, + "grad_norm": 3.1458492279052734, + "learning_rate": 9.091472044052494e-06, + "loss": 1.2567, + "step": 8662 + }, + { + "epoch": 0.5431688507116433, + "grad_norm": 3.194330930709839, + "learning_rate": 9.089449696884167e-06, + "loss": 0.9052, + "step": 8663 + }, + { + "epoch": 0.5432315505674337, + "grad_norm": 3.1370816230773926, + "learning_rate": 9.087427387266975e-06, + "loss": 1.0606, + "step": 8664 + }, + { + "epoch": 0.543294250423224, + "grad_norm": 3.1685078144073486, + "learning_rate": 9.085405115284318e-06, + "loss": 1.1859, + "step": 8665 + }, + { + "epoch": 0.5433569502790143, + "grad_norm": 2.9964208602905273, + "learning_rate": 9.083382881019593e-06, + "loss": 1.1008, + "step": 8666 + }, + { + "epoch": 0.5434196501348046, + "grad_norm": 3.026146650314331, + "learning_rate": 9.0813606845562e-06, + "loss": 0.9709, + "step": 8667 + }, + { + "epoch": 0.543482349990595, + "grad_norm": 3.473905563354492, + "learning_rate": 9.07933852597753e-06, + "loss": 1.1282, + "step": 8668 + }, + { + "epoch": 0.5435450498463854, + "grad_norm": 2.8910272121429443, + "learning_rate": 9.07731640536698e-06, + "loss": 1.1413, + "step": 8669 + }, + { + "epoch": 0.5436077497021757, + "grad_norm": 3.288562774658203, + "learning_rate": 9.075294322807944e-06, + "loss": 1.1292, + "step": 8670 + }, + { + "epoch": 0.5436704495579661, + "grad_norm": 3.198421001434326, + "learning_rate": 9.073272278383809e-06, + "loss": 1.0542, + "step": 8671 + }, + { + "epoch": 0.5437331494137564, + "grad_norm": 2.750854015350342, + "learning_rate": 9.071250272177965e-06, + "loss": 1.1295, + "step": 8672 + }, + { + "epoch": 0.5437958492695467, + "grad_norm": 3.121846914291382, + "learning_rate": 9.069228304273802e-06, + "loss": 1.1417, + "step": 8673 + }, + { + "epoch": 0.543858549125337, + "grad_norm": 3.4534149169921875, + "learning_rate": 9.067206374754703e-06, + "loss": 1.2255, + "step": 8674 + }, + { + "epoch": 0.5439212489811274, + "grad_norm": 2.9047515392303467, + "learning_rate": 9.065184483704052e-06, + "loss": 1.1136, + "step": 8675 + }, + { + "epoch": 0.5439839488369177, + "grad_norm": 3.208742618560791, + "learning_rate": 9.063162631205236e-06, + "loss": 1.0955, + "step": 8676 + }, + { + "epoch": 0.544046648692708, + "grad_norm": 3.0178046226501465, + "learning_rate": 9.061140817341633e-06, + "loss": 0.9925, + "step": 8677 + }, + { + "epoch": 0.5441093485484984, + "grad_norm": 3.0634477138519287, + "learning_rate": 9.059119042196626e-06, + "loss": 1.0013, + "step": 8678 + }, + { + "epoch": 0.5441720484042887, + "grad_norm": 3.0872962474823, + "learning_rate": 9.057097305853589e-06, + "loss": 1.1693, + "step": 8679 + }, + { + "epoch": 0.544234748260079, + "grad_norm": 3.0348122119903564, + "learning_rate": 9.055075608395901e-06, + "loss": 1.1622, + "step": 8680 + }, + { + "epoch": 0.5442974481158693, + "grad_norm": 3.235201120376587, + "learning_rate": 9.053053949906936e-06, + "loss": 1.0175, + "step": 
8681 + }, + { + "epoch": 0.5443601479716597, + "grad_norm": 3.5120863914489746, + "learning_rate": 9.051032330470068e-06, + "loss": 1.0686, + "step": 8682 + }, + { + "epoch": 0.54442284782745, + "grad_norm": 3.266969680786133, + "learning_rate": 9.049010750168667e-06, + "loss": 1.1689, + "step": 8683 + }, + { + "epoch": 0.5444855476832403, + "grad_norm": 3.54927921295166, + "learning_rate": 9.046989209086104e-06, + "loss": 1.1784, + "step": 8684 + }, + { + "epoch": 0.5445482475390306, + "grad_norm": 3.2849721908569336, + "learning_rate": 9.044967707305749e-06, + "loss": 1.2136, + "step": 8685 + }, + { + "epoch": 0.544610947394821, + "grad_norm": 3.1396803855895996, + "learning_rate": 9.042946244910964e-06, + "loss": 1.1035, + "step": 8686 + }, + { + "epoch": 0.5446736472506113, + "grad_norm": 3.1968934535980225, + "learning_rate": 9.040924821985122e-06, + "loss": 1.1342, + "step": 8687 + }, + { + "epoch": 0.5447363471064016, + "grad_norm": 2.927074670791626, + "learning_rate": 9.038903438611579e-06, + "loss": 0.9979, + "step": 8688 + }, + { + "epoch": 0.544799046962192, + "grad_norm": 3.292024850845337, + "learning_rate": 9.036882094873703e-06, + "loss": 1.0417, + "step": 8689 + }, + { + "epoch": 0.5448617468179823, + "grad_norm": 3.1118247509002686, + "learning_rate": 9.034860790854848e-06, + "loss": 1.0669, + "step": 8690 + }, + { + "epoch": 0.5449244466737726, + "grad_norm": 3.2103559970855713, + "learning_rate": 9.032839526638382e-06, + "loss": 1.0929, + "step": 8691 + }, + { + "epoch": 0.5449871465295629, + "grad_norm": 3.270900249481201, + "learning_rate": 9.03081830230765e-06, + "loss": 1.0718, + "step": 8692 + }, + { + "epoch": 0.5450498463853534, + "grad_norm": 3.1769232749938965, + "learning_rate": 9.028797117946018e-06, + "loss": 1.1339, + "step": 8693 + }, + { + "epoch": 0.5451125462411437, + "grad_norm": 3.0360639095306396, + "learning_rate": 9.026775973636832e-06, + "loss": 0.9424, + "step": 8694 + }, + { + "epoch": 0.545175246096934, + "grad_norm": 3.1536035537719727, + "learning_rate": 9.024754869463448e-06, + "loss": 1.0151, + "step": 8695 + }, + { + "epoch": 0.5452379459527243, + "grad_norm": 2.979846239089966, + "learning_rate": 9.022733805509216e-06, + "loss": 1.17, + "step": 8696 + }, + { + "epoch": 0.5453006458085147, + "grad_norm": 3.197101593017578, + "learning_rate": 9.020712781857485e-06, + "loss": 1.0026, + "step": 8697 + }, + { + "epoch": 0.545363345664305, + "grad_norm": 3.1425390243530273, + "learning_rate": 9.018691798591602e-06, + "loss": 1.1137, + "step": 8698 + }, + { + "epoch": 0.5454260455200953, + "grad_norm": 3.551980972290039, + "learning_rate": 9.016670855794911e-06, + "loss": 0.9252, + "step": 8699 + }, + { + "epoch": 0.5454887453758857, + "grad_norm": 3.6381726264953613, + "learning_rate": 9.01464995355076e-06, + "loss": 1.0454, + "step": 8700 + }, + { + "epoch": 0.545551445231676, + "grad_norm": 3.2312159538269043, + "learning_rate": 9.012629091942482e-06, + "loss": 1.0548, + "step": 8701 + }, + { + "epoch": 0.5456141450874663, + "grad_norm": 3.199373960494995, + "learning_rate": 9.01060827105343e-06, + "loss": 1.1401, + "step": 8702 + }, + { + "epoch": 0.5456768449432566, + "grad_norm": 3.328124523162842, + "learning_rate": 9.008587490966932e-06, + "loss": 1.2606, + "step": 8703 + }, + { + "epoch": 0.545739544799047, + "grad_norm": 3.335314989089966, + "learning_rate": 9.00656675176633e-06, + "loss": 1.0668, + "step": 8704 + }, + { + "epoch": 0.5458022446548373, + "grad_norm": 3.151732921600342, + "learning_rate": 9.004546053534958e-06, + "loss": 
1.0297, + "step": 8705 + }, + { + "epoch": 0.5458649445106276, + "grad_norm": 3.009521007537842, + "learning_rate": 9.00252539635615e-06, + "loss": 1.0786, + "step": 8706 + }, + { + "epoch": 0.5459276443664179, + "grad_norm": 3.2590763568878174, + "learning_rate": 9.00050478031324e-06, + "loss": 1.1126, + "step": 8707 + }, + { + "epoch": 0.5459903442222083, + "grad_norm": 3.4632985591888428, + "learning_rate": 8.998484205489554e-06, + "loss": 1.0035, + "step": 8708 + }, + { + "epoch": 0.5460530440779986, + "grad_norm": 3.1393754482269287, + "learning_rate": 8.996463671968427e-06, + "loss": 1.1853, + "step": 8709 + }, + { + "epoch": 0.5461157439337889, + "grad_norm": 2.82234263420105, + "learning_rate": 8.994443179833178e-06, + "loss": 1.3106, + "step": 8710 + }, + { + "epoch": 0.5461784437895792, + "grad_norm": 3.4246976375579834, + "learning_rate": 8.99242272916714e-06, + "loss": 1.0092, + "step": 8711 + }, + { + "epoch": 0.5462411436453696, + "grad_norm": 3.359529495239258, + "learning_rate": 8.990402320053628e-06, + "loss": 1.029, + "step": 8712 + }, + { + "epoch": 0.5463038435011599, + "grad_norm": 2.935332775115967, + "learning_rate": 8.988381952575973e-06, + "loss": 1.2204, + "step": 8713 + }, + { + "epoch": 0.5463665433569502, + "grad_norm": 2.9331204891204834, + "learning_rate": 8.986361626817487e-06, + "loss": 1.0536, + "step": 8714 + }, + { + "epoch": 0.5464292432127406, + "grad_norm": 3.3966803550720215, + "learning_rate": 8.984341342861492e-06, + "loss": 1.1906, + "step": 8715 + }, + { + "epoch": 0.546491943068531, + "grad_norm": 3.018721580505371, + "learning_rate": 8.982321100791308e-06, + "loss": 1.2482, + "step": 8716 + }, + { + "epoch": 0.5465546429243213, + "grad_norm": 3.0825064182281494, + "learning_rate": 8.980300900690243e-06, + "loss": 1.1323, + "step": 8717 + }, + { + "epoch": 0.5466173427801116, + "grad_norm": 2.7929654121398926, + "learning_rate": 8.978280742641614e-06, + "loss": 1.0973, + "step": 8718 + }, + { + "epoch": 0.546680042635902, + "grad_norm": 3.1841304302215576, + "learning_rate": 8.97626062672873e-06, + "loss": 1.2085, + "step": 8719 + }, + { + "epoch": 0.5467427424916923, + "grad_norm": 3.2603931427001953, + "learning_rate": 8.974240553034907e-06, + "loss": 1.21, + "step": 8720 + }, + { + "epoch": 0.5468054423474826, + "grad_norm": 3.1730411052703857, + "learning_rate": 8.972220521643445e-06, + "loss": 0.9483, + "step": 8721 + }, + { + "epoch": 0.546868142203273, + "grad_norm": 3.1331467628479004, + "learning_rate": 8.970200532637654e-06, + "loss": 1.0311, + "step": 8722 + }, + { + "epoch": 0.5469308420590633, + "grad_norm": 3.300356149673462, + "learning_rate": 8.968180586100838e-06, + "loss": 1.1582, + "step": 8723 + }, + { + "epoch": 0.5469935419148536, + "grad_norm": 3.014101505279541, + "learning_rate": 8.966160682116301e-06, + "loss": 0.983, + "step": 8724 + }, + { + "epoch": 0.5470562417706439, + "grad_norm": 2.9728176593780518, + "learning_rate": 8.96414082076734e-06, + "loss": 1.102, + "step": 8725 + }, + { + "epoch": 0.5471189416264343, + "grad_norm": 3.417938709259033, + "learning_rate": 8.962121002137258e-06, + "loss": 1.135, + "step": 8726 + }, + { + "epoch": 0.5471816414822246, + "grad_norm": 3.2460358142852783, + "learning_rate": 8.960101226309352e-06, + "loss": 1.0551, + "step": 8727 + }, + { + "epoch": 0.5472443413380149, + "grad_norm": 3.443619728088379, + "learning_rate": 8.958081493366916e-06, + "loss": 1.0337, + "step": 8728 + }, + { + "epoch": 0.5473070411938052, + "grad_norm": 3.3671936988830566, + "learning_rate": 
8.956061803393246e-06, + "loss": 1.2055, + "step": 8729 + }, + { + "epoch": 0.5473697410495956, + "grad_norm": 3.1408066749572754, + "learning_rate": 8.954042156471632e-06, + "loss": 1.1237, + "step": 8730 + }, + { + "epoch": 0.5474324409053859, + "grad_norm": 2.9691097736358643, + "learning_rate": 8.952022552685366e-06, + "loss": 1.2486, + "step": 8731 + }, + { + "epoch": 0.5474951407611762, + "grad_norm": 3.1564671993255615, + "learning_rate": 8.950002992117734e-06, + "loss": 1.0501, + "step": 8732 + }, + { + "epoch": 0.5475578406169666, + "grad_norm": 3.4585797786712646, + "learning_rate": 8.947983474852027e-06, + "loss": 1.1375, + "step": 8733 + }, + { + "epoch": 0.5476205404727569, + "grad_norm": 3.1421923637390137, + "learning_rate": 8.945964000971525e-06, + "loss": 1.3598, + "step": 8734 + }, + { + "epoch": 0.5476832403285472, + "grad_norm": 3.207058906555176, + "learning_rate": 8.943944570559514e-06, + "loss": 1.0249, + "step": 8735 + }, + { + "epoch": 0.5477459401843375, + "grad_norm": 3.2366209030151367, + "learning_rate": 8.941925183699274e-06, + "loss": 1.0399, + "step": 8736 + }, + { + "epoch": 0.5478086400401279, + "grad_norm": 3.554527521133423, + "learning_rate": 8.939905840474087e-06, + "loss": 1.0929, + "step": 8737 + }, + { + "epoch": 0.5478713398959182, + "grad_norm": 3.1229546070098877, + "learning_rate": 8.93788654096723e-06, + "loss": 1.0858, + "step": 8738 + }, + { + "epoch": 0.5479340397517086, + "grad_norm": 2.8411896228790283, + "learning_rate": 8.935867285261977e-06, + "loss": 1.1454, + "step": 8739 + }, + { + "epoch": 0.547996739607499, + "grad_norm": 3.1269912719726562, + "learning_rate": 8.933848073441606e-06, + "loss": 1.0726, + "step": 8740 + }, + { + "epoch": 0.5480594394632893, + "grad_norm": 3.322097063064575, + "learning_rate": 8.931828905589385e-06, + "loss": 1.0362, + "step": 8741 + }, + { + "epoch": 0.5481221393190796, + "grad_norm": 3.0889785289764404, + "learning_rate": 8.929809781788588e-06, + "loss": 1.1021, + "step": 8742 + }, + { + "epoch": 0.5481848391748699, + "grad_norm": 3.1634230613708496, + "learning_rate": 8.92779070212248e-06, + "loss": 1.1613, + "step": 8743 + }, + { + "epoch": 0.5482475390306603, + "grad_norm": 3.5109758377075195, + "learning_rate": 8.925771666674333e-06, + "loss": 1.1526, + "step": 8744 + }, + { + "epoch": 0.5483102388864506, + "grad_norm": 2.7849984169006348, + "learning_rate": 8.923752675527408e-06, + "loss": 1.1275, + "step": 8745 + }, + { + "epoch": 0.5483729387422409, + "grad_norm": 2.8731863498687744, + "learning_rate": 8.921733728764967e-06, + "loss": 1.0682, + "step": 8746 + }, + { + "epoch": 0.5484356385980312, + "grad_norm": 2.9470012187957764, + "learning_rate": 8.91971482647028e-06, + "loss": 1.1858, + "step": 8747 + }, + { + "epoch": 0.5484983384538216, + "grad_norm": 2.8265719413757324, + "learning_rate": 8.917695968726595e-06, + "loss": 1.1715, + "step": 8748 + }, + { + "epoch": 0.5485610383096119, + "grad_norm": 3.8180325031280518, + "learning_rate": 8.91567715561718e-06, + "loss": 1.1046, + "step": 8749 + }, + { + "epoch": 0.5486237381654022, + "grad_norm": 3.1191182136535645, + "learning_rate": 8.913658387225283e-06, + "loss": 0.9894, + "step": 8750 + }, + { + "epoch": 0.5486864380211925, + "grad_norm": 3.2528412342071533, + "learning_rate": 8.911639663634166e-06, + "loss": 1.0989, + "step": 8751 + }, + { + "epoch": 0.5487491378769829, + "grad_norm": 3.26174259185791, + "learning_rate": 8.909620984927072e-06, + "loss": 1.1695, + "step": 8752 + }, + { + "epoch": 0.5488118377327732, + "grad_norm": 
3.3094685077667236, + "learning_rate": 8.90760235118726e-06, + "loss": 1.0637, + "step": 8753 + }, + { + "epoch": 0.5488745375885635, + "grad_norm": 3.1167848110198975, + "learning_rate": 8.90558376249797e-06, + "loss": 1.1211, + "step": 8754 + }, + { + "epoch": 0.5489372374443539, + "grad_norm": 2.7919580936431885, + "learning_rate": 8.903565218942458e-06, + "loss": 1.1988, + "step": 8755 + }, + { + "epoch": 0.5489999373001442, + "grad_norm": 3.387448310852051, + "learning_rate": 8.901546720603961e-06, + "loss": 1.0333, + "step": 8756 + }, + { + "epoch": 0.5490626371559345, + "grad_norm": 3.197293996810913, + "learning_rate": 8.899528267565728e-06, + "loss": 1.0595, + "step": 8757 + }, + { + "epoch": 0.5491253370117248, + "grad_norm": 3.13950252532959, + "learning_rate": 8.897509859910996e-06, + "loss": 1.0565, + "step": 8758 + }, + { + "epoch": 0.5491880368675152, + "grad_norm": 3.1350953578948975, + "learning_rate": 8.895491497723004e-06, + "loss": 1.1455, + "step": 8759 + }, + { + "epoch": 0.5492507367233055, + "grad_norm": 3.3093485832214355, + "learning_rate": 8.893473181084993e-06, + "loss": 1.0774, + "step": 8760 + }, + { + "epoch": 0.5493134365790958, + "grad_norm": 2.937831163406372, + "learning_rate": 8.891454910080195e-06, + "loss": 0.9358, + "step": 8761 + }, + { + "epoch": 0.5493761364348863, + "grad_norm": 3.108919143676758, + "learning_rate": 8.889436684791848e-06, + "loss": 1.2215, + "step": 8762 + }, + { + "epoch": 0.5494388362906766, + "grad_norm": 3.126028537750244, + "learning_rate": 8.887418505303176e-06, + "loss": 1.2096, + "step": 8763 + }, + { + "epoch": 0.5495015361464669, + "grad_norm": 3.34865665435791, + "learning_rate": 8.885400371697418e-06, + "loss": 1.2744, + "step": 8764 + }, + { + "epoch": 0.5495642360022572, + "grad_norm": 2.990025043487549, + "learning_rate": 8.883382284057794e-06, + "loss": 1.2012, + "step": 8765 + }, + { + "epoch": 0.5496269358580476, + "grad_norm": 3.1116671562194824, + "learning_rate": 8.881364242467532e-06, + "loss": 1.1644, + "step": 8766 + }, + { + "epoch": 0.5496896357138379, + "grad_norm": 3.259761333465576, + "learning_rate": 8.879346247009862e-06, + "loss": 0.9737, + "step": 8767 + }, + { + "epoch": 0.5497523355696282, + "grad_norm": 3.4468555450439453, + "learning_rate": 8.877328297767999e-06, + "loss": 1.2153, + "step": 8768 + }, + { + "epoch": 0.5498150354254185, + "grad_norm": 3.2226383686065674, + "learning_rate": 8.875310394825167e-06, + "loss": 1.2091, + "step": 8769 + }, + { + "epoch": 0.5498777352812089, + "grad_norm": 3.4329216480255127, + "learning_rate": 8.873292538264583e-06, + "loss": 1.0524, + "step": 8770 + }, + { + "epoch": 0.5499404351369992, + "grad_norm": 3.5885088443756104, + "learning_rate": 8.871274728169466e-06, + "loss": 1.1398, + "step": 8771 + }, + { + "epoch": 0.5500031349927895, + "grad_norm": 2.969011068344116, + "learning_rate": 8.869256964623024e-06, + "loss": 1.0593, + "step": 8772 + }, + { + "epoch": 0.5500658348485798, + "grad_norm": 3.148020029067993, + "learning_rate": 8.867239247708479e-06, + "loss": 1.1358, + "step": 8773 + }, + { + "epoch": 0.5501285347043702, + "grad_norm": 3.069206714630127, + "learning_rate": 8.865221577509034e-06, + "loss": 1.1231, + "step": 8774 + }, + { + "epoch": 0.5501912345601605, + "grad_norm": 3.019866704940796, + "learning_rate": 8.863203954107902e-06, + "loss": 1.1309, + "step": 8775 + }, + { + "epoch": 0.5502539344159508, + "grad_norm": 3.0368142127990723, + "learning_rate": 8.861186377588286e-06, + "loss": 1.1302, + "step": 8776 + }, + { + "epoch": 
0.5503166342717412, + "grad_norm": 2.8199410438537598, + "learning_rate": 8.859168848033395e-06, + "loss": 1.0792, + "step": 8777 + }, + { + "epoch": 0.5503793341275315, + "grad_norm": 3.3299083709716797, + "learning_rate": 8.857151365526431e-06, + "loss": 1.094, + "step": 8778 + }, + { + "epoch": 0.5504420339833218, + "grad_norm": 3.341815233230591, + "learning_rate": 8.855133930150594e-06, + "loss": 1.1953, + "step": 8779 + }, + { + "epoch": 0.5505047338391121, + "grad_norm": 3.2909326553344727, + "learning_rate": 8.853116541989087e-06, + "loss": 1.1636, + "step": 8780 + }, + { + "epoch": 0.5505674336949025, + "grad_norm": 3.2028820514678955, + "learning_rate": 8.851099201125098e-06, + "loss": 1.0404, + "step": 8781 + }, + { + "epoch": 0.5506301335506928, + "grad_norm": 3.100595474243164, + "learning_rate": 8.849081907641834e-06, + "loss": 1.0849, + "step": 8782 + }, + { + "epoch": 0.5506928334064831, + "grad_norm": 3.1916162967681885, + "learning_rate": 8.847064661622477e-06, + "loss": 0.9948, + "step": 8783 + }, + { + "epoch": 0.5507555332622734, + "grad_norm": 3.2290143966674805, + "learning_rate": 8.845047463150229e-06, + "loss": 0.9769, + "step": 8784 + }, + { + "epoch": 0.5508182331180639, + "grad_norm": 2.68078875541687, + "learning_rate": 8.84303031230827e-06, + "loss": 1.1209, + "step": 8785 + }, + { + "epoch": 0.5508809329738542, + "grad_norm": 2.9449803829193115, + "learning_rate": 8.841013209179793e-06, + "loss": 1.1543, + "step": 8786 + }, + { + "epoch": 0.5509436328296445, + "grad_norm": 3.02451491355896, + "learning_rate": 8.838996153847982e-06, + "loss": 1.1123, + "step": 8787 + }, + { + "epoch": 0.5510063326854349, + "grad_norm": 3.483020067214966, + "learning_rate": 8.836979146396019e-06, + "loss": 1.0844, + "step": 8788 + }, + { + "epoch": 0.5510690325412252, + "grad_norm": 3.2331159114837646, + "learning_rate": 8.834962186907088e-06, + "loss": 1.0641, + "step": 8789 + }, + { + "epoch": 0.5511317323970155, + "grad_norm": 3.079172134399414, + "learning_rate": 8.832945275464367e-06, + "loss": 1.2622, + "step": 8790 + }, + { + "epoch": 0.5511944322528058, + "grad_norm": 3.3599748611450195, + "learning_rate": 8.830928412151033e-06, + "loss": 1.1082, + "step": 8791 + }, + { + "epoch": 0.5512571321085962, + "grad_norm": 3.260063409805298, + "learning_rate": 8.828911597050263e-06, + "loss": 1.158, + "step": 8792 + }, + { + "epoch": 0.5513198319643865, + "grad_norm": 2.919741630554199, + "learning_rate": 8.826894830245232e-06, + "loss": 1.1831, + "step": 8793 + }, + { + "epoch": 0.5513825318201768, + "grad_norm": 3.4550278186798096, + "learning_rate": 8.824878111819107e-06, + "loss": 1.0397, + "step": 8794 + }, + { + "epoch": 0.5514452316759672, + "grad_norm": 2.865499258041382, + "learning_rate": 8.822861441855062e-06, + "loss": 1.232, + "step": 8795 + }, + { + "epoch": 0.5515079315317575, + "grad_norm": 3.1909937858581543, + "learning_rate": 8.820844820436259e-06, + "loss": 0.9645, + "step": 8796 + }, + { + "epoch": 0.5515706313875478, + "grad_norm": 2.8315811157226562, + "learning_rate": 8.818828247645868e-06, + "loss": 1.2028, + "step": 8797 + }, + { + "epoch": 0.5516333312433381, + "grad_norm": 3.43609881401062, + "learning_rate": 8.816811723567055e-06, + "loss": 1.1871, + "step": 8798 + }, + { + "epoch": 0.5516960310991285, + "grad_norm": 3.102858066558838, + "learning_rate": 8.814795248282974e-06, + "loss": 1.1699, + "step": 8799 + }, + { + "epoch": 0.5517587309549188, + "grad_norm": 3.084294080734253, + "learning_rate": 8.812778821876793e-06, + "loss": 1.1114, + 
"step": 8800 + }, + { + "epoch": 0.5518214308107091, + "grad_norm": 2.765110492706299, + "learning_rate": 8.810762444431662e-06, + "loss": 1.3845, + "step": 8801 + }, + { + "epoch": 0.5518841306664994, + "grad_norm": 3.0771007537841797, + "learning_rate": 8.808746116030742e-06, + "loss": 1.1438, + "step": 8802 + }, + { + "epoch": 0.5519468305222898, + "grad_norm": 2.9594414234161377, + "learning_rate": 8.806729836757182e-06, + "loss": 1.0653, + "step": 8803 + }, + { + "epoch": 0.5520095303780801, + "grad_norm": 3.0573952198028564, + "learning_rate": 8.804713606694138e-06, + "loss": 1.0538, + "step": 8804 + }, + { + "epoch": 0.5520722302338704, + "grad_norm": 3.1177752017974854, + "learning_rate": 8.802697425924754e-06, + "loss": 1.148, + "step": 8805 + }, + { + "epoch": 0.5521349300896607, + "grad_norm": 3.129190683364868, + "learning_rate": 8.800681294532184e-06, + "loss": 1.0041, + "step": 8806 + }, + { + "epoch": 0.5521976299454511, + "grad_norm": 3.073721170425415, + "learning_rate": 8.798665212599566e-06, + "loss": 1.2955, + "step": 8807 + }, + { + "epoch": 0.5522603298012415, + "grad_norm": 2.99929141998291, + "learning_rate": 8.796649180210048e-06, + "loss": 0.9398, + "step": 8808 + }, + { + "epoch": 0.5523230296570318, + "grad_norm": 3.179208517074585, + "learning_rate": 8.79463319744677e-06, + "loss": 0.9018, + "step": 8809 + }, + { + "epoch": 0.5523857295128222, + "grad_norm": 2.896949529647827, + "learning_rate": 8.792617264392874e-06, + "loss": 1.1974, + "step": 8810 + }, + { + "epoch": 0.5524484293686125, + "grad_norm": 2.9991025924682617, + "learning_rate": 8.790601381131493e-06, + "loss": 1.0221, + "step": 8811 + }, + { + "epoch": 0.5525111292244028, + "grad_norm": 3.467486619949341, + "learning_rate": 8.788585547745764e-06, + "loss": 1.1172, + "step": 8812 + }, + { + "epoch": 0.5525738290801931, + "grad_norm": 3.175173759460449, + "learning_rate": 8.786569764318821e-06, + "loss": 1.0443, + "step": 8813 + }, + { + "epoch": 0.5526365289359835, + "grad_norm": 3.4258904457092285, + "learning_rate": 8.784554030933794e-06, + "loss": 1.0607, + "step": 8814 + }, + { + "epoch": 0.5526992287917738, + "grad_norm": 3.2039999961853027, + "learning_rate": 8.782538347673813e-06, + "loss": 1.0148, + "step": 8815 + }, + { + "epoch": 0.5527619286475641, + "grad_norm": 2.744457483291626, + "learning_rate": 8.780522714622e-06, + "loss": 1.1161, + "step": 8816 + }, + { + "epoch": 0.5528246285033545, + "grad_norm": 3.3236653804779053, + "learning_rate": 8.778507131861486e-06, + "loss": 1.2699, + "step": 8817 + }, + { + "epoch": 0.5528873283591448, + "grad_norm": 2.782686233520508, + "learning_rate": 8.77649159947539e-06, + "loss": 1.1082, + "step": 8818 + }, + { + "epoch": 0.5529500282149351, + "grad_norm": 3.3746981620788574, + "learning_rate": 8.774476117546833e-06, + "loss": 1.13, + "step": 8819 + }, + { + "epoch": 0.5530127280707254, + "grad_norm": 2.9377613067626953, + "learning_rate": 8.772460686158937e-06, + "loss": 1.1572, + "step": 8820 + }, + { + "epoch": 0.5530754279265158, + "grad_norm": 3.2163853645324707, + "learning_rate": 8.770445305394815e-06, + "loss": 1.0179, + "step": 8821 + }, + { + "epoch": 0.5531381277823061, + "grad_norm": 3.0310611724853516, + "learning_rate": 8.768429975337583e-06, + "loss": 1.1593, + "step": 8822 + }, + { + "epoch": 0.5532008276380964, + "grad_norm": 3.1118619441986084, + "learning_rate": 8.76641469607035e-06, + "loss": 1.0512, + "step": 8823 + }, + { + "epoch": 0.5532635274938867, + "grad_norm": 3.1444196701049805, + "learning_rate": 
8.76439946767623e-06, + "loss": 1.1336, + "step": 8824 + }, + { + "epoch": 0.5533262273496771, + "grad_norm": 3.031865358352661, + "learning_rate": 8.76238429023833e-06, + "loss": 1.1151, + "step": 8825 + }, + { + "epoch": 0.5533889272054674, + "grad_norm": 3.127397298812866, + "learning_rate": 8.760369163839759e-06, + "loss": 1.011, + "step": 8826 + }, + { + "epoch": 0.5534516270612577, + "grad_norm": 3.2375192642211914, + "learning_rate": 8.758354088563613e-06, + "loss": 0.9803, + "step": 8827 + }, + { + "epoch": 0.553514326917048, + "grad_norm": 3.4561092853546143, + "learning_rate": 8.756339064493e-06, + "loss": 1.0634, + "step": 8828 + }, + { + "epoch": 0.5535770267728384, + "grad_norm": 3.1406664848327637, + "learning_rate": 8.754324091711021e-06, + "loss": 1.0196, + "step": 8829 + }, + { + "epoch": 0.5536397266286287, + "grad_norm": 3.0757079124450684, + "learning_rate": 8.752309170300767e-06, + "loss": 1.2278, + "step": 8830 + }, + { + "epoch": 0.5537024264844191, + "grad_norm": 2.9541094303131104, + "learning_rate": 8.75029430034534e-06, + "loss": 1.0888, + "step": 8831 + }, + { + "epoch": 0.5537651263402095, + "grad_norm": 3.024289608001709, + "learning_rate": 8.74827948192783e-06, + "loss": 1.0386, + "step": 8832 + }, + { + "epoch": 0.5538278261959998, + "grad_norm": 3.399778366088867, + "learning_rate": 8.74626471513133e-06, + "loss": 0.9918, + "step": 8833 + }, + { + "epoch": 0.5538905260517901, + "grad_norm": 3.2374072074890137, + "learning_rate": 8.744250000038925e-06, + "loss": 1.0241, + "step": 8834 + }, + { + "epoch": 0.5539532259075804, + "grad_norm": 3.2164649963378906, + "learning_rate": 8.742235336733708e-06, + "loss": 1.1597, + "step": 8835 + }, + { + "epoch": 0.5540159257633708, + "grad_norm": 3.2289931774139404, + "learning_rate": 8.740220725298757e-06, + "loss": 1.0647, + "step": 8836 + }, + { + "epoch": 0.5540786256191611, + "grad_norm": 3.231088876724243, + "learning_rate": 8.738206165817162e-06, + "loss": 0.9908, + "step": 8837 + }, + { + "epoch": 0.5541413254749514, + "grad_norm": 3.118917226791382, + "learning_rate": 8.736191658371998e-06, + "loss": 1.0106, + "step": 8838 + }, + { + "epoch": 0.5542040253307418, + "grad_norm": 3.1323297023773193, + "learning_rate": 8.734177203046343e-06, + "loss": 1.1125, + "step": 8839 + }, + { + "epoch": 0.5542667251865321, + "grad_norm": 2.9980812072753906, + "learning_rate": 8.732162799923279e-06, + "loss": 1.1068, + "step": 8840 + }, + { + "epoch": 0.5543294250423224, + "grad_norm": 3.4197158813476562, + "learning_rate": 8.730148449085875e-06, + "loss": 1.1307, + "step": 8841 + }, + { + "epoch": 0.5543921248981127, + "grad_norm": 3.210832357406616, + "learning_rate": 8.728134150617205e-06, + "loss": 0.9601, + "step": 8842 + }, + { + "epoch": 0.5544548247539031, + "grad_norm": 3.0592615604400635, + "learning_rate": 8.726119904600337e-06, + "loss": 1.1341, + "step": 8843 + }, + { + "epoch": 0.5545175246096934, + "grad_norm": 3.0359466075897217, + "learning_rate": 8.724105711118342e-06, + "loss": 1.0987, + "step": 8844 + }, + { + "epoch": 0.5545802244654837, + "grad_norm": 3.5372564792633057, + "learning_rate": 8.72209157025428e-06, + "loss": 1.1309, + "step": 8845 + }, + { + "epoch": 0.554642924321274, + "grad_norm": 3.104907989501953, + "learning_rate": 8.720077482091221e-06, + "loss": 1.0596, + "step": 8846 + }, + { + "epoch": 0.5547056241770644, + "grad_norm": 2.959063768386841, + "learning_rate": 8.71806344671222e-06, + "loss": 1.1851, + "step": 8847 + }, + { + "epoch": 0.5547683240328547, + "grad_norm": 
3.1982810497283936, + "learning_rate": 8.716049464200338e-06, + "loss": 1.1638, + "step": 8848 + }, + { + "epoch": 0.554831023888645, + "grad_norm": 3.068434476852417, + "learning_rate": 8.714035534638633e-06, + "loss": 1.302, + "step": 8849 + }, + { + "epoch": 0.5548937237444354, + "grad_norm": 2.880091905593872, + "learning_rate": 8.71202165811016e-06, + "loss": 1.0357, + "step": 8850 + }, + { + "epoch": 0.5549564236002257, + "grad_norm": 3.1379222869873047, + "learning_rate": 8.71000783469797e-06, + "loss": 1.0719, + "step": 8851 + }, + { + "epoch": 0.555019123456016, + "grad_norm": 2.800123691558838, + "learning_rate": 8.707994064485112e-06, + "loss": 1.193, + "step": 8852 + }, + { + "epoch": 0.5550818233118063, + "grad_norm": 3.327401876449585, + "learning_rate": 8.705980347554636e-06, + "loss": 1.0239, + "step": 8853 + }, + { + "epoch": 0.5551445231675967, + "grad_norm": 2.7519543170928955, + "learning_rate": 8.703966683989586e-06, + "loss": 1.0512, + "step": 8854 + }, + { + "epoch": 0.5552072230233871, + "grad_norm": 3.197408437728882, + "learning_rate": 8.701953073873008e-06, + "loss": 1.1519, + "step": 8855 + }, + { + "epoch": 0.5552699228791774, + "grad_norm": 3.149639129638672, + "learning_rate": 8.69993951728794e-06, + "loss": 1.0702, + "step": 8856 + }, + { + "epoch": 0.5553326227349678, + "grad_norm": 3.0367190837860107, + "learning_rate": 8.697926014317426e-06, + "loss": 1.0682, + "step": 8857 + }, + { + "epoch": 0.5553953225907581, + "grad_norm": 3.01476788520813, + "learning_rate": 8.695912565044497e-06, + "loss": 1.2834, + "step": 8858 + }, + { + "epoch": 0.5554580224465484, + "grad_norm": 3.1301605701446533, + "learning_rate": 8.69389916955219e-06, + "loss": 1.1218, + "step": 8859 + }, + { + "epoch": 0.5555207223023387, + "grad_norm": 2.698758602142334, + "learning_rate": 8.691885827923541e-06, + "loss": 1.1508, + "step": 8860 + }, + { + "epoch": 0.5555834221581291, + "grad_norm": 2.9541385173797607, + "learning_rate": 8.689872540241576e-06, + "loss": 0.8853, + "step": 8861 + }, + { + "epoch": 0.5556461220139194, + "grad_norm": 3.142571210861206, + "learning_rate": 8.687859306589326e-06, + "loss": 1.1679, + "step": 8862 + }, + { + "epoch": 0.5557088218697097, + "grad_norm": 2.791254997253418, + "learning_rate": 8.685846127049816e-06, + "loss": 1.1913, + "step": 8863 + }, + { + "epoch": 0.5557715217255, + "grad_norm": 3.1113317012786865, + "learning_rate": 8.683833001706068e-06, + "loss": 1.1661, + "step": 8864 + }, + { + "epoch": 0.5558342215812904, + "grad_norm": 3.389608383178711, + "learning_rate": 8.681819930641104e-06, + "loss": 1.1444, + "step": 8865 + }, + { + "epoch": 0.5558969214370807, + "grad_norm": 3.1396865844726562, + "learning_rate": 8.679806913937944e-06, + "loss": 1.1133, + "step": 8866 + }, + { + "epoch": 0.555959621292871, + "grad_norm": 3.2332136631011963, + "learning_rate": 8.677793951679605e-06, + "loss": 1.0215, + "step": 8867 + }, + { + "epoch": 0.5560223211486613, + "grad_norm": 2.799903392791748, + "learning_rate": 8.675781043949101e-06, + "loss": 1.1447, + "step": 8868 + }, + { + "epoch": 0.5560850210044517, + "grad_norm": 3.129610776901245, + "learning_rate": 8.673768190829444e-06, + "loss": 1.2364, + "step": 8869 + }, + { + "epoch": 0.556147720860242, + "grad_norm": 3.147993803024292, + "learning_rate": 8.671755392403645e-06, + "loss": 1.0414, + "step": 8870 + }, + { + "epoch": 0.5562104207160323, + "grad_norm": 3.097548007965088, + "learning_rate": 8.669742648754714e-06, + "loss": 1.3388, + "step": 8871 + }, + { + "epoch": 
0.5562731205718227, + "grad_norm": 3.0807604789733887, + "learning_rate": 8.667729959965649e-06, + "loss": 1.1008, + "step": 8872 + }, + { + "epoch": 0.556335820427613, + "grad_norm": 3.1571109294891357, + "learning_rate": 8.665717326119462e-06, + "loss": 1.0809, + "step": 8873 + }, + { + "epoch": 0.5563985202834033, + "grad_norm": 2.987335443496704, + "learning_rate": 8.66370474729915e-06, + "loss": 1.1414, + "step": 8874 + }, + { + "epoch": 0.5564612201391936, + "grad_norm": 3.0315520763397217, + "learning_rate": 8.661692223587715e-06, + "loss": 1.1971, + "step": 8875 + }, + { + "epoch": 0.556523919994984, + "grad_norm": 3.2772581577301025, + "learning_rate": 8.659679755068148e-06, + "loss": 1.0471, + "step": 8876 + }, + { + "epoch": 0.5565866198507743, + "grad_norm": 3.1444125175476074, + "learning_rate": 8.657667341823449e-06, + "loss": 1.1343, + "step": 8877 + }, + { + "epoch": 0.5566493197065647, + "grad_norm": 2.6244864463806152, + "learning_rate": 8.655654983936604e-06, + "loss": 1.1468, + "step": 8878 + }, + { + "epoch": 0.556712019562355, + "grad_norm": 2.962165355682373, + "learning_rate": 8.653642681490608e-06, + "loss": 1.0926, + "step": 8879 + }, + { + "epoch": 0.5567747194181454, + "grad_norm": 3.058495283126831, + "learning_rate": 8.651630434568448e-06, + "loss": 1.1048, + "step": 8880 + }, + { + "epoch": 0.5568374192739357, + "grad_norm": 3.1661314964294434, + "learning_rate": 8.649618243253106e-06, + "loss": 1.2003, + "step": 8881 + }, + { + "epoch": 0.556900119129726, + "grad_norm": 3.1513969898223877, + "learning_rate": 8.647606107627567e-06, + "loss": 1.1257, + "step": 8882 + }, + { + "epoch": 0.5569628189855164, + "grad_norm": 3.1025023460388184, + "learning_rate": 8.645594027774811e-06, + "loss": 1.0744, + "step": 8883 + }, + { + "epoch": 0.5570255188413067, + "grad_norm": 3.0602364540100098, + "learning_rate": 8.643582003777818e-06, + "loss": 1.0525, + "step": 8884 + }, + { + "epoch": 0.557088218697097, + "grad_norm": 3.028655529022217, + "learning_rate": 8.641570035719559e-06, + "loss": 1.1524, + "step": 8885 + }, + { + "epoch": 0.5571509185528873, + "grad_norm": 3.243067502975464, + "learning_rate": 8.639558123683017e-06, + "loss": 1.0076, + "step": 8886 + }, + { + "epoch": 0.5572136184086777, + "grad_norm": 3.0116655826568604, + "learning_rate": 8.63754626775115e-06, + "loss": 1.2395, + "step": 8887 + }, + { + "epoch": 0.557276318264468, + "grad_norm": 3.8882510662078857, + "learning_rate": 8.63553446800694e-06, + "loss": 1.1373, + "step": 8888 + }, + { + "epoch": 0.5573390181202583, + "grad_norm": 3.0367379188537598, + "learning_rate": 8.633522724533345e-06, + "loss": 1.1441, + "step": 8889 + }, + { + "epoch": 0.5574017179760486, + "grad_norm": 3.128896474838257, + "learning_rate": 8.631511037413334e-06, + "loss": 1.1117, + "step": 8890 + }, + { + "epoch": 0.557464417831839, + "grad_norm": 3.1406564712524414, + "learning_rate": 8.629499406729868e-06, + "loss": 0.9289, + "step": 8891 + }, + { + "epoch": 0.5575271176876293, + "grad_norm": 3.3255255222320557, + "learning_rate": 8.627487832565903e-06, + "loss": 1.0798, + "step": 8892 + }, + { + "epoch": 0.5575898175434196, + "grad_norm": 3.124115228652954, + "learning_rate": 8.625476315004406e-06, + "loss": 1.0055, + "step": 8893 + }, + { + "epoch": 0.55765251739921, + "grad_norm": 2.9064249992370605, + "learning_rate": 8.62346485412832e-06, + "loss": 1.0783, + "step": 8894 + }, + { + "epoch": 0.5577152172550003, + "grad_norm": 2.931262254714966, + "learning_rate": 8.621453450020607e-06, + "loss": 1.065, + "step": 
8895 + }, + { + "epoch": 0.5577779171107906, + "grad_norm": 3.0356240272521973, + "learning_rate": 8.619442102764211e-06, + "loss": 1.1072, + "step": 8896 + }, + { + "epoch": 0.5578406169665809, + "grad_norm": 3.1825406551361084, + "learning_rate": 8.617430812442085e-06, + "loss": 1.0389, + "step": 8897 + }, + { + "epoch": 0.5579033168223713, + "grad_norm": 2.841503620147705, + "learning_rate": 8.615419579137172e-06, + "loss": 1.1529, + "step": 8898 + }, + { + "epoch": 0.5579660166781616, + "grad_norm": 3.5273449420928955, + "learning_rate": 8.613408402932417e-06, + "loss": 1.0412, + "step": 8899 + }, + { + "epoch": 0.5580287165339519, + "grad_norm": 3.1052310466766357, + "learning_rate": 8.611397283910758e-06, + "loss": 0.9689, + "step": 8900 + }, + { + "epoch": 0.5580914163897424, + "grad_norm": 3.09344482421875, + "learning_rate": 8.609386222155135e-06, + "loss": 1.1363, + "step": 8901 + }, + { + "epoch": 0.5581541162455327, + "grad_norm": 3.1642982959747314, + "learning_rate": 8.607375217748491e-06, + "loss": 1.139, + "step": 8902 + }, + { + "epoch": 0.558216816101323, + "grad_norm": 2.9432995319366455, + "learning_rate": 8.605364270773747e-06, + "loss": 1.0683, + "step": 8903 + }, + { + "epoch": 0.5582795159571133, + "grad_norm": 3.192667007446289, + "learning_rate": 8.603353381313846e-06, + "loss": 1.0502, + "step": 8904 + }, + { + "epoch": 0.5583422158129037, + "grad_norm": 3.15362286567688, + "learning_rate": 8.60134254945171e-06, + "loss": 0.9792, + "step": 8905 + }, + { + "epoch": 0.558404915668694, + "grad_norm": 2.909435510635376, + "learning_rate": 8.59933177527027e-06, + "loss": 1.011, + "step": 8906 + }, + { + "epoch": 0.5584676155244843, + "grad_norm": 3.247702121734619, + "learning_rate": 8.597321058852446e-06, + "loss": 1.013, + "step": 8907 + }, + { + "epoch": 0.5585303153802746, + "grad_norm": 3.1614291667938232, + "learning_rate": 8.595310400281165e-06, + "loss": 1.2216, + "step": 8908 + }, + { + "epoch": 0.558593015236065, + "grad_norm": 3.2751646041870117, + "learning_rate": 8.593299799639343e-06, + "loss": 1.0327, + "step": 8909 + }, + { + "epoch": 0.5586557150918553, + "grad_norm": 3.0896213054656982, + "learning_rate": 8.5912892570099e-06, + "loss": 1.169, + "step": 8910 + }, + { + "epoch": 0.5587184149476456, + "grad_norm": 3.2857439517974854, + "learning_rate": 8.58927877247575e-06, + "loss": 1.0202, + "step": 8911 + }, + { + "epoch": 0.558781114803436, + "grad_norm": 3.223865270614624, + "learning_rate": 8.587268346119803e-06, + "loss": 1.115, + "step": 8912 + }, + { + "epoch": 0.5588438146592263, + "grad_norm": 3.186213254928589, + "learning_rate": 8.585257978024973e-06, + "loss": 1.0397, + "step": 8913 + }, + { + "epoch": 0.5589065145150166, + "grad_norm": 3.2270023822784424, + "learning_rate": 8.583247668274163e-06, + "loss": 1.1334, + "step": 8914 + }, + { + "epoch": 0.5589692143708069, + "grad_norm": 3.2580204010009766, + "learning_rate": 8.581237416950286e-06, + "loss": 0.9597, + "step": 8915 + }, + { + "epoch": 0.5590319142265973, + "grad_norm": 2.961467981338501, + "learning_rate": 8.579227224136236e-06, + "loss": 1.1094, + "step": 8916 + }, + { + "epoch": 0.5590946140823876, + "grad_norm": 3.02528977394104, + "learning_rate": 8.577217089914919e-06, + "loss": 1.1673, + "step": 8917 + }, + { + "epoch": 0.5591573139381779, + "grad_norm": 3.2540810108184814, + "learning_rate": 8.57520701436923e-06, + "loss": 1.1857, + "step": 8918 + }, + { + "epoch": 0.5592200137939682, + "grad_norm": 3.5649960041046143, + "learning_rate": 8.573196997582068e-06, + "loss": 
0.9677, + "step": 8919 + }, + { + "epoch": 0.5592827136497586, + "grad_norm": 3.442898750305176, + "learning_rate": 8.57118703963632e-06, + "loss": 1.0303, + "step": 8920 + }, + { + "epoch": 0.5593454135055489, + "grad_norm": 3.165393829345703, + "learning_rate": 8.569177140614884e-06, + "loss": 1.0836, + "step": 8921 + }, + { + "epoch": 0.5594081133613392, + "grad_norm": 3.0888984203338623, + "learning_rate": 8.567167300600646e-06, + "loss": 1.123, + "step": 8922 + }, + { + "epoch": 0.5594708132171295, + "grad_norm": 3.096625328063965, + "learning_rate": 8.565157519676488e-06, + "loss": 1.2282, + "step": 8923 + }, + { + "epoch": 0.55953351307292, + "grad_norm": 3.4635682106018066, + "learning_rate": 8.5631477979253e-06, + "loss": 0.9792, + "step": 8924 + }, + { + "epoch": 0.5595962129287103, + "grad_norm": 2.929800271987915, + "learning_rate": 8.561138135429956e-06, + "loss": 1.033, + "step": 8925 + }, + { + "epoch": 0.5596589127845006, + "grad_norm": 3.022857427597046, + "learning_rate": 8.559128532273341e-06, + "loss": 1.0276, + "step": 8926 + }, + { + "epoch": 0.559721612640291, + "grad_norm": 3.4122023582458496, + "learning_rate": 8.557118988538326e-06, + "loss": 1.0369, + "step": 8927 + }, + { + "epoch": 0.5597843124960813, + "grad_norm": 3.1845951080322266, + "learning_rate": 8.55510950430779e-06, + "loss": 1.1648, + "step": 8928 + }, + { + "epoch": 0.5598470123518716, + "grad_norm": 3.5634567737579346, + "learning_rate": 8.553100079664598e-06, + "loss": 1.2978, + "step": 8929 + }, + { + "epoch": 0.559909712207662, + "grad_norm": 3.3039486408233643, + "learning_rate": 8.551090714691623e-06, + "loss": 0.8957, + "step": 8930 + }, + { + "epoch": 0.5599724120634523, + "grad_norm": 3.366501569747925, + "learning_rate": 8.549081409471733e-06, + "loss": 1.2576, + "step": 8931 + }, + { + "epoch": 0.5600351119192426, + "grad_norm": 2.878814935684204, + "learning_rate": 8.547072164087783e-06, + "loss": 1.1865, + "step": 8932 + }, + { + "epoch": 0.5600978117750329, + "grad_norm": 2.889968156814575, + "learning_rate": 8.545062978622646e-06, + "loss": 1.1867, + "step": 8933 + }, + { + "epoch": 0.5601605116308233, + "grad_norm": 2.9214940071105957, + "learning_rate": 8.543053853159173e-06, + "loss": 1.1588, + "step": 8934 + }, + { + "epoch": 0.5602232114866136, + "grad_norm": 3.198554277420044, + "learning_rate": 8.541044787780223e-06, + "loss": 1.0735, + "step": 8935 + }, + { + "epoch": 0.5602859113424039, + "grad_norm": 3.2336204051971436, + "learning_rate": 8.539035782568649e-06, + "loss": 0.9434, + "step": 8936 + }, + { + "epoch": 0.5603486111981942, + "grad_norm": 3.277631998062134, + "learning_rate": 8.537026837607306e-06, + "loss": 1.0995, + "step": 8937 + }, + { + "epoch": 0.5604113110539846, + "grad_norm": 3.3728158473968506, + "learning_rate": 8.535017952979037e-06, + "loss": 1.1497, + "step": 8938 + }, + { + "epoch": 0.5604740109097749, + "grad_norm": 3.28521728515625, + "learning_rate": 8.533009128766693e-06, + "loss": 1.0012, + "step": 8939 + }, + { + "epoch": 0.5605367107655652, + "grad_norm": 3.310025453567505, + "learning_rate": 8.531000365053111e-06, + "loss": 1.0666, + "step": 8940 + }, + { + "epoch": 0.5605994106213555, + "grad_norm": 2.597670793533325, + "learning_rate": 8.528991661921145e-06, + "loss": 1.0206, + "step": 8941 + }, + { + "epoch": 0.5606621104771459, + "grad_norm": 3.4435551166534424, + "learning_rate": 8.526983019453624e-06, + "loss": 0.8337, + "step": 8942 + }, + { + "epoch": 0.5607248103329362, + "grad_norm": 3.244272232055664, + "learning_rate": 
8.524974437733389e-06, + "loss": 1.2103, + "step": 8943 + }, + { + "epoch": 0.5607875101887265, + "grad_norm": 3.1504368782043457, + "learning_rate": 8.522965916843272e-06, + "loss": 1.141, + "step": 8944 + }, + { + "epoch": 0.5608502100445169, + "grad_norm": 3.347539186477661, + "learning_rate": 8.520957456866107e-06, + "loss": 1.1312, + "step": 8945 + }, + { + "epoch": 0.5609129099003072, + "grad_norm": 2.9563286304473877, + "learning_rate": 8.518949057884719e-06, + "loss": 1.0251, + "step": 8946 + }, + { + "epoch": 0.5609756097560976, + "grad_norm": 3.0350563526153564, + "learning_rate": 8.516940719981938e-06, + "loss": 0.9828, + "step": 8947 + }, + { + "epoch": 0.5610383096118879, + "grad_norm": 2.971397876739502, + "learning_rate": 8.514932443240584e-06, + "loss": 1.1972, + "step": 8948 + }, + { + "epoch": 0.5611010094676783, + "grad_norm": 3.3040311336517334, + "learning_rate": 8.512924227743482e-06, + "loss": 1.0255, + "step": 8949 + }, + { + "epoch": 0.5611637093234686, + "grad_norm": 2.775827169418335, + "learning_rate": 8.510916073573452e-06, + "loss": 1.0869, + "step": 8950 + }, + { + "epoch": 0.5612264091792589, + "grad_norm": 3.0775296688079834, + "learning_rate": 8.508907980813305e-06, + "loss": 1.1966, + "step": 8951 + }, + { + "epoch": 0.5612891090350492, + "grad_norm": 3.1275274753570557, + "learning_rate": 8.506899949545862e-06, + "loss": 1.2275, + "step": 8952 + }, + { + "epoch": 0.5613518088908396, + "grad_norm": 3.0587751865386963, + "learning_rate": 8.504891979853925e-06, + "loss": 0.9245, + "step": 8953 + }, + { + "epoch": 0.5614145087466299, + "grad_norm": 3.039335012435913, + "learning_rate": 8.502884071820312e-06, + "loss": 1.1205, + "step": 8954 + }, + { + "epoch": 0.5614772086024202, + "grad_norm": 3.26347279548645, + "learning_rate": 8.500876225527822e-06, + "loss": 0.9336, + "step": 8955 + }, + { + "epoch": 0.5615399084582106, + "grad_norm": 2.8848376274108887, + "learning_rate": 8.498868441059266e-06, + "loss": 1.0942, + "step": 8956 + }, + { + "epoch": 0.5616026083140009, + "grad_norm": 3.262178659439087, + "learning_rate": 8.49686071849744e-06, + "loss": 1.0209, + "step": 8957 + }, + { + "epoch": 0.5616653081697912, + "grad_norm": 2.766050338745117, + "learning_rate": 8.494853057925143e-06, + "loss": 1.2252, + "step": 8958 + }, + { + "epoch": 0.5617280080255815, + "grad_norm": 3.005551815032959, + "learning_rate": 8.492845459425169e-06, + "loss": 1.1104, + "step": 8959 + }, + { + "epoch": 0.5617907078813719, + "grad_norm": 3.0832138061523438, + "learning_rate": 8.490837923080315e-06, + "loss": 1.0517, + "step": 8960 + }, + { + "epoch": 0.5618534077371622, + "grad_norm": 2.841860055923462, + "learning_rate": 8.488830448973376e-06, + "loss": 1.1614, + "step": 8961 + }, + { + "epoch": 0.5619161075929525, + "grad_norm": 3.4661145210266113, + "learning_rate": 8.48682303718713e-06, + "loss": 1.0531, + "step": 8962 + }, + { + "epoch": 0.5619788074487428, + "grad_norm": 3.263052225112915, + "learning_rate": 8.48481568780437e-06, + "loss": 1.0533, + "step": 8963 + }, + { + "epoch": 0.5620415073045332, + "grad_norm": 2.9631481170654297, + "learning_rate": 8.482808400907876e-06, + "loss": 1.1627, + "step": 8964 + }, + { + "epoch": 0.5621042071603235, + "grad_norm": 2.9648919105529785, + "learning_rate": 8.480801176580431e-06, + "loss": 1.1809, + "step": 8965 + }, + { + "epoch": 0.5621669070161138, + "grad_norm": 3.081064224243164, + "learning_rate": 8.478794014904811e-06, + "loss": 1.0111, + "step": 8966 + }, + { + "epoch": 0.5622296068719042, + "grad_norm": 
3.1072700023651123, + "learning_rate": 8.476786915963794e-06, + "loss": 1.1236, + "step": 8967 + }, + { + "epoch": 0.5622923067276945, + "grad_norm": 3.142688035964966, + "learning_rate": 8.47477987984015e-06, + "loss": 1.1709, + "step": 8968 + }, + { + "epoch": 0.5623550065834848, + "grad_norm": 2.838510513305664, + "learning_rate": 8.47277290661665e-06, + "loss": 1.0521, + "step": 8969 + }, + { + "epoch": 0.5624177064392752, + "grad_norm": 3.117318630218506, + "learning_rate": 8.470765996376062e-06, + "loss": 0.8903, + "step": 8970 + }, + { + "epoch": 0.5624804062950656, + "grad_norm": 3.159543037414551, + "learning_rate": 8.46875914920115e-06, + "loss": 0.9119, + "step": 8971 + }, + { + "epoch": 0.5625431061508559, + "grad_norm": 3.0067481994628906, + "learning_rate": 8.46675236517468e-06, + "loss": 0.9706, + "step": 8972 + }, + { + "epoch": 0.5626058060066462, + "grad_norm": 2.8413443565368652, + "learning_rate": 8.464745644379407e-06, + "loss": 1.2827, + "step": 8973 + }, + { + "epoch": 0.5626685058624366, + "grad_norm": 3.330714702606201, + "learning_rate": 8.462738986898092e-06, + "loss": 0.9428, + "step": 8974 + }, + { + "epoch": 0.5627312057182269, + "grad_norm": 3.212127923965454, + "learning_rate": 8.460732392813487e-06, + "loss": 1.1027, + "step": 8975 + }, + { + "epoch": 0.5627939055740172, + "grad_norm": 3.3397252559661865, + "learning_rate": 8.458725862208348e-06, + "loss": 1.0211, + "step": 8976 + }, + { + "epoch": 0.5628566054298075, + "grad_norm": 2.9702208042144775, + "learning_rate": 8.45671939516542e-06, + "loss": 0.961, + "step": 8977 + }, + { + "epoch": 0.5629193052855979, + "grad_norm": 2.740837812423706, + "learning_rate": 8.454712991767452e-06, + "loss": 1.164, + "step": 8978 + }, + { + "epoch": 0.5629820051413882, + "grad_norm": 3.2289414405822754, + "learning_rate": 8.452706652097187e-06, + "loss": 1.0397, + "step": 8979 + }, + { + "epoch": 0.5630447049971785, + "grad_norm": 2.9957079887390137, + "learning_rate": 8.450700376237368e-06, + "loss": 0.9365, + "step": 8980 + }, + { + "epoch": 0.5631074048529688, + "grad_norm": 3.0558362007141113, + "learning_rate": 8.448694164270733e-06, + "loss": 1.1009, + "step": 8981 + }, + { + "epoch": 0.5631701047087592, + "grad_norm": 3.2653682231903076, + "learning_rate": 8.446688016280018e-06, + "loss": 1.0099, + "step": 8982 + }, + { + "epoch": 0.5632328045645495, + "grad_norm": 3.1139280796051025, + "learning_rate": 8.444681932347958e-06, + "loss": 1.2999, + "step": 8983 + }, + { + "epoch": 0.5632955044203398, + "grad_norm": 3.0530190467834473, + "learning_rate": 8.442675912557281e-06, + "loss": 1.0446, + "step": 8984 + }, + { + "epoch": 0.5633582042761301, + "grad_norm": 3.2807374000549316, + "learning_rate": 8.44066995699072e-06, + "loss": 1.1522, + "step": 8985 + }, + { + "epoch": 0.5634209041319205, + "grad_norm": 3.2213809490203857, + "learning_rate": 8.438664065730998e-06, + "loss": 1.0221, + "step": 8986 + }, + { + "epoch": 0.5634836039877108, + "grad_norm": 3.063249349594116, + "learning_rate": 8.436658238860837e-06, + "loss": 1.1089, + "step": 8987 + }, + { + "epoch": 0.5635463038435011, + "grad_norm": 3.3466379642486572, + "learning_rate": 8.434652476462956e-06, + "loss": 1.0853, + "step": 8988 + }, + { + "epoch": 0.5636090036992915, + "grad_norm": 3.0101733207702637, + "learning_rate": 8.43264677862008e-06, + "loss": 1.1179, + "step": 8989 + }, + { + "epoch": 0.5636717035550818, + "grad_norm": 3.0743472576141357, + "learning_rate": 8.430641145414916e-06, + "loss": 1.2023, + "step": 8990 + }, + { + "epoch": 
0.5637344034108721, + "grad_norm": 3.067561149597168, + "learning_rate": 8.42863557693018e-06, + "loss": 1.1164, + "step": 8991 + }, + { + "epoch": 0.5637971032666624, + "grad_norm": 2.9422013759613037, + "learning_rate": 8.426630073248582e-06, + "loss": 1.0339, + "step": 8992 + }, + { + "epoch": 0.5638598031224528, + "grad_norm": 3.0089406967163086, + "learning_rate": 8.424624634452829e-06, + "loss": 1.0265, + "step": 8993 + }, + { + "epoch": 0.5639225029782432, + "grad_norm": 3.1816189289093018, + "learning_rate": 8.422619260625626e-06, + "loss": 1.3613, + "step": 8994 + }, + { + "epoch": 0.5639852028340335, + "grad_norm": 3.177926540374756, + "learning_rate": 8.42061395184967e-06, + "loss": 1.0842, + "step": 8995 + }, + { + "epoch": 0.5640479026898239, + "grad_norm": 3.112457275390625, + "learning_rate": 8.418608708207667e-06, + "loss": 1.1467, + "step": 8996 + }, + { + "epoch": 0.5641106025456142, + "grad_norm": 2.7829208374023438, + "learning_rate": 8.416603529782308e-06, + "loss": 1.171, + "step": 8997 + }, + { + "epoch": 0.5641733024014045, + "grad_norm": 3.0922162532806396, + "learning_rate": 8.41459841665629e-06, + "loss": 1.0318, + "step": 8998 + }, + { + "epoch": 0.5642360022571948, + "grad_norm": 3.3777315616607666, + "learning_rate": 8.4125933689123e-06, + "loss": 1.0728, + "step": 8999 + }, + { + "epoch": 0.5642987021129852, + "grad_norm": 3.6228649616241455, + "learning_rate": 8.410588386633031e-06, + "loss": 1.1785, + "step": 9000 + }, + { + "epoch": 0.5642987021129852, + "eval_loss": 1.1186591386795044, + "eval_runtime": 143.818, + "eval_samples_per_second": 4.381, + "eval_steps_per_second": 1.099, + "step": 9000 + }, + { + "epoch": 0.5643614019687755, + "grad_norm": 3.2832539081573486, + "learning_rate": 8.408583469901166e-06, + "loss": 1.0632, + "step": 9001 + }, + { + "epoch": 0.5644241018245658, + "grad_norm": 3.157287359237671, + "learning_rate": 8.406578618799388e-06, + "loss": 1.139, + "step": 9002 + }, + { + "epoch": 0.5644868016803561, + "grad_norm": 3.1367576122283936, + "learning_rate": 8.404573833410378e-06, + "loss": 1.0841, + "step": 9003 + }, + { + "epoch": 0.5645495015361465, + "grad_norm": 3.4065003395080566, + "learning_rate": 8.402569113816812e-06, + "loss": 0.9877, + "step": 9004 + }, + { + "epoch": 0.5646122013919368, + "grad_norm": 3.452996253967285, + "learning_rate": 8.400564460101365e-06, + "loss": 1.0254, + "step": 9005 + }, + { + "epoch": 0.5646749012477271, + "grad_norm": 3.2855584621429443, + "learning_rate": 8.39855987234671e-06, + "loss": 1.1421, + "step": 9006 + }, + { + "epoch": 0.5647376011035175, + "grad_norm": 3.146529197692871, + "learning_rate": 8.396555350635516e-06, + "loss": 1.1444, + "step": 9007 + }, + { + "epoch": 0.5648003009593078, + "grad_norm": 3.398977041244507, + "learning_rate": 8.39455089505045e-06, + "loss": 0.9915, + "step": 9008 + }, + { + "epoch": 0.5648630008150981, + "grad_norm": 3.4200856685638428, + "learning_rate": 8.392546505674176e-06, + "loss": 0.9397, + "step": 9009 + }, + { + "epoch": 0.5649257006708884, + "grad_norm": 3.411851167678833, + "learning_rate": 8.390542182589351e-06, + "loss": 1.0402, + "step": 9010 + }, + { + "epoch": 0.5649884005266788, + "grad_norm": 3.5778045654296875, + "learning_rate": 8.388537925878637e-06, + "loss": 1.1171, + "step": 9011 + }, + { + "epoch": 0.5650511003824691, + "grad_norm": 3.320131778717041, + "learning_rate": 8.386533735624693e-06, + "loss": 1.1713, + "step": 9012 + }, + { + "epoch": 0.5651138002382594, + "grad_norm": 3.005119800567627, + "learning_rate": 
8.384529611910164e-06, + "loss": 1.2698, + "step": 9013 + }, + { + "epoch": 0.5651765000940497, + "grad_norm": 3.2578163146972656, + "learning_rate": 8.382525554817709e-06, + "loss": 1.0859, + "step": 9014 + }, + { + "epoch": 0.5652391999498401, + "grad_norm": 3.022862434387207, + "learning_rate": 8.380521564429967e-06, + "loss": 1.1081, + "step": 9015 + }, + { + "epoch": 0.5653018998056304, + "grad_norm": 3.3085477352142334, + "learning_rate": 8.378517640829587e-06, + "loss": 1.2089, + "step": 9016 + }, + { + "epoch": 0.5653645996614208, + "grad_norm": 3.3412740230560303, + "learning_rate": 8.37651378409921e-06, + "loss": 1.1756, + "step": 9017 + }, + { + "epoch": 0.5654272995172112, + "grad_norm": 3.095074415206909, + "learning_rate": 8.374509994321477e-06, + "loss": 1.2388, + "step": 9018 + }, + { + "epoch": 0.5654899993730015, + "grad_norm": 3.2049150466918945, + "learning_rate": 8.372506271579022e-06, + "loss": 1.3609, + "step": 9019 + }, + { + "epoch": 0.5655526992287918, + "grad_norm": 3.2227444648742676, + "learning_rate": 8.370502615954481e-06, + "loss": 1.2226, + "step": 9020 + }, + { + "epoch": 0.5656153990845821, + "grad_norm": 3.085252523422241, + "learning_rate": 8.368499027530482e-06, + "loss": 1.0341, + "step": 9021 + }, + { + "epoch": 0.5656780989403725, + "grad_norm": 3.1027650833129883, + "learning_rate": 8.366495506389652e-06, + "loss": 1.0765, + "step": 9022 + }, + { + "epoch": 0.5657407987961628, + "grad_norm": 3.166006088256836, + "learning_rate": 8.364492052614623e-06, + "loss": 1.1276, + "step": 9023 + }, + { + "epoch": 0.5658034986519531, + "grad_norm": 2.910890579223633, + "learning_rate": 8.362488666288009e-06, + "loss": 1.2157, + "step": 9024 + }, + { + "epoch": 0.5658661985077434, + "grad_norm": 3.3789703845977783, + "learning_rate": 8.360485347492437e-06, + "loss": 1.0764, + "step": 9025 + }, + { + "epoch": 0.5659288983635338, + "grad_norm": 3.006227970123291, + "learning_rate": 8.358482096310518e-06, + "loss": 1.0659, + "step": 9026 + }, + { + "epoch": 0.5659915982193241, + "grad_norm": 2.892493486404419, + "learning_rate": 8.356478912824873e-06, + "loss": 1.1403, + "step": 9027 + }, + { + "epoch": 0.5660542980751144, + "grad_norm": 2.800407886505127, + "learning_rate": 8.354475797118106e-06, + "loss": 1.1152, + "step": 9028 + }, + { + "epoch": 0.5661169979309048, + "grad_norm": 2.991731643676758, + "learning_rate": 8.352472749272831e-06, + "loss": 1.1596, + "step": 9029 + }, + { + "epoch": 0.5661796977866951, + "grad_norm": 3.2093496322631836, + "learning_rate": 8.35046976937165e-06, + "loss": 1.1822, + "step": 9030 + }, + { + "epoch": 0.5662423976424854, + "grad_norm": 3.0743401050567627, + "learning_rate": 8.348466857497169e-06, + "loss": 1.0479, + "step": 9031 + }, + { + "epoch": 0.5663050974982757, + "grad_norm": 3.041863441467285, + "learning_rate": 8.346464013731987e-06, + "loss": 1.2343, + "step": 9032 + }, + { + "epoch": 0.5663677973540661, + "grad_norm": 3.2358756065368652, + "learning_rate": 8.3444612381587e-06, + "loss": 0.9331, + "step": 9033 + }, + { + "epoch": 0.5664304972098564, + "grad_norm": 3.4038147926330566, + "learning_rate": 8.342458530859906e-06, + "loss": 0.9381, + "step": 9034 + }, + { + "epoch": 0.5664931970656467, + "grad_norm": 2.959937572479248, + "learning_rate": 8.340455891918192e-06, + "loss": 0.9668, + "step": 9035 + }, + { + "epoch": 0.566555896921437, + "grad_norm": 3.426703691482544, + "learning_rate": 8.338453321416152e-06, + "loss": 1.2546, + "step": 9036 + }, + { + "epoch": 0.5666185967772274, + "grad_norm": 
3.3177547454833984, + "learning_rate": 8.336450819436369e-06, + "loss": 1.0592, + "step": 9037 + }, + { + "epoch": 0.5666812966330177, + "grad_norm": 3.2554287910461426, + "learning_rate": 8.334448386061426e-06, + "loss": 1.135, + "step": 9038 + }, + { + "epoch": 0.566743996488808, + "grad_norm": 2.966526508331299, + "learning_rate": 8.332446021373903e-06, + "loss": 1.0965, + "step": 9039 + }, + { + "epoch": 0.5668066963445985, + "grad_norm": 3.342755079269409, + "learning_rate": 8.330443725456382e-06, + "loss": 1.0798, + "step": 9040 + }, + { + "epoch": 0.5668693962003888, + "grad_norm": 3.0694801807403564, + "learning_rate": 8.328441498391432e-06, + "loss": 1.1625, + "step": 9041 + }, + { + "epoch": 0.5669320960561791, + "grad_norm": 3.4108738899230957, + "learning_rate": 8.326439340261628e-06, + "loss": 1.154, + "step": 9042 + }, + { + "epoch": 0.5669947959119694, + "grad_norm": 3.5995054244995117, + "learning_rate": 8.32443725114954e-06, + "loss": 1.1703, + "step": 9043 + }, + { + "epoch": 0.5670574957677598, + "grad_norm": 2.97011137008667, + "learning_rate": 8.322435231137732e-06, + "loss": 0.9606, + "step": 9044 + }, + { + "epoch": 0.5671201956235501, + "grad_norm": 2.897170066833496, + "learning_rate": 8.32043328030877e-06, + "loss": 1.0839, + "step": 9045 + }, + { + "epoch": 0.5671828954793404, + "grad_norm": 3.522035598754883, + "learning_rate": 8.31843139874521e-06, + "loss": 1.1946, + "step": 9046 + }, + { + "epoch": 0.5672455953351307, + "grad_norm": 2.9559690952301025, + "learning_rate": 8.316429586529616e-06, + "loss": 1.1173, + "step": 9047 + }, + { + "epoch": 0.5673082951909211, + "grad_norm": 3.2203590869903564, + "learning_rate": 8.314427843744535e-06, + "loss": 0.9801, + "step": 9048 + }, + { + "epoch": 0.5673709950467114, + "grad_norm": 3.0775609016418457, + "learning_rate": 8.312426170472528e-06, + "loss": 1.0507, + "step": 9049 + }, + { + "epoch": 0.5674336949025017, + "grad_norm": 3.013676404953003, + "learning_rate": 8.310424566796135e-06, + "loss": 1.0831, + "step": 9050 + }, + { + "epoch": 0.567496394758292, + "grad_norm": 3.31510853767395, + "learning_rate": 8.308423032797911e-06, + "loss": 1.207, + "step": 9051 + }, + { + "epoch": 0.5675590946140824, + "grad_norm": 3.1516079902648926, + "learning_rate": 8.306421568560393e-06, + "loss": 1.1042, + "step": 9052 + }, + { + "epoch": 0.5676217944698727, + "grad_norm": 3.173255681991577, + "learning_rate": 8.304420174166121e-06, + "loss": 1.0991, + "step": 9053 + }, + { + "epoch": 0.567684494325663, + "grad_norm": 3.5878989696502686, + "learning_rate": 8.30241884969764e-06, + "loss": 1.0806, + "step": 9054 + }, + { + "epoch": 0.5677471941814534, + "grad_norm": 3.074465274810791, + "learning_rate": 8.300417595237475e-06, + "loss": 1.1953, + "step": 9055 + }, + { + "epoch": 0.5678098940372437, + "grad_norm": 3.455522060394287, + "learning_rate": 8.298416410868166e-06, + "loss": 1.0294, + "step": 9056 + }, + { + "epoch": 0.567872593893034, + "grad_norm": 2.860485076904297, + "learning_rate": 8.296415296672236e-06, + "loss": 1.1885, + "step": 9057 + }, + { + "epoch": 0.5679352937488243, + "grad_norm": 3.5552589893341064, + "learning_rate": 8.294414252732217e-06, + "loss": 0.9441, + "step": 9058 + }, + { + "epoch": 0.5679979936046147, + "grad_norm": 3.116209030151367, + "learning_rate": 8.292413279130625e-06, + "loss": 1.1796, + "step": 9059 + }, + { + "epoch": 0.568060693460405, + "grad_norm": 3.3861446380615234, + "learning_rate": 8.290412375949987e-06, + "loss": 1.1155, + "step": 9060 + }, + { + "epoch": 
0.5681233933161953, + "grad_norm": 2.793712854385376, + "learning_rate": 8.288411543272814e-06, + "loss": 1.0182, + "step": 9061 + }, + { + "epoch": 0.5681860931719857, + "grad_norm": 3.318876028060913, + "learning_rate": 8.286410781181626e-06, + "loss": 1.0028, + "step": 9062 + }, + { + "epoch": 0.5682487930277761, + "grad_norm": 3.6269919872283936, + "learning_rate": 8.284410089758932e-06, + "loss": 1.1136, + "step": 9063 + }, + { + "epoch": 0.5683114928835664, + "grad_norm": 3.1610002517700195, + "learning_rate": 8.28240946908724e-06, + "loss": 1.0406, + "step": 9064 + }, + { + "epoch": 0.5683741927393567, + "grad_norm": 3.34857177734375, + "learning_rate": 8.280408919249058e-06, + "loss": 0.9707, + "step": 9065 + }, + { + "epoch": 0.5684368925951471, + "grad_norm": 3.075920343399048, + "learning_rate": 8.278408440326885e-06, + "loss": 1.0968, + "step": 9066 + }, + { + "epoch": 0.5684995924509374, + "grad_norm": 3.0563101768493652, + "learning_rate": 8.276408032403225e-06, + "loss": 1.1753, + "step": 9067 + }, + { + "epoch": 0.5685622923067277, + "grad_norm": 3.4288456439971924, + "learning_rate": 8.274407695560572e-06, + "loss": 1.1491, + "step": 9068 + }, + { + "epoch": 0.568624992162518, + "grad_norm": 2.9744200706481934, + "learning_rate": 8.272407429881421e-06, + "loss": 1.1596, + "step": 9069 + }, + { + "epoch": 0.5686876920183084, + "grad_norm": 2.8931429386138916, + "learning_rate": 8.270407235448263e-06, + "loss": 1.1172, + "step": 9070 + }, + { + "epoch": 0.5687503918740987, + "grad_norm": 3.194378137588501, + "learning_rate": 8.268407112343589e-06, + "loss": 0.8949, + "step": 9071 + }, + { + "epoch": 0.568813091729889, + "grad_norm": 3.2555816173553467, + "learning_rate": 8.266407060649877e-06, + "loss": 1.0594, + "step": 9072 + }, + { + "epoch": 0.5688757915856794, + "grad_norm": 3.2101261615753174, + "learning_rate": 8.264407080449615e-06, + "loss": 1.159, + "step": 9073 + }, + { + "epoch": 0.5689384914414697, + "grad_norm": 3.0367934703826904, + "learning_rate": 8.262407171825282e-06, + "loss": 1.0863, + "step": 9074 + }, + { + "epoch": 0.56900119129726, + "grad_norm": 3.02947735786438, + "learning_rate": 8.260407334859352e-06, + "loss": 1.2077, + "step": 9075 + }, + { + "epoch": 0.5690638911530503, + "grad_norm": 3.1531472206115723, + "learning_rate": 8.2584075696343e-06, + "loss": 1.2002, + "step": 9076 + }, + { + "epoch": 0.5691265910088407, + "grad_norm": 3.2605063915252686, + "learning_rate": 8.256407876232596e-06, + "loss": 1.1058, + "step": 9077 + }, + { + "epoch": 0.569189290864631, + "grad_norm": 3.007946729660034, + "learning_rate": 8.254408254736708e-06, + "loss": 1.145, + "step": 9078 + }, + { + "epoch": 0.5692519907204213, + "grad_norm": 3.164815664291382, + "learning_rate": 8.252408705229098e-06, + "loss": 1.0949, + "step": 9079 + }, + { + "epoch": 0.5693146905762116, + "grad_norm": 3.3986759185791016, + "learning_rate": 8.250409227792231e-06, + "loss": 1.0543, + "step": 9080 + }, + { + "epoch": 0.569377390432002, + "grad_norm": 3.26773738861084, + "learning_rate": 8.24840982250856e-06, + "loss": 1.236, + "step": 9081 + }, + { + "epoch": 0.5694400902877923, + "grad_norm": 3.5100460052490234, + "learning_rate": 8.24641048946055e-06, + "loss": 0.8976, + "step": 9082 + }, + { + "epoch": 0.5695027901435826, + "grad_norm": 3.0171310901641846, + "learning_rate": 8.244411228730645e-06, + "loss": 1.1445, + "step": 9083 + }, + { + "epoch": 0.569565489999373, + "grad_norm": 3.2792744636535645, + "learning_rate": 8.242412040401297e-06, + "loss": 1.0944, + "step": 9084 + 
}, + { + "epoch": 0.5696281898551633, + "grad_norm": 3.1385369300842285, + "learning_rate": 8.240412924554955e-06, + "loss": 1.1897, + "step": 9085 + }, + { + "epoch": 0.5696908897109537, + "grad_norm": 2.900753974914551, + "learning_rate": 8.238413881274059e-06, + "loss": 1.1975, + "step": 9086 + }, + { + "epoch": 0.569753589566744, + "grad_norm": 3.4375722408294678, + "learning_rate": 8.236414910641054e-06, + "loss": 1.219, + "step": 9087 + }, + { + "epoch": 0.5698162894225344, + "grad_norm": 2.697890043258667, + "learning_rate": 8.234416012738372e-06, + "loss": 1.1197, + "step": 9088 + }, + { + "epoch": 0.5698789892783247, + "grad_norm": 2.752167224884033, + "learning_rate": 8.232417187648454e-06, + "loss": 1.1451, + "step": 9089 + }, + { + "epoch": 0.569941689134115, + "grad_norm": 3.3542895317077637, + "learning_rate": 8.230418435453723e-06, + "loss": 1.127, + "step": 9090 + }, + { + "epoch": 0.5700043889899054, + "grad_norm": 3.252629280090332, + "learning_rate": 8.22841975623662e-06, + "loss": 1.0811, + "step": 9091 + }, + { + "epoch": 0.5700670888456957, + "grad_norm": 3.3471596240997314, + "learning_rate": 8.226421150079558e-06, + "loss": 1.0123, + "step": 9092 + }, + { + "epoch": 0.570129788701486, + "grad_norm": 3.254913091659546, + "learning_rate": 8.224422617064968e-06, + "loss": 1.0801, + "step": 9093 + }, + { + "epoch": 0.5701924885572763, + "grad_norm": 3.3181729316711426, + "learning_rate": 8.222424157275267e-06, + "loss": 1.1466, + "step": 9094 + }, + { + "epoch": 0.5702551884130667, + "grad_norm": 3.204502820968628, + "learning_rate": 8.22042577079287e-06, + "loss": 1.2767, + "step": 9095 + }, + { + "epoch": 0.570317888268857, + "grad_norm": 3.1094861030578613, + "learning_rate": 8.218427457700194e-06, + "loss": 1.1757, + "step": 9096 + }, + { + "epoch": 0.5703805881246473, + "grad_norm": 3.179189443588257, + "learning_rate": 8.216429218079645e-06, + "loss": 1.1247, + "step": 9097 + }, + { + "epoch": 0.5704432879804376, + "grad_norm": 2.7571306228637695, + "learning_rate": 8.214431052013636e-06, + "loss": 1.1383, + "step": 9098 + }, + { + "epoch": 0.570505987836228, + "grad_norm": 3.0971477031707764, + "learning_rate": 8.212432959584565e-06, + "loss": 0.9926, + "step": 9099 + }, + { + "epoch": 0.5705686876920183, + "grad_norm": 3.046359062194824, + "learning_rate": 8.210434940874838e-06, + "loss": 1.1367, + "step": 9100 + }, + { + "epoch": 0.5706313875478086, + "grad_norm": 3.0937302112579346, + "learning_rate": 8.208436995966851e-06, + "loss": 1.1197, + "step": 9101 + }, + { + "epoch": 0.570694087403599, + "grad_norm": 3.342217445373535, + "learning_rate": 8.206439124943004e-06, + "loss": 1.2399, + "step": 9102 + }, + { + "epoch": 0.5707567872593893, + "grad_norm": 3.2851669788360596, + "learning_rate": 8.204441327885682e-06, + "loss": 1.0751, + "step": 9103 + }, + { + "epoch": 0.5708194871151796, + "grad_norm": 2.9995546340942383, + "learning_rate": 8.202443604877279e-06, + "loss": 1.0741, + "step": 9104 + }, + { + "epoch": 0.5708821869709699, + "grad_norm": 3.241807222366333, + "learning_rate": 8.200445956000182e-06, + "loss": 1.0807, + "step": 9105 + }, + { + "epoch": 0.5709448868267603, + "grad_norm": 2.8040006160736084, + "learning_rate": 8.198448381336771e-06, + "loss": 1.2488, + "step": 9106 + }, + { + "epoch": 0.5710075866825506, + "grad_norm": 3.379624128341675, + "learning_rate": 8.196450880969429e-06, + "loss": 1.2764, + "step": 9107 + }, + { + "epoch": 0.5710702865383409, + "grad_norm": 2.754742383956909, + "learning_rate": 8.19445345498053e-06, + "loss": 
1.2323, + "step": 9108 + }, + { + "epoch": 0.5711329863941313, + "grad_norm": 3.1651220321655273, + "learning_rate": 8.192456103452452e-06, + "loss": 1.2227, + "step": 9109 + }, + { + "epoch": 0.5711956862499217, + "grad_norm": 3.1654415130615234, + "learning_rate": 8.190458826467557e-06, + "loss": 1.1878, + "step": 9110 + }, + { + "epoch": 0.571258386105712, + "grad_norm": 3.091644287109375, + "learning_rate": 8.188461624108226e-06, + "loss": 1.0016, + "step": 9111 + }, + { + "epoch": 0.5713210859615023, + "grad_norm": 3.0696358680725098, + "learning_rate": 8.186464496456812e-06, + "loss": 1.1022, + "step": 9112 + }, + { + "epoch": 0.5713837858172927, + "grad_norm": 3.395120620727539, + "learning_rate": 8.184467443595682e-06, + "loss": 1.1846, + "step": 9113 + }, + { + "epoch": 0.571446485673083, + "grad_norm": 3.1187450885772705, + "learning_rate": 8.182470465607195e-06, + "loss": 1.0324, + "step": 9114 + }, + { + "epoch": 0.5715091855288733, + "grad_norm": 3.47517728805542, + "learning_rate": 8.180473562573705e-06, + "loss": 0.9807, + "step": 9115 + }, + { + "epoch": 0.5715718853846636, + "grad_norm": 2.806225299835205, + "learning_rate": 8.178476734577566e-06, + "loss": 1.0733, + "step": 9116 + }, + { + "epoch": 0.571634585240454, + "grad_norm": 3.0332906246185303, + "learning_rate": 8.176479981701124e-06, + "loss": 1.0789, + "step": 9117 + }, + { + "epoch": 0.5716972850962443, + "grad_norm": 3.303030490875244, + "learning_rate": 8.17448330402673e-06, + "loss": 1.3665, + "step": 9118 + }, + { + "epoch": 0.5717599849520346, + "grad_norm": 3.0371875762939453, + "learning_rate": 8.172486701636721e-06, + "loss": 1.0414, + "step": 9119 + }, + { + "epoch": 0.5718226848078249, + "grad_norm": 3.1885764598846436, + "learning_rate": 8.170490174613443e-06, + "loss": 0.9467, + "step": 9120 + }, + { + "epoch": 0.5718853846636153, + "grad_norm": 2.750051498413086, + "learning_rate": 8.168493723039229e-06, + "loss": 1.1646, + "step": 9121 + }, + { + "epoch": 0.5719480845194056, + "grad_norm": 3.482893705368042, + "learning_rate": 8.166497346996414e-06, + "loss": 1.1753, + "step": 9122 + }, + { + "epoch": 0.5720107843751959, + "grad_norm": 2.9842143058776855, + "learning_rate": 8.164501046567328e-06, + "loss": 1.1509, + "step": 9123 + }, + { + "epoch": 0.5720734842309863, + "grad_norm": 2.734492540359497, + "learning_rate": 8.162504821834296e-06, + "loss": 1.16, + "step": 9124 + }, + { + "epoch": 0.5721361840867766, + "grad_norm": 2.904996633529663, + "learning_rate": 8.16050867287965e-06, + "loss": 1.2439, + "step": 9125 + }, + { + "epoch": 0.5721988839425669, + "grad_norm": 3.366048574447632, + "learning_rate": 8.158512599785705e-06, + "loss": 0.9771, + "step": 9126 + }, + { + "epoch": 0.5722615837983572, + "grad_norm": 3.1966471672058105, + "learning_rate": 8.156516602634781e-06, + "loss": 0.9886, + "step": 9127 + }, + { + "epoch": 0.5723242836541476, + "grad_norm": 3.010387659072876, + "learning_rate": 8.154520681509193e-06, + "loss": 1.106, + "step": 9128 + }, + { + "epoch": 0.5723869835099379, + "grad_norm": 3.3424642086029053, + "learning_rate": 8.152524836491254e-06, + "loss": 1.1958, + "step": 9129 + }, + { + "epoch": 0.5724496833657282, + "grad_norm": 3.335116147994995, + "learning_rate": 8.150529067663267e-06, + "loss": 1.0284, + "step": 9130 + }, + { + "epoch": 0.5725123832215185, + "grad_norm": 3.308417797088623, + "learning_rate": 8.148533375107547e-06, + "loss": 1.0549, + "step": 9131 + }, + { + "epoch": 0.572575083077309, + "grad_norm": 3.0053293704986572, + "learning_rate": 
8.146537758906388e-06, + "loss": 1.3323, + "step": 9132 + }, + { + "epoch": 0.5726377829330993, + "grad_norm": 3.253371477127075, + "learning_rate": 8.144542219142095e-06, + "loss": 1.1791, + "step": 9133 + }, + { + "epoch": 0.5727004827888896, + "grad_norm": 3.0469110012054443, + "learning_rate": 8.14254675589696e-06, + "loss": 1.1049, + "step": 9134 + }, + { + "epoch": 0.57276318264468, + "grad_norm": 3.3920273780822754, + "learning_rate": 8.140551369253279e-06, + "loss": 1.1649, + "step": 9135 + }, + { + "epoch": 0.5728258825004703, + "grad_norm": 3.237809181213379, + "learning_rate": 8.138556059293341e-06, + "loss": 1.153, + "step": 9136 + }, + { + "epoch": 0.5728885823562606, + "grad_norm": 3.2999541759490967, + "learning_rate": 8.13656082609943e-06, + "loss": 1.1855, + "step": 9137 + }, + { + "epoch": 0.5729512822120509, + "grad_norm": 3.1852731704711914, + "learning_rate": 8.134565669753836e-06, + "loss": 1.1331, + "step": 9138 + }, + { + "epoch": 0.5730139820678413, + "grad_norm": 2.9517033100128174, + "learning_rate": 8.132570590338834e-06, + "loss": 1.0769, + "step": 9139 + }, + { + "epoch": 0.5730766819236316, + "grad_norm": 5.264471054077148, + "learning_rate": 8.130575587936702e-06, + "loss": 1.0463, + "step": 9140 + }, + { + "epoch": 0.5731393817794219, + "grad_norm": 3.2030093669891357, + "learning_rate": 8.128580662629713e-06, + "loss": 1.2763, + "step": 9141 + }, + { + "epoch": 0.5732020816352122, + "grad_norm": 3.401099920272827, + "learning_rate": 8.126585814500141e-06, + "loss": 1.0165, + "step": 9142 + }, + { + "epoch": 0.5732647814910026, + "grad_norm": 2.9986162185668945, + "learning_rate": 8.12459104363025e-06, + "loss": 1.1135, + "step": 9143 + }, + { + "epoch": 0.5733274813467929, + "grad_norm": 3.0689785480499268, + "learning_rate": 8.122596350102307e-06, + "loss": 1.1007, + "step": 9144 + }, + { + "epoch": 0.5733901812025832, + "grad_norm": 3.010985851287842, + "learning_rate": 8.120601733998573e-06, + "loss": 1.2487, + "step": 9145 + }, + { + "epoch": 0.5734528810583736, + "grad_norm": 3.3888189792633057, + "learning_rate": 8.118607195401305e-06, + "loss": 1.1634, + "step": 9146 + }, + { + "epoch": 0.5735155809141639, + "grad_norm": 3.4528636932373047, + "learning_rate": 8.11661273439276e-06, + "loss": 1.0172, + "step": 9147 + }, + { + "epoch": 0.5735782807699542, + "grad_norm": 3.111191749572754, + "learning_rate": 8.114618351055186e-06, + "loss": 1.1373, + "step": 9148 + }, + { + "epoch": 0.5736409806257445, + "grad_norm": 2.889590263366699, + "learning_rate": 8.112624045470834e-06, + "loss": 1.3058, + "step": 9149 + }, + { + "epoch": 0.5737036804815349, + "grad_norm": 3.067962408065796, + "learning_rate": 8.110629817721949e-06, + "loss": 1.1823, + "step": 9150 + }, + { + "epoch": 0.5737663803373252, + "grad_norm": 2.9781298637390137, + "learning_rate": 8.108635667890774e-06, + "loss": 1.0966, + "step": 9151 + }, + { + "epoch": 0.5738290801931155, + "grad_norm": 3.2058424949645996, + "learning_rate": 8.106641596059545e-06, + "loss": 1.1906, + "step": 9152 + }, + { + "epoch": 0.5738917800489058, + "grad_norm": 3.4989686012268066, + "learning_rate": 8.104647602310501e-06, + "loss": 1.0754, + "step": 9153 + }, + { + "epoch": 0.5739544799046962, + "grad_norm": 2.966735363006592, + "learning_rate": 8.102653686725871e-06, + "loss": 1.1196, + "step": 9154 + }, + { + "epoch": 0.5740171797604865, + "grad_norm": 3.2042245864868164, + "learning_rate": 8.100659849387885e-06, + "loss": 1.0991, + "step": 9155 + }, + { + "epoch": 0.5740798796162769, + "grad_norm": 
3.0985093116760254, + "learning_rate": 8.098666090378772e-06, + "loss": 1.1419, + "step": 9156 + }, + { + "epoch": 0.5741425794720673, + "grad_norm": 3.160656452178955, + "learning_rate": 8.09667240978075e-06, + "loss": 1.2027, + "step": 9157 + }, + { + "epoch": 0.5742052793278576, + "grad_norm": 3.0356903076171875, + "learning_rate": 8.094678807676045e-06, + "loss": 1.2134, + "step": 9158 + }, + { + "epoch": 0.5742679791836479, + "grad_norm": 3.340852737426758, + "learning_rate": 8.092685284146865e-06, + "loss": 1.1752, + "step": 9159 + }, + { + "epoch": 0.5743306790394382, + "grad_norm": 3.1054224967956543, + "learning_rate": 8.09069183927543e-06, + "loss": 1.0648, + "step": 9160 + }, + { + "epoch": 0.5743933788952286, + "grad_norm": 3.0164473056793213, + "learning_rate": 8.088698473143945e-06, + "loss": 1.1819, + "step": 9161 + }, + { + "epoch": 0.5744560787510189, + "grad_norm": 3.326972484588623, + "learning_rate": 8.08670518583462e-06, + "loss": 1.1725, + "step": 9162 + }, + { + "epoch": 0.5745187786068092, + "grad_norm": 2.9990930557250977, + "learning_rate": 8.084711977429655e-06, + "loss": 1.226, + "step": 9163 + }, + { + "epoch": 0.5745814784625995, + "grad_norm": 2.9499621391296387, + "learning_rate": 8.082718848011256e-06, + "loss": 1.1106, + "step": 9164 + }, + { + "epoch": 0.5746441783183899, + "grad_norm": 3.0509417057037354, + "learning_rate": 8.08072579766161e-06, + "loss": 1.0319, + "step": 9165 + }, + { + "epoch": 0.5747068781741802, + "grad_norm": 3.315584659576416, + "learning_rate": 8.078732826462917e-06, + "loss": 1.2544, + "step": 9166 + }, + { + "epoch": 0.5747695780299705, + "grad_norm": 2.8307361602783203, + "learning_rate": 8.076739934497368e-06, + "loss": 1.1158, + "step": 9167 + }, + { + "epoch": 0.5748322778857609, + "grad_norm": 3.1731927394866943, + "learning_rate": 8.074747121847146e-06, + "loss": 1.0949, + "step": 9168 + }, + { + "epoch": 0.5748949777415512, + "grad_norm": 3.1554408073425293, + "learning_rate": 8.07275438859444e-06, + "loss": 1.0236, + "step": 9169 + }, + { + "epoch": 0.5749576775973415, + "grad_norm": 3.0024027824401855, + "learning_rate": 8.070761734821425e-06, + "loss": 1.2439, + "step": 9170 + }, + { + "epoch": 0.5750203774531318, + "grad_norm": 3.6206016540527344, + "learning_rate": 8.068769160610281e-06, + "loss": 1.0347, + "step": 9171 + }, + { + "epoch": 0.5750830773089222, + "grad_norm": 3.218989372253418, + "learning_rate": 8.066776666043179e-06, + "loss": 1.0923, + "step": 9172 + }, + { + "epoch": 0.5751457771647125, + "grad_norm": 3.0176584720611572, + "learning_rate": 8.064784251202295e-06, + "loss": 0.9798, + "step": 9173 + }, + { + "epoch": 0.5752084770205028, + "grad_norm": 3.0255494117736816, + "learning_rate": 8.06279191616979e-06, + "loss": 1.003, + "step": 9174 + }, + { + "epoch": 0.5752711768762931, + "grad_norm": 3.2220499515533447, + "learning_rate": 8.060799661027832e-06, + "loss": 1.2329, + "step": 9175 + }, + { + "epoch": 0.5753338767320835, + "grad_norm": 3.0218052864074707, + "learning_rate": 8.05880748585858e-06, + "loss": 1.0368, + "step": 9176 + }, + { + "epoch": 0.5753965765878738, + "grad_norm": 3.073261260986328, + "learning_rate": 8.056815390744193e-06, + "loss": 1.0551, + "step": 9177 + }, + { + "epoch": 0.5754592764436641, + "grad_norm": 3.491914987564087, + "learning_rate": 8.054823375766827e-06, + "loss": 0.9313, + "step": 9178 + }, + { + "epoch": 0.5755219762994546, + "grad_norm": 2.862046241760254, + "learning_rate": 8.052831441008626e-06, + "loss": 1.0388, + "step": 9179 + }, + { + "epoch": 
0.5755846761552449, + "grad_norm": 3.030763864517212, + "learning_rate": 8.050839586551741e-06, + "loss": 1.0898, + "step": 9180 + }, + { + "epoch": 0.5756473760110352, + "grad_norm": 3.1791398525238037, + "learning_rate": 8.048847812478318e-06, + "loss": 1.0649, + "step": 9181 + }, + { + "epoch": 0.5757100758668255, + "grad_norm": 2.977187156677246, + "learning_rate": 8.046856118870499e-06, + "loss": 1.034, + "step": 9182 + }, + { + "epoch": 0.5757727757226159, + "grad_norm": 3.496405601501465, + "learning_rate": 8.044864505810415e-06, + "loss": 1.0768, + "step": 9183 + }, + { + "epoch": 0.5758354755784062, + "grad_norm": 2.730165481567383, + "learning_rate": 8.042872973380207e-06, + "loss": 1.2245, + "step": 9184 + }, + { + "epoch": 0.5758981754341965, + "grad_norm": 2.8524537086486816, + "learning_rate": 8.040881521662001e-06, + "loss": 1.1781, + "step": 9185 + }, + { + "epoch": 0.5759608752899869, + "grad_norm": 3.075782537460327, + "learning_rate": 8.038890150737925e-06, + "loss": 1.1486, + "step": 9186 + }, + { + "epoch": 0.5760235751457772, + "grad_norm": 2.8918027877807617, + "learning_rate": 8.036898860690109e-06, + "loss": 1.1682, + "step": 9187 + }, + { + "epoch": 0.5760862750015675, + "grad_norm": 3.145439863204956, + "learning_rate": 8.034907651600666e-06, + "loss": 1.1766, + "step": 9188 + }, + { + "epoch": 0.5761489748573578, + "grad_norm": 2.9784231185913086, + "learning_rate": 8.03291652355172e-06, + "loss": 1.244, + "step": 9189 + }, + { + "epoch": 0.5762116747131482, + "grad_norm": 3.1637425422668457, + "learning_rate": 8.030925476625382e-06, + "loss": 1.0458, + "step": 9190 + }, + { + "epoch": 0.5762743745689385, + "grad_norm": 2.9183075428009033, + "learning_rate": 8.028934510903763e-06, + "loss": 1.0148, + "step": 9191 + }, + { + "epoch": 0.5763370744247288, + "grad_norm": 3.2215323448181152, + "learning_rate": 8.02694362646897e-06, + "loss": 0.9352, + "step": 9192 + }, + { + "epoch": 0.5763997742805191, + "grad_norm": 3.006221294403076, + "learning_rate": 8.024952823403109e-06, + "loss": 1.0796, + "step": 9193 + }, + { + "epoch": 0.5764624741363095, + "grad_norm": 3.063594341278076, + "learning_rate": 8.02296210178828e-06, + "loss": 1.1329, + "step": 9194 + }, + { + "epoch": 0.5765251739920998, + "grad_norm": 3.1301321983337402, + "learning_rate": 8.020971461706578e-06, + "loss": 1.119, + "step": 9195 + }, + { + "epoch": 0.5765878738478901, + "grad_norm": 3.2393243312835693, + "learning_rate": 8.018980903240103e-06, + "loss": 0.9614, + "step": 9196 + }, + { + "epoch": 0.5766505737036804, + "grad_norm": 2.816861867904663, + "learning_rate": 8.016990426470939e-06, + "loss": 1.037, + "step": 9197 + }, + { + "epoch": 0.5767132735594708, + "grad_norm": 2.911731719970703, + "learning_rate": 8.01500003148118e-06, + "loss": 1.1429, + "step": 9198 + }, + { + "epoch": 0.5767759734152611, + "grad_norm": 2.9920551776885986, + "learning_rate": 8.013009718352902e-06, + "loss": 1.131, + "step": 9199 + }, + { + "epoch": 0.5768386732710514, + "grad_norm": 3.419728994369507, + "learning_rate": 8.011019487168193e-06, + "loss": 1.0679, + "step": 9200 + }, + { + "epoch": 0.5769013731268418, + "grad_norm": 3.066898822784424, + "learning_rate": 8.009029338009124e-06, + "loss": 1.0177, + "step": 9201 + }, + { + "epoch": 0.5769640729826322, + "grad_norm": 3.507355213165283, + "learning_rate": 8.007039270957778e-06, + "loss": 0.978, + "step": 9202 + }, + { + "epoch": 0.5770267728384225, + "grad_norm": 3.0758490562438965, + "learning_rate": 8.005049286096214e-06, + "loss": 1.015, + "step": 
9203 + }, + { + "epoch": 0.5770894726942128, + "grad_norm": 3.1339781284332275, + "learning_rate": 8.003059383506506e-06, + "loss": 0.9905, + "step": 9204 + }, + { + "epoch": 0.5771521725500032, + "grad_norm": 3.090510606765747, + "learning_rate": 8.001069563270715e-06, + "loss": 1.0451, + "step": 9205 + }, + { + "epoch": 0.5772148724057935, + "grad_norm": 3.211721897125244, + "learning_rate": 7.999079825470904e-06, + "loss": 1.211, + "step": 9206 + }, + { + "epoch": 0.5772775722615838, + "grad_norm": 3.1922905445098877, + "learning_rate": 7.99709017018913e-06, + "loss": 1.1681, + "step": 9207 + }, + { + "epoch": 0.5773402721173742, + "grad_norm": 3.2013275623321533, + "learning_rate": 7.995100597507443e-06, + "loss": 1.0859, + "step": 9208 + }, + { + "epoch": 0.5774029719731645, + "grad_norm": 3.111537218093872, + "learning_rate": 7.993111107507897e-06, + "loss": 1.0219, + "step": 9209 + }, + { + "epoch": 0.5774656718289548, + "grad_norm": 2.7001826763153076, + "learning_rate": 7.991121700272532e-06, + "loss": 1.0515, + "step": 9210 + }, + { + "epoch": 0.5775283716847451, + "grad_norm": 3.104937791824341, + "learning_rate": 7.9891323758834e-06, + "loss": 1.044, + "step": 9211 + }, + { + "epoch": 0.5775910715405355, + "grad_norm": 3.3788206577301025, + "learning_rate": 7.987143134422534e-06, + "loss": 0.9073, + "step": 9212 + }, + { + "epoch": 0.5776537713963258, + "grad_norm": 3.2079408168792725, + "learning_rate": 7.985153975971976e-06, + "loss": 1.08, + "step": 9213 + }, + { + "epoch": 0.5777164712521161, + "grad_norm": 3.2392542362213135, + "learning_rate": 7.983164900613753e-06, + "loss": 1.1465, + "step": 9214 + }, + { + "epoch": 0.5777791711079064, + "grad_norm": 3.472031593322754, + "learning_rate": 7.9811759084299e-06, + "loss": 1.1634, + "step": 9215 + }, + { + "epoch": 0.5778418709636968, + "grad_norm": 3.1734237670898438, + "learning_rate": 7.979186999502438e-06, + "loss": 1.0516, + "step": 9216 + }, + { + "epoch": 0.5779045708194871, + "grad_norm": 3.1952714920043945, + "learning_rate": 7.977198173913394e-06, + "loss": 1.0638, + "step": 9217 + }, + { + "epoch": 0.5779672706752774, + "grad_norm": 2.928830862045288, + "learning_rate": 7.975209431744786e-06, + "loss": 1.1973, + "step": 9218 + }, + { + "epoch": 0.5780299705310677, + "grad_norm": 2.995427131652832, + "learning_rate": 7.973220773078628e-06, + "loss": 1.2414, + "step": 9219 + }, + { + "epoch": 0.5780926703868581, + "grad_norm": 3.335965156555176, + "learning_rate": 7.971232197996936e-06, + "loss": 1.0039, + "step": 9220 + }, + { + "epoch": 0.5781553702426484, + "grad_norm": 2.895942449569702, + "learning_rate": 7.969243706581716e-06, + "loss": 1.0681, + "step": 9221 + }, + { + "epoch": 0.5782180700984387, + "grad_norm": 3.050924301147461, + "learning_rate": 7.967255298914974e-06, + "loss": 0.9997, + "step": 9222 + }, + { + "epoch": 0.5782807699542291, + "grad_norm": 2.9823780059814453, + "learning_rate": 7.965266975078711e-06, + "loss": 1.1325, + "step": 9223 + }, + { + "epoch": 0.5783434698100194, + "grad_norm": 3.1124205589294434, + "learning_rate": 7.963278735154928e-06, + "loss": 1.1089, + "step": 9224 + }, + { + "epoch": 0.5784061696658098, + "grad_norm": 3.3283891677856445, + "learning_rate": 7.96129057922562e-06, + "loss": 1.3024, + "step": 9225 + }, + { + "epoch": 0.5784688695216001, + "grad_norm": 3.1033501625061035, + "learning_rate": 7.959302507372773e-06, + "loss": 1.032, + "step": 9226 + }, + { + "epoch": 0.5785315693773905, + "grad_norm": 3.1536059379577637, + "learning_rate": 7.957314519678385e-06, + 
"loss": 1.0315, + "step": 9227 + }, + { + "epoch": 0.5785942692331808, + "grad_norm": 3.2902326583862305, + "learning_rate": 7.955326616224432e-06, + "loss": 1.297, + "step": 9228 + }, + { + "epoch": 0.5786569690889711, + "grad_norm": 3.383497476577759, + "learning_rate": 7.953338797092902e-06, + "loss": 1.1942, + "step": 9229 + }, + { + "epoch": 0.5787196689447615, + "grad_norm": 3.2854936122894287, + "learning_rate": 7.951351062365766e-06, + "loss": 1.0693, + "step": 9230 + }, + { + "epoch": 0.5787823688005518, + "grad_norm": 2.723355293273926, + "learning_rate": 7.949363412125005e-06, + "loss": 1.1203, + "step": 9231 + }, + { + "epoch": 0.5788450686563421, + "grad_norm": 3.085568904876709, + "learning_rate": 7.947375846452583e-06, + "loss": 0.9346, + "step": 9232 + }, + { + "epoch": 0.5789077685121324, + "grad_norm": 3.371204137802124, + "learning_rate": 7.945388365430473e-06, + "loss": 1.0016, + "step": 9233 + }, + { + "epoch": 0.5789704683679228, + "grad_norm": 2.8177740573883057, + "learning_rate": 7.943400969140635e-06, + "loss": 1.1688, + "step": 9234 + }, + { + "epoch": 0.5790331682237131, + "grad_norm": 3.020328998565674, + "learning_rate": 7.941413657665034e-06, + "loss": 1.0391, + "step": 9235 + }, + { + "epoch": 0.5790958680795034, + "grad_norm": 3.069196939468384, + "learning_rate": 7.93942643108562e-06, + "loss": 1.1352, + "step": 9236 + }, + { + "epoch": 0.5791585679352937, + "grad_norm": 3.2916007041931152, + "learning_rate": 7.93743928948435e-06, + "loss": 1.2913, + "step": 9237 + }, + { + "epoch": 0.5792212677910841, + "grad_norm": 2.850656747817993, + "learning_rate": 7.935452232943175e-06, + "loss": 1.067, + "step": 9238 + }, + { + "epoch": 0.5792839676468744, + "grad_norm": 3.3826241493225098, + "learning_rate": 7.93346526154404e-06, + "loss": 1.1092, + "step": 9239 + }, + { + "epoch": 0.5793466675026647, + "grad_norm": 3.1624538898468018, + "learning_rate": 7.93147837536889e-06, + "loss": 1.0129, + "step": 9240 + }, + { + "epoch": 0.579409367358455, + "grad_norm": 3.052596092224121, + "learning_rate": 7.929491574499659e-06, + "loss": 1.0641, + "step": 9241 + }, + { + "epoch": 0.5794720672142454, + "grad_norm": 2.9952290058135986, + "learning_rate": 7.927504859018288e-06, + "loss": 1.1638, + "step": 9242 + }, + { + "epoch": 0.5795347670700357, + "grad_norm": 2.959760904312134, + "learning_rate": 7.925518229006706e-06, + "loss": 1.0943, + "step": 9243 + }, + { + "epoch": 0.579597466925826, + "grad_norm": 3.4340193271636963, + "learning_rate": 7.923531684546843e-06, + "loss": 1.1065, + "step": 9244 + }, + { + "epoch": 0.5796601667816164, + "grad_norm": 3.3947079181671143, + "learning_rate": 7.921545225720623e-06, + "loss": 0.9912, + "step": 9245 + }, + { + "epoch": 0.5797228666374067, + "grad_norm": 3.514286756515503, + "learning_rate": 7.919558852609971e-06, + "loss": 0.9712, + "step": 9246 + }, + { + "epoch": 0.579785566493197, + "grad_norm": 3.1523687839508057, + "learning_rate": 7.917572565296801e-06, + "loss": 1.1946, + "step": 9247 + }, + { + "epoch": 0.5798482663489875, + "grad_norm": 3.267892360687256, + "learning_rate": 7.915586363863027e-06, + "loss": 1.2077, + "step": 9248 + }, + { + "epoch": 0.5799109662047778, + "grad_norm": 3.0637195110321045, + "learning_rate": 7.913600248390567e-06, + "loss": 1.0359, + "step": 9249 + }, + { + "epoch": 0.5799736660605681, + "grad_norm": 3.331958293914795, + "learning_rate": 7.911614218961321e-06, + "loss": 1.083, + "step": 9250 + }, + { + "epoch": 0.5800363659163584, + "grad_norm": 3.4058210849761963, + "learning_rate": 
7.909628275657199e-06, + "loss": 1.0714, + "step": 9251 + }, + { + "epoch": 0.5800990657721488, + "grad_norm": 2.7031772136688232, + "learning_rate": 7.907642418560094e-06, + "loss": 1.2748, + "step": 9252 + }, + { + "epoch": 0.5801617656279391, + "grad_norm": 3.350764513015747, + "learning_rate": 7.90565664775191e-06, + "loss": 1.3154, + "step": 9253 + }, + { + "epoch": 0.5802244654837294, + "grad_norm": 2.753588914871216, + "learning_rate": 7.903670963314536e-06, + "loss": 1.1869, + "step": 9254 + }, + { + "epoch": 0.5802871653395197, + "grad_norm": 3.2187321186065674, + "learning_rate": 7.901685365329865e-06, + "loss": 0.9966, + "step": 9255 + }, + { + "epoch": 0.5803498651953101, + "grad_norm": 2.822269916534424, + "learning_rate": 7.89969985387978e-06, + "loss": 1.2037, + "step": 9256 + }, + { + "epoch": 0.5804125650511004, + "grad_norm": 3.153428077697754, + "learning_rate": 7.897714429046161e-06, + "loss": 1.0042, + "step": 9257 + }, + { + "epoch": 0.5804752649068907, + "grad_norm": 3.1861727237701416, + "learning_rate": 7.895729090910895e-06, + "loss": 1.1191, + "step": 9258 + }, + { + "epoch": 0.580537964762681, + "grad_norm": 3.064425468444824, + "learning_rate": 7.893743839555852e-06, + "loss": 1.1195, + "step": 9259 + }, + { + "epoch": 0.5806006646184714, + "grad_norm": 3.131957530975342, + "learning_rate": 7.891758675062905e-06, + "loss": 1.0323, + "step": 9260 + }, + { + "epoch": 0.5806633644742617, + "grad_norm": 3.2556495666503906, + "learning_rate": 7.889773597513924e-06, + "loss": 0.9824, + "step": 9261 + }, + { + "epoch": 0.580726064330052, + "grad_norm": 3.0998826026916504, + "learning_rate": 7.88778860699077e-06, + "loss": 1.1478, + "step": 9262 + }, + { + "epoch": 0.5807887641858424, + "grad_norm": 3.1339833736419678, + "learning_rate": 7.885803703575307e-06, + "loss": 1.0899, + "step": 9263 + }, + { + "epoch": 0.5808514640416327, + "grad_norm": 3.1828160285949707, + "learning_rate": 7.883818887349391e-06, + "loss": 1.2669, + "step": 9264 + }, + { + "epoch": 0.580914163897423, + "grad_norm": 3.258883476257324, + "learning_rate": 7.881834158394876e-06, + "loss": 1.0601, + "step": 9265 + }, + { + "epoch": 0.5809768637532133, + "grad_norm": 3.2327864170074463, + "learning_rate": 7.879849516793615e-06, + "loss": 1.039, + "step": 9266 + }, + { + "epoch": 0.5810395636090037, + "grad_norm": 3.5284767150878906, + "learning_rate": 7.87786496262745e-06, + "loss": 1.093, + "step": 9267 + }, + { + "epoch": 0.581102263464794, + "grad_norm": 3.4243452548980713, + "learning_rate": 7.875880495978227e-06, + "loss": 1.0197, + "step": 9268 + }, + { + "epoch": 0.5811649633205843, + "grad_norm": 2.9603254795074463, + "learning_rate": 7.873896116927787e-06, + "loss": 1.1638, + "step": 9269 + }, + { + "epoch": 0.5812276631763746, + "grad_norm": 3.169499635696411, + "learning_rate": 7.871911825557962e-06, + "loss": 1.0166, + "step": 9270 + }, + { + "epoch": 0.5812903630321651, + "grad_norm": 2.722656488418579, + "learning_rate": 7.869927621950589e-06, + "loss": 1.1495, + "step": 9271 + }, + { + "epoch": 0.5813530628879554, + "grad_norm": 2.8789665699005127, + "learning_rate": 7.86794350618749e-06, + "loss": 1.1963, + "step": 9272 + }, + { + "epoch": 0.5814157627437457, + "grad_norm": 3.202575445175171, + "learning_rate": 7.865959478350497e-06, + "loss": 1.1674, + "step": 9273 + }, + { + "epoch": 0.5814784625995361, + "grad_norm": 3.0028114318847656, + "learning_rate": 7.863975538521426e-06, + "loss": 1.1623, + "step": 9274 + }, + { + "epoch": 0.5815411624553264, + "grad_norm": 
3.0696234703063965, + "learning_rate": 7.861991686782099e-06, + "loss": 1.0243, + "step": 9275 + }, + { + "epoch": 0.5816038623111167, + "grad_norm": 3.186389923095703, + "learning_rate": 7.860007923214327e-06, + "loss": 1.1194, + "step": 9276 + }, + { + "epoch": 0.581666562166907, + "grad_norm": 3.1897168159484863, + "learning_rate": 7.85802424789992e-06, + "loss": 1.2007, + "step": 9277 + }, + { + "epoch": 0.5817292620226974, + "grad_norm": 3.5164759159088135, + "learning_rate": 7.856040660920691e-06, + "loss": 1.1853, + "step": 9278 + }, + { + "epoch": 0.5817919618784877, + "grad_norm": 3.306889057159424, + "learning_rate": 7.854057162358436e-06, + "loss": 1.0005, + "step": 9279 + }, + { + "epoch": 0.581854661734278, + "grad_norm": 2.914863109588623, + "learning_rate": 7.852073752294958e-06, + "loss": 1.055, + "step": 9280 + }, + { + "epoch": 0.5819173615900683, + "grad_norm": 3.021749973297119, + "learning_rate": 7.85009043081205e-06, + "loss": 1.1404, + "step": 9281 + }, + { + "epoch": 0.5819800614458587, + "grad_norm": 3.3646717071533203, + "learning_rate": 7.84810719799151e-06, + "loss": 1.0662, + "step": 9282 + }, + { + "epoch": 0.582042761301649, + "grad_norm": 3.315073013305664, + "learning_rate": 7.846124053915123e-06, + "loss": 1.2589, + "step": 9283 + }, + { + "epoch": 0.5821054611574393, + "grad_norm": 2.9751334190368652, + "learning_rate": 7.844140998664672e-06, + "loss": 1.1545, + "step": 9284 + }, + { + "epoch": 0.5821681610132297, + "grad_norm": 3.3733513355255127, + "learning_rate": 7.84215803232194e-06, + "loss": 1.2106, + "step": 9285 + }, + { + "epoch": 0.58223086086902, + "grad_norm": 2.9616103172302246, + "learning_rate": 7.840175154968708e-06, + "loss": 1.0327, + "step": 9286 + }, + { + "epoch": 0.5822935607248103, + "grad_norm": 2.8382935523986816, + "learning_rate": 7.838192366686743e-06, + "loss": 1.122, + "step": 9287 + }, + { + "epoch": 0.5823562605806006, + "grad_norm": 3.253330707550049, + "learning_rate": 7.836209667557822e-06, + "loss": 1.089, + "step": 9288 + }, + { + "epoch": 0.582418960436391, + "grad_norm": 3.090172052383423, + "learning_rate": 7.834227057663708e-06, + "loss": 1.1031, + "step": 9289 + }, + { + "epoch": 0.5824816602921813, + "grad_norm": 3.2088868618011475, + "learning_rate": 7.832244537086165e-06, + "loss": 0.8626, + "step": 9290 + }, + { + "epoch": 0.5825443601479716, + "grad_norm": 3.322328805923462, + "learning_rate": 7.830262105906952e-06, + "loss": 1.0128, + "step": 9291 + }, + { + "epoch": 0.582607060003762, + "grad_norm": 2.90682053565979, + "learning_rate": 7.828279764207823e-06, + "loss": 1.1232, + "step": 9292 + }, + { + "epoch": 0.5826697598595523, + "grad_norm": 2.976865291595459, + "learning_rate": 7.826297512070533e-06, + "loss": 1.0538, + "step": 9293 + }, + { + "epoch": 0.5827324597153427, + "grad_norm": 3.541684627532959, + "learning_rate": 7.824315349576828e-06, + "loss": 1.2372, + "step": 9294 + }, + { + "epoch": 0.582795159571133, + "grad_norm": 3.331254005432129, + "learning_rate": 7.822333276808454e-06, + "loss": 1.0679, + "step": 9295 + }, + { + "epoch": 0.5828578594269234, + "grad_norm": 3.0909528732299805, + "learning_rate": 7.820351293847149e-06, + "loss": 1.1052, + "step": 9296 + }, + { + "epoch": 0.5829205592827137, + "grad_norm": 3.2160487174987793, + "learning_rate": 7.818369400774653e-06, + "loss": 0.9677, + "step": 9297 + }, + { + "epoch": 0.582983259138504, + "grad_norm": 2.94105863571167, + "learning_rate": 7.816387597672698e-06, + "loss": 1.1321, + "step": 9298 + }, + { + "epoch": 0.5830459589942943, 
+ "grad_norm": 3.046083688735962, + "learning_rate": 7.814405884623012e-06, + "loss": 1.1929, + "step": 9299 + }, + { + "epoch": 0.5831086588500847, + "grad_norm": 3.75429630279541, + "learning_rate": 7.812424261707325e-06, + "loss": 1.1668, + "step": 9300 + }, + { + "epoch": 0.583171358705875, + "grad_norm": 3.1936209201812744, + "learning_rate": 7.810442729007357e-06, + "loss": 1.1348, + "step": 9301 + }, + { + "epoch": 0.5832340585616653, + "grad_norm": 2.790426731109619, + "learning_rate": 7.808461286604828e-06, + "loss": 1.288, + "step": 9302 + }, + { + "epoch": 0.5832967584174557, + "grad_norm": 3.159344434738159, + "learning_rate": 7.806479934581447e-06, + "loss": 1.1011, + "step": 9303 + }, + { + "epoch": 0.583359458273246, + "grad_norm": 3.27064847946167, + "learning_rate": 7.804498673018931e-06, + "loss": 1.2354, + "step": 9304 + }, + { + "epoch": 0.5834221581290363, + "grad_norm": 2.8429665565490723, + "learning_rate": 7.802517501998986e-06, + "loss": 1.0283, + "step": 9305 + }, + { + "epoch": 0.5834848579848266, + "grad_norm": 2.792078733444214, + "learning_rate": 7.800536421603317e-06, + "loss": 1.1291, + "step": 9306 + }, + { + "epoch": 0.583547557840617, + "grad_norm": 2.939836263656616, + "learning_rate": 7.79855543191362e-06, + "loss": 1.1155, + "step": 9307 + }, + { + "epoch": 0.5836102576964073, + "grad_norm": 3.285845994949341, + "learning_rate": 7.796574533011592e-06, + "loss": 0.9365, + "step": 9308 + }, + { + "epoch": 0.5836729575521976, + "grad_norm": 3.2356953620910645, + "learning_rate": 7.794593724978929e-06, + "loss": 1.2491, + "step": 9309 + }, + { + "epoch": 0.5837356574079879, + "grad_norm": 3.1376302242279053, + "learning_rate": 7.792613007897315e-06, + "loss": 1.1665, + "step": 9310 + }, + { + "epoch": 0.5837983572637783, + "grad_norm": 3.3511970043182373, + "learning_rate": 7.790632381848438e-06, + "loss": 0.9823, + "step": 9311 + }, + { + "epoch": 0.5838610571195686, + "grad_norm": 3.474360466003418, + "learning_rate": 7.788651846913978e-06, + "loss": 1.0149, + "step": 9312 + }, + { + "epoch": 0.5839237569753589, + "grad_norm": 3.4437966346740723, + "learning_rate": 7.786671403175613e-06, + "loss": 0.9151, + "step": 9313 + }, + { + "epoch": 0.5839864568311492, + "grad_norm": 3.1672685146331787, + "learning_rate": 7.784691050715014e-06, + "loss": 1.1591, + "step": 9314 + }, + { + "epoch": 0.5840491566869396, + "grad_norm": 3.3848674297332764, + "learning_rate": 7.782710789613856e-06, + "loss": 1.1311, + "step": 9315 + }, + { + "epoch": 0.5841118565427299, + "grad_norm": 3.108793020248413, + "learning_rate": 7.780730619953799e-06, + "loss": 1.1178, + "step": 9316 + }, + { + "epoch": 0.5841745563985202, + "grad_norm": 3.0838875770568848, + "learning_rate": 7.77875054181651e-06, + "loss": 0.9301, + "step": 9317 + }, + { + "epoch": 0.5842372562543107, + "grad_norm": 2.9813482761383057, + "learning_rate": 7.776770555283644e-06, + "loss": 1.0781, + "step": 9318 + }, + { + "epoch": 0.584299956110101, + "grad_norm": 2.812957763671875, + "learning_rate": 7.774790660436857e-06, + "loss": 1.1363, + "step": 9319 + }, + { + "epoch": 0.5843626559658913, + "grad_norm": 3.2938427925109863, + "learning_rate": 7.772810857357803e-06, + "loss": 1.1913, + "step": 9320 + }, + { + "epoch": 0.5844253558216816, + "grad_norm": 3.2493629455566406, + "learning_rate": 7.770831146128123e-06, + "loss": 1.0168, + "step": 9321 + }, + { + "epoch": 0.584488055677472, + "grad_norm": 3.3401873111724854, + "learning_rate": 7.768851526829469e-06, + "loss": 1.0965, + "step": 9322 + }, + { + 
"epoch": 0.5845507555332623, + "grad_norm": 3.3079264163970947, + "learning_rate": 7.76687199954347e-06, + "loss": 1.0723, + "step": 9323 + }, + { + "epoch": 0.5846134553890526, + "grad_norm": 3.0382421016693115, + "learning_rate": 7.764892564351772e-06, + "loss": 0.8378, + "step": 9324 + }, + { + "epoch": 0.584676155244843, + "grad_norm": 3.608715534210205, + "learning_rate": 7.762913221335998e-06, + "loss": 1.1793, + "step": 9325 + }, + { + "epoch": 0.5847388551006333, + "grad_norm": 2.9342894554138184, + "learning_rate": 7.760933970577784e-06, + "loss": 1.1509, + "step": 9326 + }, + { + "epoch": 0.5848015549564236, + "grad_norm": 3.0836880207061768, + "learning_rate": 7.758954812158748e-06, + "loss": 1.0498, + "step": 9327 + }, + { + "epoch": 0.5848642548122139, + "grad_norm": 2.9347119331359863, + "learning_rate": 7.756975746160516e-06, + "loss": 1.1213, + "step": 9328 + }, + { + "epoch": 0.5849269546680043, + "grad_norm": 3.5245068073272705, + "learning_rate": 7.7549967726647e-06, + "loss": 1.0815, + "step": 9329 + }, + { + "epoch": 0.5849896545237946, + "grad_norm": 3.216698408126831, + "learning_rate": 7.753017891752914e-06, + "loss": 1.1936, + "step": 9330 + }, + { + "epoch": 0.5850523543795849, + "grad_norm": 2.954756259918213, + "learning_rate": 7.751039103506771e-06, + "loss": 1.0992, + "step": 9331 + }, + { + "epoch": 0.5851150542353752, + "grad_norm": 2.7199666500091553, + "learning_rate": 7.749060408007874e-06, + "loss": 1.1931, + "step": 9332 + }, + { + "epoch": 0.5851777540911656, + "grad_norm": 3.086618661880493, + "learning_rate": 7.747081805337822e-06, + "loss": 1.0973, + "step": 9333 + }, + { + "epoch": 0.5852404539469559, + "grad_norm": 3.117952346801758, + "learning_rate": 7.745103295578216e-06, + "loss": 1.2004, + "step": 9334 + }, + { + "epoch": 0.5853031538027462, + "grad_norm": 3.3509774208068848, + "learning_rate": 7.743124878810648e-06, + "loss": 1.1341, + "step": 9335 + }, + { + "epoch": 0.5853658536585366, + "grad_norm": 2.968198299407959, + "learning_rate": 7.741146555116708e-06, + "loss": 1.0697, + "step": 9336 + }, + { + "epoch": 0.5854285535143269, + "grad_norm": 3.1786253452301025, + "learning_rate": 7.739168324577985e-06, + "loss": 1.0515, + "step": 9337 + }, + { + "epoch": 0.5854912533701172, + "grad_norm": 3.3778738975524902, + "learning_rate": 7.737190187276058e-06, + "loss": 1.1652, + "step": 9338 + }, + { + "epoch": 0.5855539532259075, + "grad_norm": 3.4040963649749756, + "learning_rate": 7.735212143292504e-06, + "loss": 1.1976, + "step": 9339 + }, + { + "epoch": 0.5856166530816979, + "grad_norm": 3.064493179321289, + "learning_rate": 7.733234192708903e-06, + "loss": 0.9676, + "step": 9340 + }, + { + "epoch": 0.5856793529374883, + "grad_norm": 3.139631986618042, + "learning_rate": 7.73125633560682e-06, + "loss": 1.1093, + "step": 9341 + }, + { + "epoch": 0.5857420527932786, + "grad_norm": 3.566124200820923, + "learning_rate": 7.729278572067828e-06, + "loss": 1.1996, + "step": 9342 + }, + { + "epoch": 0.585804752649069, + "grad_norm": 3.36283016204834, + "learning_rate": 7.727300902173486e-06, + "loss": 1.1114, + "step": 9343 + }, + { + "epoch": 0.5858674525048593, + "grad_norm": 3.1164584159851074, + "learning_rate": 7.725323326005355e-06, + "loss": 1.1384, + "step": 9344 + }, + { + "epoch": 0.5859301523606496, + "grad_norm": 3.1023707389831543, + "learning_rate": 7.723345843644986e-06, + "loss": 1.0961, + "step": 9345 + }, + { + "epoch": 0.5859928522164399, + "grad_norm": 3.1813478469848633, + "learning_rate": 7.721368455173938e-06, + "loss": 
1.0645, + "step": 9346 + }, + { + "epoch": 0.5860555520722303, + "grad_norm": 3.26884126663208, + "learning_rate": 7.71939116067375e-06, + "loss": 1.1035, + "step": 9347 + }, + { + "epoch": 0.5861182519280206, + "grad_norm": 2.7926578521728516, + "learning_rate": 7.717413960225974e-06, + "loss": 1.1165, + "step": 9348 + }, + { + "epoch": 0.5861809517838109, + "grad_norm": 3.152285575866699, + "learning_rate": 7.715436853912143e-06, + "loss": 1.0435, + "step": 9349 + }, + { + "epoch": 0.5862436516396012, + "grad_norm": 3.093858480453491, + "learning_rate": 7.713459841813797e-06, + "loss": 1.1192, + "step": 9350 + }, + { + "epoch": 0.5863063514953916, + "grad_norm": 3.095203399658203, + "learning_rate": 7.711482924012468e-06, + "loss": 1.1061, + "step": 9351 + }, + { + "epoch": 0.5863690513511819, + "grad_norm": 3.1737828254699707, + "learning_rate": 7.709506100589682e-06, + "loss": 1.0283, + "step": 9352 + }, + { + "epoch": 0.5864317512069722, + "grad_norm": 2.7149064540863037, + "learning_rate": 7.707529371626966e-06, + "loss": 1.1542, + "step": 9353 + }, + { + "epoch": 0.5864944510627625, + "grad_norm": 3.0081441402435303, + "learning_rate": 7.705552737205835e-06, + "loss": 1.1122, + "step": 9354 + }, + { + "epoch": 0.5865571509185529, + "grad_norm": 3.395432710647583, + "learning_rate": 7.703576197407814e-06, + "loss": 1.2551, + "step": 9355 + }, + { + "epoch": 0.5866198507743432, + "grad_norm": 3.1646134853363037, + "learning_rate": 7.701599752314408e-06, + "loss": 1.0949, + "step": 9356 + }, + { + "epoch": 0.5866825506301335, + "grad_norm": 3.290724277496338, + "learning_rate": 7.69962340200713e-06, + "loss": 1.0709, + "step": 9357 + }, + { + "epoch": 0.5867452504859239, + "grad_norm": 2.93679141998291, + "learning_rate": 7.697647146567484e-06, + "loss": 1.1083, + "step": 9358 + }, + { + "epoch": 0.5868079503417142, + "grad_norm": 3.308690071105957, + "learning_rate": 7.695670986076966e-06, + "loss": 1.2295, + "step": 9359 + }, + { + "epoch": 0.5868706501975045, + "grad_norm": 2.7963156700134277, + "learning_rate": 7.693694920617082e-06, + "loss": 1.2464, + "step": 9360 + }, + { + "epoch": 0.5869333500532948, + "grad_norm": 3.3062775135040283, + "learning_rate": 7.691718950269318e-06, + "loss": 1.1079, + "step": 9361 + }, + { + "epoch": 0.5869960499090852, + "grad_norm": 2.982009172439575, + "learning_rate": 7.689743075115166e-06, + "loss": 1.0882, + "step": 9362 + }, + { + "epoch": 0.5870587497648755, + "grad_norm": 3.4935834407806396, + "learning_rate": 7.68776729523611e-06, + "loss": 1.0368, + "step": 9363 + }, + { + "epoch": 0.5871214496206659, + "grad_norm": 3.222928285598755, + "learning_rate": 7.685791610713634e-06, + "loss": 1.0607, + "step": 9364 + }, + { + "epoch": 0.5871841494764563, + "grad_norm": 2.7770726680755615, + "learning_rate": 7.68381602162921e-06, + "loss": 1.0264, + "step": 9365 + }, + { + "epoch": 0.5872468493322466, + "grad_norm": 3.2097768783569336, + "learning_rate": 7.681840528064315e-06, + "loss": 1.1769, + "step": 9366 + }, + { + "epoch": 0.5873095491880369, + "grad_norm": 3.101135730743408, + "learning_rate": 7.679865130100417e-06, + "loss": 1.1181, + "step": 9367 + }, + { + "epoch": 0.5873722490438272, + "grad_norm": 3.114138603210449, + "learning_rate": 7.677889827818985e-06, + "loss": 1.2681, + "step": 9368 + }, + { + "epoch": 0.5874349488996176, + "grad_norm": 2.95212721824646, + "learning_rate": 7.675914621301476e-06, + "loss": 1.076, + "step": 9369 + }, + { + "epoch": 0.5874976487554079, + "grad_norm": 3.1988816261291504, + "learning_rate": 
7.67393951062935e-06, + "loss": 1.23, + "step": 9370 + }, + { + "epoch": 0.5875603486111982, + "grad_norm": 3.201629400253296, + "learning_rate": 7.67196449588406e-06, + "loss": 1.095, + "step": 9371 + }, + { + "epoch": 0.5876230484669885, + "grad_norm": 3.3643205165863037, + "learning_rate": 7.669989577147055e-06, + "loss": 1.0522, + "step": 9372 + }, + { + "epoch": 0.5876857483227789, + "grad_norm": 3.1524133682250977, + "learning_rate": 7.668014754499783e-06, + "loss": 1.0358, + "step": 9373 + }, + { + "epoch": 0.5877484481785692, + "grad_norm": 3.048224449157715, + "learning_rate": 7.666040028023684e-06, + "loss": 1.1411, + "step": 9374 + }, + { + "epoch": 0.5878111480343595, + "grad_norm": 3.2324557304382324, + "learning_rate": 7.664065397800197e-06, + "loss": 0.9464, + "step": 9375 + }, + { + "epoch": 0.5878738478901498, + "grad_norm": 3.570685863494873, + "learning_rate": 7.662090863910753e-06, + "loss": 1.1808, + "step": 9376 + }, + { + "epoch": 0.5879365477459402, + "grad_norm": 3.1918656826019287, + "learning_rate": 7.660116426436786e-06, + "loss": 1.0977, + "step": 9377 + }, + { + "epoch": 0.5879992476017305, + "grad_norm": 3.1262638568878174, + "learning_rate": 7.658142085459717e-06, + "loss": 1.2146, + "step": 9378 + }, + { + "epoch": 0.5880619474575208, + "grad_norm": 3.4561758041381836, + "learning_rate": 7.656167841060975e-06, + "loss": 1.1706, + "step": 9379 + }, + { + "epoch": 0.5881246473133112, + "grad_norm": 3.257863998413086, + "learning_rate": 7.65419369332197e-06, + "loss": 1.1412, + "step": 9380 + }, + { + "epoch": 0.5881873471691015, + "grad_norm": 3.397104263305664, + "learning_rate": 7.65221964232412e-06, + "loss": 0.9556, + "step": 9381 + }, + { + "epoch": 0.5882500470248918, + "grad_norm": 3.1010689735412598, + "learning_rate": 7.650245688148834e-06, + "loss": 1.2724, + "step": 9382 + }, + { + "epoch": 0.5883127468806821, + "grad_norm": 3.179842472076416, + "learning_rate": 7.64827183087752e-06, + "loss": 1.0967, + "step": 9383 + }, + { + "epoch": 0.5883754467364725, + "grad_norm": 3.09809947013855, + "learning_rate": 7.646298070591578e-06, + "loss": 0.9927, + "step": 9384 + }, + { + "epoch": 0.5884381465922628, + "grad_norm": 2.951853036880493, + "learning_rate": 7.644324407372405e-06, + "loss": 1.0044, + "step": 9385 + }, + { + "epoch": 0.5885008464480531, + "grad_norm": 2.972348690032959, + "learning_rate": 7.642350841301399e-06, + "loss": 1.0452, + "step": 9386 + }, + { + "epoch": 0.5885635463038436, + "grad_norm": 3.5493369102478027, + "learning_rate": 7.640377372459944e-06, + "loss": 1.1518, + "step": 9387 + }, + { + "epoch": 0.5886262461596339, + "grad_norm": 3.293353319168091, + "learning_rate": 7.638404000929433e-06, + "loss": 1.2409, + "step": 9388 + }, + { + "epoch": 0.5886889460154242, + "grad_norm": 3.1890809535980225, + "learning_rate": 7.63643072679124e-06, + "loss": 1.3049, + "step": 9389 + }, + { + "epoch": 0.5887516458712145, + "grad_norm": 3.1234946250915527, + "learning_rate": 7.634457550126749e-06, + "loss": 0.9915, + "step": 9390 + }, + { + "epoch": 0.5888143457270049, + "grad_norm": 3.003619909286499, + "learning_rate": 7.632484471017334e-06, + "loss": 1.165, + "step": 9391 + }, + { + "epoch": 0.5888770455827952, + "grad_norm": 3.3781683444976807, + "learning_rate": 7.630511489544358e-06, + "loss": 1.0962, + "step": 9392 + }, + { + "epoch": 0.5889397454385855, + "grad_norm": 2.982455015182495, + "learning_rate": 7.628538605789198e-06, + "loss": 1.14, + "step": 9393 + }, + { + "epoch": 0.5890024452943758, + "grad_norm": 2.827732563018799, 
+ "learning_rate": 7.626565819833204e-06, + "loss": 1.1184, + "step": 9394 + }, + { + "epoch": 0.5890651451501662, + "grad_norm": 3.488388776779175, + "learning_rate": 7.6245931317577425e-06, + "loss": 0.9782, + "step": 9395 + }, + { + "epoch": 0.5891278450059565, + "grad_norm": 2.9758682250976562, + "learning_rate": 7.622620541644161e-06, + "loss": 1.0277, + "step": 9396 + }, + { + "epoch": 0.5891905448617468, + "grad_norm": 3.421056032180786, + "learning_rate": 7.620648049573815e-06, + "loss": 0.9127, + "step": 9397 + }, + { + "epoch": 0.5892532447175372, + "grad_norm": 3.2731852531433105, + "learning_rate": 7.618675655628046e-06, + "loss": 1.1353, + "step": 9398 + }, + { + "epoch": 0.5893159445733275, + "grad_norm": 3.3488805294036865, + "learning_rate": 7.616703359888198e-06, + "loss": 1.0812, + "step": 9399 + }, + { + "epoch": 0.5893786444291178, + "grad_norm": 3.801429271697998, + "learning_rate": 7.614731162435606e-06, + "loss": 1.0634, + "step": 9400 + }, + { + "epoch": 0.5894413442849081, + "grad_norm": 2.9645819664001465, + "learning_rate": 7.612759063351605e-06, + "loss": 1.2464, + "step": 9401 + }, + { + "epoch": 0.5895040441406985, + "grad_norm": 3.7340524196624756, + "learning_rate": 7.610787062717527e-06, + "loss": 0.9982, + "step": 9402 + }, + { + "epoch": 0.5895667439964888, + "grad_norm": 3.2053418159484863, + "learning_rate": 7.608815160614691e-06, + "loss": 1.1689, + "step": 9403 + }, + { + "epoch": 0.5896294438522791, + "grad_norm": 3.4604156017303467, + "learning_rate": 7.606843357124426e-06, + "loss": 1.1003, + "step": 9404 + }, + { + "epoch": 0.5896921437080694, + "grad_norm": 3.498927116394043, + "learning_rate": 7.604871652328043e-06, + "loss": 1.0071, + "step": 9405 + }, + { + "epoch": 0.5897548435638598, + "grad_norm": 3.4001033306121826, + "learning_rate": 7.602900046306859e-06, + "loss": 1.1654, + "step": 9406 + }, + { + "epoch": 0.5898175434196501, + "grad_norm": 3.440201997756958, + "learning_rate": 7.60092853914218e-06, + "loss": 1.0409, + "step": 9407 + }, + { + "epoch": 0.5898802432754404, + "grad_norm": 3.1825382709503174, + "learning_rate": 7.598957130915315e-06, + "loss": 1.0382, + "step": 9408 + }, + { + "epoch": 0.5899429431312307, + "grad_norm": 3.1867146492004395, + "learning_rate": 7.596985821707561e-06, + "loss": 1.1614, + "step": 9409 + }, + { + "epoch": 0.5900056429870212, + "grad_norm": 3.3243472576141357, + "learning_rate": 7.5950146116002184e-06, + "loss": 1.1502, + "step": 9410 + }, + { + "epoch": 0.5900683428428115, + "grad_norm": 3.0510571002960205, + "learning_rate": 7.593043500674576e-06, + "loss": 1.1466, + "step": 9411 + }, + { + "epoch": 0.5901310426986018, + "grad_norm": 3.514071464538574, + "learning_rate": 7.591072489011926e-06, + "loss": 0.9277, + "step": 9412 + }, + { + "epoch": 0.5901937425543922, + "grad_norm": 3.199263095855713, + "learning_rate": 7.589101576693553e-06, + "loss": 1.2553, + "step": 9413 + }, + { + "epoch": 0.5902564424101825, + "grad_norm": 3.1271331310272217, + "learning_rate": 7.587130763800733e-06, + "loss": 1.2028, + "step": 9414 + }, + { + "epoch": 0.5903191422659728, + "grad_norm": 3.298448324203491, + "learning_rate": 7.585160050414749e-06, + "loss": 1.1992, + "step": 9415 + }, + { + "epoch": 0.5903818421217631, + "grad_norm": 3.2607226371765137, + "learning_rate": 7.583189436616868e-06, + "loss": 0.9048, + "step": 9416 + }, + { + "epoch": 0.5904445419775535, + "grad_norm": 2.8519842624664307, + "learning_rate": 7.581218922488361e-06, + "loss": 1.1514, + "step": 9417 + }, + { + "epoch": 
0.5905072418333438, + "grad_norm": 2.9238970279693604, + "learning_rate": 7.579248508110491e-06, + "loss": 1.1847, + "step": 9418 + }, + { + "epoch": 0.5905699416891341, + "grad_norm": 3.517302989959717, + "learning_rate": 7.577278193564519e-06, + "loss": 0.9335, + "step": 9419 + }, + { + "epoch": 0.5906326415449245, + "grad_norm": 3.1905667781829834, + "learning_rate": 7.575307978931698e-06, + "loss": 1.2466, + "step": 9420 + }, + { + "epoch": 0.5906953414007148, + "grad_norm": 3.057640790939331, + "learning_rate": 7.573337864293283e-06, + "loss": 1.1201, + "step": 9421 + }, + { + "epoch": 0.5907580412565051, + "grad_norm": 3.055663824081421, + "learning_rate": 7.5713678497305225e-06, + "loss": 1.0902, + "step": 9422 + }, + { + "epoch": 0.5908207411122954, + "grad_norm": 3.241828680038452, + "learning_rate": 7.569397935324653e-06, + "loss": 1.0834, + "step": 9423 + }, + { + "epoch": 0.5908834409680858, + "grad_norm": 3.0397677421569824, + "learning_rate": 7.567428121156921e-06, + "loss": 1.1355, + "step": 9424 + }, + { + "epoch": 0.5909461408238761, + "grad_norm": 3.370253801345825, + "learning_rate": 7.565458407308562e-06, + "loss": 1.0346, + "step": 9425 + }, + { + "epoch": 0.5910088406796664, + "grad_norm": 3.7058231830596924, + "learning_rate": 7.5634887938608015e-06, + "loss": 1.0269, + "step": 9426 + }, + { + "epoch": 0.5910715405354567, + "grad_norm": 3.3171885013580322, + "learning_rate": 7.5615192808948725e-06, + "loss": 1.0132, + "step": 9427 + }, + { + "epoch": 0.5911342403912471, + "grad_norm": 3.2870547771453857, + "learning_rate": 7.559549868491992e-06, + "loss": 1.0604, + "step": 9428 + }, + { + "epoch": 0.5911969402470374, + "grad_norm": 2.9970548152923584, + "learning_rate": 7.557580556733384e-06, + "loss": 1.1265, + "step": 9429 + }, + { + "epoch": 0.5912596401028277, + "grad_norm": 3.183659315109253, + "learning_rate": 7.5556113457002575e-06, + "loss": 1.1323, + "step": 9430 + }, + { + "epoch": 0.591322339958618, + "grad_norm": 3.1633999347686768, + "learning_rate": 7.5536422354738285e-06, + "loss": 1.0739, + "step": 9431 + }, + { + "epoch": 0.5913850398144084, + "grad_norm": 3.302969217300415, + "learning_rate": 7.551673226135301e-06, + "loss": 1.0176, + "step": 9432 + }, + { + "epoch": 0.5914477396701988, + "grad_norm": 3.1206159591674805, + "learning_rate": 7.549704317765874e-06, + "loss": 1.0772, + "step": 9433 + }, + { + "epoch": 0.5915104395259891, + "grad_norm": 3.25453782081604, + "learning_rate": 7.54773551044675e-06, + "loss": 1.2902, + "step": 9434 + }, + { + "epoch": 0.5915731393817795, + "grad_norm": 2.8394010066986084, + "learning_rate": 7.545766804259119e-06, + "loss": 1.1556, + "step": 9435 + }, + { + "epoch": 0.5916358392375698, + "grad_norm": 3.169027090072632, + "learning_rate": 7.543798199284175e-06, + "loss": 1.0706, + "step": 9436 + }, + { + "epoch": 0.5916985390933601, + "grad_norm": 2.739499568939209, + "learning_rate": 7.541829695603098e-06, + "loss": 1.1315, + "step": 9437 + }, + { + "epoch": 0.5917612389491504, + "grad_norm": 3.0828683376312256, + "learning_rate": 7.539861293297073e-06, + "loss": 1.2748, + "step": 9438 + }, + { + "epoch": 0.5918239388049408, + "grad_norm": 3.0022034645080566, + "learning_rate": 7.5378929924472735e-06, + "loss": 0.8858, + "step": 9439 + }, + { + "epoch": 0.5918866386607311, + "grad_norm": 3.219133138656616, + "learning_rate": 7.535924793134875e-06, + "loss": 1.1446, + "step": 9440 + }, + { + "epoch": 0.5919493385165214, + "grad_norm": 3.208791732788086, + "learning_rate": 7.533956695441048e-06, + "loss": 
1.0949, + "step": 9441 + }, + { + "epoch": 0.5920120383723118, + "grad_norm": 3.1403422355651855, + "learning_rate": 7.531988699446953e-06, + "loss": 1.08, + "step": 9442 + }, + { + "epoch": 0.5920747382281021, + "grad_norm": 3.1423542499542236, + "learning_rate": 7.530020805233753e-06, + "loss": 1.0184, + "step": 9443 + }, + { + "epoch": 0.5921374380838924, + "grad_norm": 2.8686821460723877, + "learning_rate": 7.528053012882601e-06, + "loss": 1.2405, + "step": 9444 + }, + { + "epoch": 0.5922001379396827, + "grad_norm": 3.0664336681365967, + "learning_rate": 7.5260853224746524e-06, + "loss": 1.2066, + "step": 9445 + }, + { + "epoch": 0.5922628377954731, + "grad_norm": 3.4222664833068848, + "learning_rate": 7.524117734091052e-06, + "loss": 1.0193, + "step": 9446 + }, + { + "epoch": 0.5923255376512634, + "grad_norm": 3.3027799129486084, + "learning_rate": 7.522150247812946e-06, + "loss": 0.978, + "step": 9447 + }, + { + "epoch": 0.5923882375070537, + "grad_norm": 3.0041348934173584, + "learning_rate": 7.52018286372147e-06, + "loss": 1.0252, + "step": 9448 + }, + { + "epoch": 0.592450937362844, + "grad_norm": 3.44549822807312, + "learning_rate": 7.518215581897763e-06, + "loss": 1.2145, + "step": 9449 + }, + { + "epoch": 0.5925136372186344, + "grad_norm": 3.265815496444702, + "learning_rate": 7.516248402422951e-06, + "loss": 1.0047, + "step": 9450 + }, + { + "epoch": 0.5925763370744247, + "grad_norm": 3.1302406787872314, + "learning_rate": 7.514281325378163e-06, + "loss": 1.0572, + "step": 9451 + }, + { + "epoch": 0.592639036930215, + "grad_norm": 2.8873023986816406, + "learning_rate": 7.512314350844525e-06, + "loss": 1.1099, + "step": 9452 + }, + { + "epoch": 0.5927017367860054, + "grad_norm": 3.120500087738037, + "learning_rate": 7.51034747890315e-06, + "loss": 1.1565, + "step": 9453 + }, + { + "epoch": 0.5927644366417957, + "grad_norm": 3.5233380794525146, + "learning_rate": 7.508380709635155e-06, + "loss": 0.9575, + "step": 9454 + }, + { + "epoch": 0.592827136497586, + "grad_norm": 3.061199188232422, + "learning_rate": 7.506414043121647e-06, + "loss": 1.0838, + "step": 9455 + }, + { + "epoch": 0.5928898363533764, + "grad_norm": 3.2601027488708496, + "learning_rate": 7.504447479443734e-06, + "loss": 1.3365, + "step": 9456 + }, + { + "epoch": 0.5929525362091668, + "grad_norm": 3.233552932739258, + "learning_rate": 7.502481018682515e-06, + "loss": 1.2683, + "step": 9457 + }, + { + "epoch": 0.5930152360649571, + "grad_norm": 3.512600898742676, + "learning_rate": 7.500514660919088e-06, + "loss": 0.9374, + "step": 9458 + }, + { + "epoch": 0.5930779359207474, + "grad_norm": 2.8826980590820312, + "learning_rate": 7.498548406234545e-06, + "loss": 1.1999, + "step": 9459 + }, + { + "epoch": 0.5931406357765378, + "grad_norm": 3.360668659210205, + "learning_rate": 7.496582254709974e-06, + "loss": 1.0734, + "step": 9460 + }, + { + "epoch": 0.5932033356323281, + "grad_norm": 3.1041183471679688, + "learning_rate": 7.494616206426464e-06, + "loss": 1.0489, + "step": 9461 + }, + { + "epoch": 0.5932660354881184, + "grad_norm": 3.0986483097076416, + "learning_rate": 7.4926502614650865e-06, + "loss": 1.0016, + "step": 9462 + }, + { + "epoch": 0.5933287353439087, + "grad_norm": 3.381331443786621, + "learning_rate": 7.490684419906926e-06, + "loss": 1.0433, + "step": 9463 + }, + { + "epoch": 0.5933914351996991, + "grad_norm": 2.7897965908050537, + "learning_rate": 7.488718681833046e-06, + "loss": 1.0718, + "step": 9464 + }, + { + "epoch": 0.5934541350554894, + "grad_norm": 2.958024024963379, + "learning_rate": 
7.48675304732452e-06, + "loss": 1.1389, + "step": 9465 + }, + { + "epoch": 0.5935168349112797, + "grad_norm": 3.067347288131714, + "learning_rate": 7.484787516462406e-06, + "loss": 0.9983, + "step": 9466 + }, + { + "epoch": 0.59357953476707, + "grad_norm": 3.4893321990966797, + "learning_rate": 7.482822089327766e-06, + "loss": 1.1697, + "step": 9467 + }, + { + "epoch": 0.5936422346228604, + "grad_norm": 3.265378713607788, + "learning_rate": 7.480856766001652e-06, + "loss": 1.1539, + "step": 9468 + }, + { + "epoch": 0.5937049344786507, + "grad_norm": 3.198714256286621, + "learning_rate": 7.478891546565117e-06, + "loss": 1.1066, + "step": 9469 + }, + { + "epoch": 0.593767634334441, + "grad_norm": 3.551007032394409, + "learning_rate": 7.4769264310992005e-06, + "loss": 1.0411, + "step": 9470 + }, + { + "epoch": 0.5938303341902313, + "grad_norm": 3.056844711303711, + "learning_rate": 7.474961419684951e-06, + "loss": 1.038, + "step": 9471 + }, + { + "epoch": 0.5938930340460217, + "grad_norm": 3.037414073944092, + "learning_rate": 7.472996512403403e-06, + "loss": 1.1785, + "step": 9472 + }, + { + "epoch": 0.593955733901812, + "grad_norm": 3.3159306049346924, + "learning_rate": 7.4710317093355875e-06, + "loss": 0.9626, + "step": 9473 + }, + { + "epoch": 0.5940184337576023, + "grad_norm": 3.3749749660491943, + "learning_rate": 7.469067010562538e-06, + "loss": 1.0623, + "step": 9474 + }, + { + "epoch": 0.5940811336133927, + "grad_norm": 3.251255989074707, + "learning_rate": 7.4671024161652726e-06, + "loss": 1.0843, + "step": 9475 + }, + { + "epoch": 0.594143833469183, + "grad_norm": 3.6708731651306152, + "learning_rate": 7.4651379262248166e-06, + "loss": 1.0177, + "step": 9476 + }, + { + "epoch": 0.5942065333249733, + "grad_norm": 3.5248301029205322, + "learning_rate": 7.46317354082218e-06, + "loss": 1.0247, + "step": 9477 + }, + { + "epoch": 0.5942692331807636, + "grad_norm": 3.1549088954925537, + "learning_rate": 7.46120926003838e-06, + "loss": 1.2876, + "step": 9478 + }, + { + "epoch": 0.594331933036554, + "grad_norm": 3.523142099380493, + "learning_rate": 7.45924508395442e-06, + "loss": 1.2411, + "step": 9479 + }, + { + "epoch": 0.5943946328923444, + "grad_norm": 3.3941001892089844, + "learning_rate": 7.457281012651305e-06, + "loss": 1.0912, + "step": 9480 + }, + { + "epoch": 0.5944573327481347, + "grad_norm": 3.1346347332000732, + "learning_rate": 7.455317046210029e-06, + "loss": 1.1581, + "step": 9481 + }, + { + "epoch": 0.594520032603925, + "grad_norm": 3.5308756828308105, + "learning_rate": 7.45335318471159e-06, + "loss": 1.0167, + "step": 9482 + }, + { + "epoch": 0.5945827324597154, + "grad_norm": 3.43617844581604, + "learning_rate": 7.451389428236979e-06, + "loss": 1.1777, + "step": 9483 + }, + { + "epoch": 0.5946454323155057, + "grad_norm": 3.2253100872039795, + "learning_rate": 7.4494257768671785e-06, + "loss": 1.1199, + "step": 9484 + }, + { + "epoch": 0.594708132171296, + "grad_norm": 2.8559653759002686, + "learning_rate": 7.447462230683171e-06, + "loss": 1.1295, + "step": 9485 + }, + { + "epoch": 0.5947708320270864, + "grad_norm": 3.175914764404297, + "learning_rate": 7.44549878976593e-06, + "loss": 1.1675, + "step": 9486 + }, + { + "epoch": 0.5948335318828767, + "grad_norm": 3.1471104621887207, + "learning_rate": 7.443535454196433e-06, + "loss": 1.0577, + "step": 9487 + }, + { + "epoch": 0.594896231738667, + "grad_norm": 3.2181813716888428, + "learning_rate": 7.441572224055644e-06, + "loss": 1.1401, + "step": 9488 + }, + { + "epoch": 0.5949589315944573, + "grad_norm": 
4.956153392791748, + "learning_rate": 7.4396090994245295e-06, + "loss": 1.2388, + "step": 9489 + }, + { + "epoch": 0.5950216314502477, + "grad_norm": 3.290560007095337, + "learning_rate": 7.4376460803840465e-06, + "loss": 1.0663, + "step": 9490 + }, + { + "epoch": 0.595084331306038, + "grad_norm": 2.9992384910583496, + "learning_rate": 7.4356831670151494e-06, + "loss": 1.117, + "step": 9491 + }, + { + "epoch": 0.5951470311618283, + "grad_norm": 3.3173511028289795, + "learning_rate": 7.433720359398794e-06, + "loss": 1.0589, + "step": 9492 + }, + { + "epoch": 0.5952097310176186, + "grad_norm": 3.022345542907715, + "learning_rate": 7.4317576576159215e-06, + "loss": 0.9554, + "step": 9493 + }, + { + "epoch": 0.595272430873409, + "grad_norm": 3.0143227577209473, + "learning_rate": 7.429795061747475e-06, + "loss": 1.0732, + "step": 9494 + }, + { + "epoch": 0.5953351307291993, + "grad_norm": 3.444535970687866, + "learning_rate": 7.427832571874391e-06, + "loss": 1.1614, + "step": 9495 + }, + { + "epoch": 0.5953978305849896, + "grad_norm": 2.9284489154815674, + "learning_rate": 7.425870188077609e-06, + "loss": 1.0855, + "step": 9496 + }, + { + "epoch": 0.59546053044078, + "grad_norm": 3.186246871948242, + "learning_rate": 7.4239079104380485e-06, + "loss": 1.0204, + "step": 9497 + }, + { + "epoch": 0.5955232302965703, + "grad_norm": 2.9948577880859375, + "learning_rate": 7.42194573903664e-06, + "loss": 1.0923, + "step": 9498 + }, + { + "epoch": 0.5955859301523606, + "grad_norm": 2.8050179481506348, + "learning_rate": 7.419983673954301e-06, + "loss": 1.1638, + "step": 9499 + }, + { + "epoch": 0.5956486300081509, + "grad_norm": 3.578862428665161, + "learning_rate": 7.418021715271948e-06, + "loss": 0.9821, + "step": 9500 + }, + { + "epoch": 0.5957113298639413, + "grad_norm": 3.0429718494415283, + "learning_rate": 7.416059863070492e-06, + "loss": 1.1282, + "step": 9501 + }, + { + "epoch": 0.5957740297197316, + "grad_norm": 3.29461669921875, + "learning_rate": 7.414098117430839e-06, + "loss": 0.943, + "step": 9502 + }, + { + "epoch": 0.595836729575522, + "grad_norm": 3.080679178237915, + "learning_rate": 7.412136478433895e-06, + "loss": 1.0595, + "step": 9503 + }, + { + "epoch": 0.5958994294313124, + "grad_norm": 3.0722692012786865, + "learning_rate": 7.4101749461605544e-06, + "loss": 1.1763, + "step": 9504 + }, + { + "epoch": 0.5959621292871027, + "grad_norm": 2.8249216079711914, + "learning_rate": 7.408213520691714e-06, + "loss": 1.1403, + "step": 9505 + }, + { + "epoch": 0.596024829142893, + "grad_norm": 3.164947271347046, + "learning_rate": 7.406252202108258e-06, + "loss": 1.1123, + "step": 9506 + }, + { + "epoch": 0.5960875289986833, + "grad_norm": 3.320737600326538, + "learning_rate": 7.404290990491077e-06, + "loss": 1.089, + "step": 9507 + }, + { + "epoch": 0.5961502288544737, + "grad_norm": 3.21313738822937, + "learning_rate": 7.402329885921047e-06, + "loss": 1.194, + "step": 9508 + }, + { + "epoch": 0.596212928710264, + "grad_norm": 3.362083911895752, + "learning_rate": 7.400368888479048e-06, + "loss": 1.2043, + "step": 9509 + }, + { + "epoch": 0.5962756285660543, + "grad_norm": 3.2948851585388184, + "learning_rate": 7.398407998245948e-06, + "loss": 0.9116, + "step": 9510 + }, + { + "epoch": 0.5963383284218446, + "grad_norm": 2.8375327587127686, + "learning_rate": 7.396447215302618e-06, + "loss": 1.0352, + "step": 9511 + }, + { + "epoch": 0.596401028277635, + "grad_norm": 3.2669453620910645, + "learning_rate": 7.394486539729915e-06, + "loss": 1.151, + "step": 9512 + }, + { + "epoch": 
0.5964637281334253, + "grad_norm": 3.158642053604126, + "learning_rate": 7.392525971608702e-06, + "loss": 1.2303, + "step": 9513 + }, + { + "epoch": 0.5965264279892156, + "grad_norm": 3.3594443798065186, + "learning_rate": 7.390565511019834e-06, + "loss": 1.1167, + "step": 9514 + }, + { + "epoch": 0.596589127845006, + "grad_norm": 3.244091510772705, + "learning_rate": 7.388605158044155e-06, + "loss": 1.1015, + "step": 9515 + }, + { + "epoch": 0.5966518277007963, + "grad_norm": 2.9274942874908447, + "learning_rate": 7.386644912762516e-06, + "loss": 1.0809, + "step": 9516 + }, + { + "epoch": 0.5967145275565866, + "grad_norm": 3.3382744789123535, + "learning_rate": 7.3846847752557524e-06, + "loss": 0.9241, + "step": 9517 + }, + { + "epoch": 0.5967772274123769, + "grad_norm": 3.471358060836792, + "learning_rate": 7.382724745604706e-06, + "loss": 1.0624, + "step": 9518 + }, + { + "epoch": 0.5968399272681673, + "grad_norm": 2.7582919597625732, + "learning_rate": 7.380764823890203e-06, + "loss": 1.2799, + "step": 9519 + }, + { + "epoch": 0.5969026271239576, + "grad_norm": 3.072709560394287, + "learning_rate": 7.378805010193075e-06, + "loss": 1.068, + "step": 9520 + }, + { + "epoch": 0.5969653269797479, + "grad_norm": 2.80820631980896, + "learning_rate": 7.376845304594141e-06, + "loss": 1.0187, + "step": 9521 + }, + { + "epoch": 0.5970280268355382, + "grad_norm": 3.4151370525360107, + "learning_rate": 7.3748857071742205e-06, + "loss": 1.088, + "step": 9522 + }, + { + "epoch": 0.5970907266913286, + "grad_norm": 3.0107078552246094, + "learning_rate": 7.372926218014131e-06, + "loss": 1.1077, + "step": 9523 + }, + { + "epoch": 0.5971534265471189, + "grad_norm": 3.2372798919677734, + "learning_rate": 7.370966837194677e-06, + "loss": 1.1774, + "step": 9524 + }, + { + "epoch": 0.5972161264029092, + "grad_norm": 3.126239061355591, + "learning_rate": 7.369007564796667e-06, + "loss": 1.1231, + "step": 9525 + }, + { + "epoch": 0.5972788262586997, + "grad_norm": 3.407557487487793, + "learning_rate": 7.367048400900898e-06, + "loss": 1.2124, + "step": 9526 + }, + { + "epoch": 0.59734152611449, + "grad_norm": 2.641125202178955, + "learning_rate": 7.36508934558817e-06, + "loss": 1.2153, + "step": 9527 + }, + { + "epoch": 0.5974042259702803, + "grad_norm": 3.2690811157226562, + "learning_rate": 7.36313039893927e-06, + "loss": 1.2286, + "step": 9528 + }, + { + "epoch": 0.5974669258260706, + "grad_norm": 3.568039894104004, + "learning_rate": 7.36117156103499e-06, + "loss": 1.1418, + "step": 9529 + }, + { + "epoch": 0.597529625681861, + "grad_norm": 3.401926279067993, + "learning_rate": 7.359212831956108e-06, + "loss": 0.9148, + "step": 9530 + }, + { + "epoch": 0.5975923255376513, + "grad_norm": 3.1003777980804443, + "learning_rate": 7.357254211783405e-06, + "loss": 1.0557, + "step": 9531 + }, + { + "epoch": 0.5976550253934416, + "grad_norm": 3.3440470695495605, + "learning_rate": 7.3552957005976535e-06, + "loss": 1.1839, + "step": 9532 + }, + { + "epoch": 0.597717725249232, + "grad_norm": 3.0412142276763916, + "learning_rate": 7.353337298479621e-06, + "loss": 1.1756, + "step": 9533 + }, + { + "epoch": 0.5977804251050223, + "grad_norm": 3.4506919384002686, + "learning_rate": 7.351379005510078e-06, + "loss": 1.0246, + "step": 9534 + }, + { + "epoch": 0.5978431249608126, + "grad_norm": 3.2913708686828613, + "learning_rate": 7.349420821769777e-06, + "loss": 1.3493, + "step": 9535 + }, + { + "epoch": 0.5979058248166029, + "grad_norm": 3.1013107299804688, + "learning_rate": 7.34746274733948e-06, + "loss": 1.096, + "step": 
9536 + }, + { + "epoch": 0.5979685246723933, + "grad_norm": 3.273533821105957, + "learning_rate": 7.345504782299933e-06, + "loss": 1.0218, + "step": 9537 + }, + { + "epoch": 0.5980312245281836, + "grad_norm": 3.1015658378601074, + "learning_rate": 7.343546926731886e-06, + "loss": 1.0128, + "step": 9538 + }, + { + "epoch": 0.5980939243839739, + "grad_norm": 2.971705913543701, + "learning_rate": 7.341589180716078e-06, + "loss": 1.2436, + "step": 9539 + }, + { + "epoch": 0.5981566242397642, + "grad_norm": 3.1146280765533447, + "learning_rate": 7.33963154433325e-06, + "loss": 1.1276, + "step": 9540 + }, + { + "epoch": 0.5982193240955546, + "grad_norm": 3.1335275173187256, + "learning_rate": 7.337674017664132e-06, + "loss": 1.1586, + "step": 9541 + }, + { + "epoch": 0.5982820239513449, + "grad_norm": 3.225626230239868, + "learning_rate": 7.335716600789453e-06, + "loss": 1.1199, + "step": 9542 + }, + { + "epoch": 0.5983447238071352, + "grad_norm": 2.9302797317504883, + "learning_rate": 7.333759293789941e-06, + "loss": 1.1706, + "step": 9543 + }, + { + "epoch": 0.5984074236629255, + "grad_norm": 3.326420783996582, + "learning_rate": 7.331802096746309e-06, + "loss": 1.0064, + "step": 9544 + }, + { + "epoch": 0.5984701235187159, + "grad_norm": 3.0961501598358154, + "learning_rate": 7.3298450097392774e-06, + "loss": 0.9642, + "step": 9545 + }, + { + "epoch": 0.5985328233745062, + "grad_norm": 2.87062668800354, + "learning_rate": 7.3278880328495516e-06, + "loss": 1.15, + "step": 9546 + }, + { + "epoch": 0.5985955232302965, + "grad_norm": 3.1209654808044434, + "learning_rate": 7.325931166157842e-06, + "loss": 1.266, + "step": 9547 + }, + { + "epoch": 0.5986582230860868, + "grad_norm": 2.966094732284546, + "learning_rate": 7.323974409744847e-06, + "loss": 1.116, + "step": 9548 + }, + { + "epoch": 0.5987209229418773, + "grad_norm": 3.1707262992858887, + "learning_rate": 7.322017763691266e-06, + "loss": 1.093, + "step": 9549 + }, + { + "epoch": 0.5987836227976676, + "grad_norm": 3.4138455390930176, + "learning_rate": 7.320061228077786e-06, + "loss": 1.0117, + "step": 9550 + }, + { + "epoch": 0.5988463226534579, + "grad_norm": 3.093010425567627, + "learning_rate": 7.318104802985102e-06, + "loss": 0.9742, + "step": 9551 + }, + { + "epoch": 0.5989090225092483, + "grad_norm": 3.230379581451416, + "learning_rate": 7.31614848849389e-06, + "loss": 1.1529, + "step": 9552 + }, + { + "epoch": 0.5989717223650386, + "grad_norm": 3.5087928771972656, + "learning_rate": 7.314192284684831e-06, + "loss": 1.1686, + "step": 9553 + }, + { + "epoch": 0.5990344222208289, + "grad_norm": 3.1931710243225098, + "learning_rate": 7.312236191638602e-06, + "loss": 1.134, + "step": 9554 + }, + { + "epoch": 0.5990971220766192, + "grad_norm": 2.7591006755828857, + "learning_rate": 7.3102802094358686e-06, + "loss": 1.0517, + "step": 9555 + }, + { + "epoch": 0.5991598219324096, + "grad_norm": 3.4680354595184326, + "learning_rate": 7.308324338157297e-06, + "loss": 0.9928, + "step": 9556 + }, + { + "epoch": 0.5992225217881999, + "grad_norm": 3.3806955814361572, + "learning_rate": 7.306368577883547e-06, + "loss": 1.0409, + "step": 9557 + }, + { + "epoch": 0.5992852216439902, + "grad_norm": 3.341632604598999, + "learning_rate": 7.304412928695277e-06, + "loss": 0.9408, + "step": 9558 + }, + { + "epoch": 0.5993479214997806, + "grad_norm": 3.0224692821502686, + "learning_rate": 7.302457390673133e-06, + "loss": 1.0687, + "step": 9559 + }, + { + "epoch": 0.5994106213555709, + "grad_norm": 3.3102686405181885, + "learning_rate": 
7.300501963897764e-06, + "loss": 1.3051, + "step": 9560 + }, + { + "epoch": 0.5994733212113612, + "grad_norm": 3.3691389560699463, + "learning_rate": 7.298546648449811e-06, + "loss": 1.0418, + "step": 9561 + }, + { + "epoch": 0.5995360210671515, + "grad_norm": 3.0234220027923584, + "learning_rate": 7.296591444409915e-06, + "loss": 1.1404, + "step": 9562 + }, + { + "epoch": 0.5995987209229419, + "grad_norm": 3.195549249649048, + "learning_rate": 7.294636351858704e-06, + "loss": 1.246, + "step": 9563 + }, + { + "epoch": 0.5996614207787322, + "grad_norm": 3.089129686355591, + "learning_rate": 7.292681370876807e-06, + "loss": 1.0085, + "step": 9564 + }, + { + "epoch": 0.5997241206345225, + "grad_norm": 2.952913522720337, + "learning_rate": 7.29072650154485e-06, + "loss": 1.094, + "step": 9565 + }, + { + "epoch": 0.5997868204903128, + "grad_norm": 3.047764778137207, + "learning_rate": 7.28877174394345e-06, + "loss": 1.1836, + "step": 9566 + }, + { + "epoch": 0.5998495203461032, + "grad_norm": 3.3353636264801025, + "learning_rate": 7.2868170981532226e-06, + "loss": 1.181, + "step": 9567 + }, + { + "epoch": 0.5999122202018935, + "grad_norm": 3.025507926940918, + "learning_rate": 7.284862564254775e-06, + "loss": 1.0729, + "step": 9568 + }, + { + "epoch": 0.5999749200576838, + "grad_norm": 3.1239445209503174, + "learning_rate": 7.282908142328715e-06, + "loss": 1.0288, + "step": 9569 + }, + { + "epoch": 0.6000376199134742, + "grad_norm": 3.313467025756836, + "learning_rate": 7.2809538324556415e-06, + "loss": 1.2423, + "step": 9570 + }, + { + "epoch": 0.6001003197692645, + "grad_norm": 3.2244985103607178, + "learning_rate": 7.2789996347161505e-06, + "loss": 1.1773, + "step": 9571 + }, + { + "epoch": 0.6001630196250549, + "grad_norm": 3.417802333831787, + "learning_rate": 7.277045549190833e-06, + "loss": 1.0479, + "step": 9572 + }, + { + "epoch": 0.6002257194808452, + "grad_norm": 3.3900771141052246, + "learning_rate": 7.275091575960276e-06, + "loss": 1.0739, + "step": 9573 + }, + { + "epoch": 0.6002884193366356, + "grad_norm": 2.843599796295166, + "learning_rate": 7.273137715105063e-06, + "loss": 1.0418, + "step": 9574 + }, + { + "epoch": 0.6003511191924259, + "grad_norm": 3.042654275894165, + "learning_rate": 7.271183966705769e-06, + "loss": 1.2011, + "step": 9575 + }, + { + "epoch": 0.6004138190482162, + "grad_norm": 3.1713404655456543, + "learning_rate": 7.269230330842967e-06, + "loss": 1.1836, + "step": 9576 + }, + { + "epoch": 0.6004765189040066, + "grad_norm": 3.0109527111053467, + "learning_rate": 7.267276807597224e-06, + "loss": 1.0636, + "step": 9577 + }, + { + "epoch": 0.6005392187597969, + "grad_norm": 2.957639217376709, + "learning_rate": 7.265323397049107e-06, + "loss": 1.1198, + "step": 9578 + }, + { + "epoch": 0.6006019186155872, + "grad_norm": 3.267098903656006, + "learning_rate": 7.263370099279173e-06, + "loss": 0.9629, + "step": 9579 + }, + { + "epoch": 0.6006646184713775, + "grad_norm": 3.31876540184021, + "learning_rate": 7.261416914367974e-06, + "loss": 1.0739, + "step": 9580 + }, + { + "epoch": 0.6007273183271679, + "grad_norm": 2.9800894260406494, + "learning_rate": 7.25946384239606e-06, + "loss": 1.1653, + "step": 9581 + }, + { + "epoch": 0.6007900181829582, + "grad_norm": 3.1155595779418945, + "learning_rate": 7.25751088344398e-06, + "loss": 1.0987, + "step": 9582 + }, + { + "epoch": 0.6008527180387485, + "grad_norm": 3.322519540786743, + "learning_rate": 7.255558037592267e-06, + "loss": 0.9849, + "step": 9583 + }, + { + "epoch": 0.6009154178945388, + "grad_norm": 
2.8619601726531982, + "learning_rate": 7.253605304921463e-06, + "loss": 1.1439, + "step": 9584 + }, + { + "epoch": 0.6009781177503292, + "grad_norm": 3.271728754043579, + "learning_rate": 7.251652685512096e-06, + "loss": 1.1866, + "step": 9585 + }, + { + "epoch": 0.6010408176061195, + "grad_norm": 3.606083393096924, + "learning_rate": 7.249700179444692e-06, + "loss": 1.0932, + "step": 9586 + }, + { + "epoch": 0.6011035174619098, + "grad_norm": 3.144550323486328, + "learning_rate": 7.247747786799774e-06, + "loss": 1.1385, + "step": 9587 + }, + { + "epoch": 0.6011662173177001, + "grad_norm": 3.0239837169647217, + "learning_rate": 7.245795507657855e-06, + "loss": 1.0647, + "step": 9588 + }, + { + "epoch": 0.6012289171734905, + "grad_norm": 3.4095282554626465, + "learning_rate": 7.243843342099452e-06, + "loss": 1.1517, + "step": 9589 + }, + { + "epoch": 0.6012916170292808, + "grad_norm": 3.160019874572754, + "learning_rate": 7.241891290205067e-06, + "loss": 1.0328, + "step": 9590 + }, + { + "epoch": 0.6013543168850711, + "grad_norm": 3.5187010765075684, + "learning_rate": 7.239939352055208e-06, + "loss": 1.2407, + "step": 9591 + }, + { + "epoch": 0.6014170167408615, + "grad_norm": 3.1231307983398438, + "learning_rate": 7.23798752773037e-06, + "loss": 1.0343, + "step": 9592 + }, + { + "epoch": 0.6014797165966518, + "grad_norm": 3.126706600189209, + "learning_rate": 7.236035817311047e-06, + "loss": 1.0606, + "step": 9593 + }, + { + "epoch": 0.6015424164524421, + "grad_norm": 2.813995838165283, + "learning_rate": 7.2340842208777276e-06, + "loss": 1.1296, + "step": 9594 + }, + { + "epoch": 0.6016051163082325, + "grad_norm": 2.9490251541137695, + "learning_rate": 7.232132738510894e-06, + "loss": 1.0603, + "step": 9595 + }, + { + "epoch": 0.6016678161640229, + "grad_norm": 2.9355368614196777, + "learning_rate": 7.230181370291029e-06, + "loss": 1.1329, + "step": 9596 + }, + { + "epoch": 0.6017305160198132, + "grad_norm": 3.201014995574951, + "learning_rate": 7.228230116298605e-06, + "loss": 1.1723, + "step": 9597 + }, + { + "epoch": 0.6017932158756035, + "grad_norm": 3.3392598628997803, + "learning_rate": 7.226278976614094e-06, + "loss": 1.1696, + "step": 9598 + }, + { + "epoch": 0.6018559157313939, + "grad_norm": 3.0698704719543457, + "learning_rate": 7.224327951317956e-06, + "loss": 1.2395, + "step": 9599 + }, + { + "epoch": 0.6019186155871842, + "grad_norm": 3.462576389312744, + "learning_rate": 7.222377040490656e-06, + "loss": 1.1263, + "step": 9600 + }, + { + "epoch": 0.6019813154429745, + "grad_norm": 3.1839940547943115, + "learning_rate": 7.220426244212647e-06, + "loss": 1.028, + "step": 9601 + }, + { + "epoch": 0.6020440152987648, + "grad_norm": 3.1421146392822266, + "learning_rate": 7.218475562564383e-06, + "loss": 1.1382, + "step": 9602 + }, + { + "epoch": 0.6021067151545552, + "grad_norm": 3.2371413707733154, + "learning_rate": 7.216524995626306e-06, + "loss": 1.1004, + "step": 9603 + }, + { + "epoch": 0.6021694150103455, + "grad_norm": 3.214226722717285, + "learning_rate": 7.214574543478861e-06, + "loss": 1.0996, + "step": 9604 + }, + { + "epoch": 0.6022321148661358, + "grad_norm": 3.035504102706909, + "learning_rate": 7.212624206202483e-06, + "loss": 1.0683, + "step": 9605 + }, + { + "epoch": 0.6022948147219261, + "grad_norm": 3.016813278198242, + "learning_rate": 7.210673983877605e-06, + "loss": 1.1347, + "step": 9606 + }, + { + "epoch": 0.6023575145777165, + "grad_norm": 2.9920589923858643, + "learning_rate": 7.208723876584654e-06, + "loss": 1.1366, + "step": 9607 + }, + { + "epoch": 
0.6024202144335068, + "grad_norm": 3.1958730220794678, + "learning_rate": 7.2067738844040516e-06, + "loss": 1.2502, + "step": 9608 + }, + { + "epoch": 0.6024829142892971, + "grad_norm": 3.073958158493042, + "learning_rate": 7.204824007416217e-06, + "loss": 0.9694, + "step": 9609 + }, + { + "epoch": 0.6025456141450874, + "grad_norm": 3.141202926635742, + "learning_rate": 7.202874245701561e-06, + "loss": 1.2014, + "step": 9610 + }, + { + "epoch": 0.6026083140008778, + "grad_norm": 3.231762647628784, + "learning_rate": 7.200924599340495e-06, + "loss": 0.9854, + "step": 9611 + }, + { + "epoch": 0.6026710138566681, + "grad_norm": 3.2730395793914795, + "learning_rate": 7.198975068413419e-06, + "loss": 0.951, + "step": 9612 + }, + { + "epoch": 0.6027337137124584, + "grad_norm": 2.658284902572632, + "learning_rate": 7.197025653000736e-06, + "loss": 1.0322, + "step": 9613 + }, + { + "epoch": 0.6027964135682488, + "grad_norm": 3.619377613067627, + "learning_rate": 7.195076353182834e-06, + "loss": 1.0393, + "step": 9614 + }, + { + "epoch": 0.6028591134240391, + "grad_norm": 3.325613260269165, + "learning_rate": 7.193127169040106e-06, + "loss": 0.9742, + "step": 9615 + }, + { + "epoch": 0.6029218132798294, + "grad_norm": 3.0948944091796875, + "learning_rate": 7.191178100652939e-06, + "loss": 1.1895, + "step": 9616 + }, + { + "epoch": 0.6029845131356197, + "grad_norm": 3.2152743339538574, + "learning_rate": 7.1892291481017075e-06, + "loss": 1.1159, + "step": 9617 + }, + { + "epoch": 0.6030472129914101, + "grad_norm": 3.2427260875701904, + "learning_rate": 7.1872803114667914e-06, + "loss": 1.1776, + "step": 9618 + }, + { + "epoch": 0.6031099128472005, + "grad_norm": 3.256117343902588, + "learning_rate": 7.185331590828555e-06, + "loss": 1.0989, + "step": 9619 + }, + { + "epoch": 0.6031726127029908, + "grad_norm": 3.2662668228149414, + "learning_rate": 7.183382986267369e-06, + "loss": 1.1655, + "step": 9620 + }, + { + "epoch": 0.6032353125587812, + "grad_norm": 3.0484683513641357, + "learning_rate": 7.181434497863589e-06, + "loss": 0.947, + "step": 9621 + }, + { + "epoch": 0.6032980124145715, + "grad_norm": 3.2634496688842773, + "learning_rate": 7.1794861256975765e-06, + "loss": 1.2134, + "step": 9622 + }, + { + "epoch": 0.6033607122703618, + "grad_norm": 3.1074259281158447, + "learning_rate": 7.177537869849677e-06, + "loss": 1.2492, + "step": 9623 + }, + { + "epoch": 0.6034234121261521, + "grad_norm": 3.2358973026275635, + "learning_rate": 7.1755897304002385e-06, + "loss": 1.0555, + "step": 9624 + }, + { + "epoch": 0.6034861119819425, + "grad_norm": 3.1587111949920654, + "learning_rate": 7.173641707429606e-06, + "loss": 0.9645, + "step": 9625 + }, + { + "epoch": 0.6035488118377328, + "grad_norm": 3.084192991256714, + "learning_rate": 7.171693801018109e-06, + "loss": 1.1003, + "step": 9626 + }, + { + "epoch": 0.6036115116935231, + "grad_norm": 2.97613787651062, + "learning_rate": 7.169746011246087e-06, + "loss": 1.1146, + "step": 9627 + }, + { + "epoch": 0.6036742115493134, + "grad_norm": 2.9231741428375244, + "learning_rate": 7.167798338193861e-06, + "loss": 1.0185, + "step": 9628 + }, + { + "epoch": 0.6037369114051038, + "grad_norm": 3.721872329711914, + "learning_rate": 7.165850781941757e-06, + "loss": 1.1269, + "step": 9629 + }, + { + "epoch": 0.6037996112608941, + "grad_norm": 2.9054133892059326, + "learning_rate": 7.163903342570087e-06, + "loss": 0.98, + "step": 9630 + }, + { + "epoch": 0.6038623111166844, + "grad_norm": 3.1424622535705566, + "learning_rate": 7.1619560201591705e-06, + "loss": 
1.0978, + "step": 9631 + }, + { + "epoch": 0.6039250109724748, + "grad_norm": 2.981100559234619, + "learning_rate": 7.16000881478931e-06, + "loss": 1.0731, + "step": 9632 + }, + { + "epoch": 0.6039877108282651, + "grad_norm": 3.4122660160064697, + "learning_rate": 7.158061726540811e-06, + "loss": 1.1544, + "step": 9633 + }, + { + "epoch": 0.6040504106840554, + "grad_norm": 3.3290257453918457, + "learning_rate": 7.156114755493968e-06, + "loss": 1.1357, + "step": 9634 + }, + { + "epoch": 0.6041131105398457, + "grad_norm": 3.0269670486450195, + "learning_rate": 7.154167901729078e-06, + "loss": 1.1104, + "step": 9635 + }, + { + "epoch": 0.6041758103956361, + "grad_norm": 3.2543625831604004, + "learning_rate": 7.15222116532643e-06, + "loss": 1.128, + "step": 9636 + }, + { + "epoch": 0.6042385102514264, + "grad_norm": 2.9479360580444336, + "learning_rate": 7.1502745463663025e-06, + "loss": 1.2026, + "step": 9637 + }, + { + "epoch": 0.6043012101072167, + "grad_norm": 3.360524892807007, + "learning_rate": 7.14832804492898e-06, + "loss": 1.0572, + "step": 9638 + }, + { + "epoch": 0.604363909963007, + "grad_norm": 3.255750894546509, + "learning_rate": 7.14638166109473e-06, + "loss": 1.0431, + "step": 9639 + }, + { + "epoch": 0.6044266098187974, + "grad_norm": 3.476369857788086, + "learning_rate": 7.144435394943829e-06, + "loss": 1.1319, + "step": 9640 + }, + { + "epoch": 0.6044893096745877, + "grad_norm": 3.283942222595215, + "learning_rate": 7.1424892465565345e-06, + "loss": 1.1628, + "step": 9641 + }, + { + "epoch": 0.6045520095303781, + "grad_norm": 2.9782769680023193, + "learning_rate": 7.140543216013109e-06, + "loss": 1.0473, + "step": 9642 + }, + { + "epoch": 0.6046147093861685, + "grad_norm": 2.936911106109619, + "learning_rate": 7.138597303393805e-06, + "loss": 0.8615, + "step": 9643 + }, + { + "epoch": 0.6046774092419588, + "grad_norm": 3.4512758255004883, + "learning_rate": 7.136651508778876e-06, + "loss": 1.071, + "step": 9644 + }, + { + "epoch": 0.6047401090977491, + "grad_norm": 3.6037418842315674, + "learning_rate": 7.134705832248561e-06, + "loss": 1.1065, + "step": 9645 + }, + { + "epoch": 0.6048028089535394, + "grad_norm": 3.089494228363037, + "learning_rate": 7.132760273883101e-06, + "loss": 1.4005, + "step": 9646 + }, + { + "epoch": 0.6048655088093298, + "grad_norm": 3.1320035457611084, + "learning_rate": 7.130814833762737e-06, + "loss": 0.9824, + "step": 9647 + }, + { + "epoch": 0.6049282086651201, + "grad_norm": 3.582824468612671, + "learning_rate": 7.128869511967691e-06, + "loss": 1.1294, + "step": 9648 + }, + { + "epoch": 0.6049909085209104, + "grad_norm": 3.265711545944214, + "learning_rate": 7.126924308578196e-06, + "loss": 0.9438, + "step": 9649 + }, + { + "epoch": 0.6050536083767007, + "grad_norm": 3.45635724067688, + "learning_rate": 7.1249792236744645e-06, + "loss": 1.1166, + "step": 9650 + }, + { + "epoch": 0.6051163082324911, + "grad_norm": 3.284264087677002, + "learning_rate": 7.123034257336716e-06, + "loss": 1.097, + "step": 9651 + }, + { + "epoch": 0.6051790080882814, + "grad_norm": 3.464782476425171, + "learning_rate": 7.12108940964516e-06, + "loss": 1.1471, + "step": 9652 + }, + { + "epoch": 0.6052417079440717, + "grad_norm": 3.0786550045013428, + "learning_rate": 7.119144680680004e-06, + "loss": 1.049, + "step": 9653 + }, + { + "epoch": 0.605304407799862, + "grad_norm": 3.3033878803253174, + "learning_rate": 7.117200070521445e-06, + "loss": 0.887, + "step": 9654 + }, + { + "epoch": 0.6053671076556524, + "grad_norm": 3.077180862426758, + "learning_rate": 
7.115255579249681e-06, + "loss": 1.2348, + "step": 9655 + }, + { + "epoch": 0.6054298075114427, + "grad_norm": 3.1592600345611572, + "learning_rate": 7.113311206944905e-06, + "loss": 1.1315, + "step": 9656 + }, + { + "epoch": 0.605492507367233, + "grad_norm": 3.192218542098999, + "learning_rate": 7.1113669536872996e-06, + "loss": 0.9191, + "step": 9657 + }, + { + "epoch": 0.6055552072230234, + "grad_norm": 3.3007352352142334, + "learning_rate": 7.109422819557048e-06, + "loss": 1.2848, + "step": 9658 + }, + { + "epoch": 0.6056179070788137, + "grad_norm": 3.2238855361938477, + "learning_rate": 7.107478804634324e-06, + "loss": 1.0726, + "step": 9659 + }, + { + "epoch": 0.605680606934604, + "grad_norm": 3.479729413986206, + "learning_rate": 7.105534908999303e-06, + "loss": 1.1386, + "step": 9660 + }, + { + "epoch": 0.6057433067903943, + "grad_norm": 2.9062535762786865, + "learning_rate": 7.103591132732146e-06, + "loss": 1.2181, + "step": 9661 + }, + { + "epoch": 0.6058060066461847, + "grad_norm": 3.0259082317352295, + "learning_rate": 7.10164747591302e-06, + "loss": 1.222, + "step": 9662 + }, + { + "epoch": 0.605868706501975, + "grad_norm": 3.000764846801758, + "learning_rate": 7.099703938622076e-06, + "loss": 1.1235, + "step": 9663 + }, + { + "epoch": 0.6059314063577653, + "grad_norm": 3.052851438522339, + "learning_rate": 7.09776052093947e-06, + "loss": 1.2096, + "step": 9664 + }, + { + "epoch": 0.6059941062135558, + "grad_norm": 3.24527907371521, + "learning_rate": 7.0958172229453446e-06, + "loss": 1.0367, + "step": 9665 + }, + { + "epoch": 0.6060568060693461, + "grad_norm": 3.532968044281006, + "learning_rate": 7.093874044719845e-06, + "loss": 1.1288, + "step": 9666 + }, + { + "epoch": 0.6061195059251364, + "grad_norm": 3.380805253982544, + "learning_rate": 7.091930986343107e-06, + "loss": 1.1129, + "step": 9667 + }, + { + "epoch": 0.6061822057809267, + "grad_norm": 2.978053569793701, + "learning_rate": 7.089988047895262e-06, + "loss": 1.1261, + "step": 9668 + }, + { + "epoch": 0.6062449056367171, + "grad_norm": 3.6449828147888184, + "learning_rate": 7.088045229456438e-06, + "loss": 1.0641, + "step": 9669 + }, + { + "epoch": 0.6063076054925074, + "grad_norm": 3.0584187507629395, + "learning_rate": 7.086102531106755e-06, + "loss": 0.9224, + "step": 9670 + }, + { + "epoch": 0.6063703053482977, + "grad_norm": 3.3024380207061768, + "learning_rate": 7.084159952926332e-06, + "loss": 1.0155, + "step": 9671 + }, + { + "epoch": 0.606433005204088, + "grad_norm": 3.3682446479797363, + "learning_rate": 7.082217494995277e-06, + "loss": 1.0216, + "step": 9672 + }, + { + "epoch": 0.6064957050598784, + "grad_norm": 3.6928436756134033, + "learning_rate": 7.080275157393704e-06, + "loss": 0.8563, + "step": 9673 + }, + { + "epoch": 0.6065584049156687, + "grad_norm": 3.3404107093811035, + "learning_rate": 7.078332940201707e-06, + "loss": 1.0204, + "step": 9674 + }, + { + "epoch": 0.606621104771459, + "grad_norm": 3.056957960128784, + "learning_rate": 7.07639084349939e-06, + "loss": 1.0141, + "step": 9675 + }, + { + "epoch": 0.6066838046272494, + "grad_norm": 3.010462999343872, + "learning_rate": 7.07444886736684e-06, + "loss": 1.0072, + "step": 9676 + }, + { + "epoch": 0.6067465044830397, + "grad_norm": 2.798304796218872, + "learning_rate": 7.072507011884146e-06, + "loss": 1.0898, + "step": 9677 + }, + { + "epoch": 0.60680920433883, + "grad_norm": 3.357797861099243, + "learning_rate": 7.070565277131394e-06, + "loss": 1.0538, + "step": 9678 + }, + { + "epoch": 0.6068719041946203, + "grad_norm": 
3.2406156063079834, + "learning_rate": 7.068623663188654e-06, + "loss": 1.0009, + "step": 9679 + }, + { + "epoch": 0.6069346040504107, + "grad_norm": 3.134951114654541, + "learning_rate": 7.0666821701360055e-06, + "loss": 0.9968, + "step": 9680 + }, + { + "epoch": 0.606997303906201, + "grad_norm": 3.54228138923645, + "learning_rate": 7.064740798053511e-06, + "loss": 1.0121, + "step": 9681 + }, + { + "epoch": 0.6070600037619913, + "grad_norm": 3.563882350921631, + "learning_rate": 7.062799547021235e-06, + "loss": 1.1964, + "step": 9682 + }, + { + "epoch": 0.6071227036177816, + "grad_norm": 3.2780585289001465, + "learning_rate": 7.060858417119233e-06, + "loss": 1.1948, + "step": 9683 + }, + { + "epoch": 0.607185403473572, + "grad_norm": 2.9334323406219482, + "learning_rate": 7.058917408427559e-06, + "loss": 1.0368, + "step": 9684 + }, + { + "epoch": 0.6072481033293623, + "grad_norm": 3.3212060928344727, + "learning_rate": 7.056976521026258e-06, + "loss": 1.0422, + "step": 9685 + }, + { + "epoch": 0.6073108031851526, + "grad_norm": 3.4109718799591064, + "learning_rate": 7.055035754995374e-06, + "loss": 1.2076, + "step": 9686 + }, + { + "epoch": 0.607373503040943, + "grad_norm": 2.949693441390991, + "learning_rate": 7.053095110414947e-06, + "loss": 1.0206, + "step": 9687 + }, + { + "epoch": 0.6074362028967334, + "grad_norm": 3.246473550796509, + "learning_rate": 7.0511545873650035e-06, + "loss": 1.152, + "step": 9688 + }, + { + "epoch": 0.6074989027525237, + "grad_norm": 3.2397098541259766, + "learning_rate": 7.049214185925574e-06, + "loss": 0.9795, + "step": 9689 + }, + { + "epoch": 0.607561602608314, + "grad_norm": 3.0947399139404297, + "learning_rate": 7.04727390617668e-06, + "loss": 1.1774, + "step": 9690 + }, + { + "epoch": 0.6076243024641044, + "grad_norm": 3.1649537086486816, + "learning_rate": 7.045333748198342e-06, + "loss": 1.0792, + "step": 9691 + }, + { + "epoch": 0.6076870023198947, + "grad_norm": 3.2760353088378906, + "learning_rate": 7.043393712070566e-06, + "loss": 1.0717, + "step": 9692 + }, + { + "epoch": 0.607749702175685, + "grad_norm": 3.1986429691314697, + "learning_rate": 7.041453797873363e-06, + "loss": 1.1566, + "step": 9693 + }, + { + "epoch": 0.6078124020314754, + "grad_norm": 3.225458860397339, + "learning_rate": 7.039514005686735e-06, + "loss": 0.8689, + "step": 9694 + }, + { + "epoch": 0.6078751018872657, + "grad_norm": 3.1900343894958496, + "learning_rate": 7.03757433559068e-06, + "loss": 1.205, + "step": 9695 + }, + { + "epoch": 0.607937801743056, + "grad_norm": 3.1974637508392334, + "learning_rate": 7.035634787665186e-06, + "loss": 1.1765, + "step": 9696 + }, + { + "epoch": 0.6080005015988463, + "grad_norm": 3.1984164714813232, + "learning_rate": 7.033695361990244e-06, + "loss": 0.9879, + "step": 9697 + }, + { + "epoch": 0.6080632014546367, + "grad_norm": 2.947007656097412, + "learning_rate": 7.031756058645835e-06, + "loss": 1.0693, + "step": 9698 + }, + { + "epoch": 0.608125901310427, + "grad_norm": 3.403693199157715, + "learning_rate": 7.0298168777119356e-06, + "loss": 1.0372, + "step": 9699 + }, + { + "epoch": 0.6081886011662173, + "grad_norm": 3.345149040222168, + "learning_rate": 7.027877819268519e-06, + "loss": 1.1171, + "step": 9700 + }, + { + "epoch": 0.6082513010220076, + "grad_norm": 3.5003550052642822, + "learning_rate": 7.025938883395549e-06, + "loss": 1.0132, + "step": 9701 + }, + { + "epoch": 0.608314000877798, + "grad_norm": 3.4913554191589355, + "learning_rate": 7.024000070172992e-06, + "loss": 1.0956, + "step": 9702 + }, + { + "epoch": 
0.6083767007335883, + "grad_norm": 3.044638156890869, + "learning_rate": 7.022061379680799e-06, + "loss": 1.1277, + "step": 9703 + }, + { + "epoch": 0.6084394005893786, + "grad_norm": 3.2779524326324463, + "learning_rate": 7.020122811998927e-06, + "loss": 1.1284, + "step": 9704 + }, + { + "epoch": 0.608502100445169, + "grad_norm": 3.194836139678955, + "learning_rate": 7.0181843672073195e-06, + "loss": 1.1034, + "step": 9705 + }, + { + "epoch": 0.6085648003009593, + "grad_norm": 3.30485463142395, + "learning_rate": 7.0162460453859174e-06, + "loss": 1.0709, + "step": 9706 + }, + { + "epoch": 0.6086275001567496, + "grad_norm": 3.4813637733459473, + "learning_rate": 7.014307846614661e-06, + "loss": 0.9741, + "step": 9707 + }, + { + "epoch": 0.6086902000125399, + "grad_norm": 3.088392972946167, + "learning_rate": 7.012369770973478e-06, + "loss": 1.1353, + "step": 9708 + }, + { + "epoch": 0.6087528998683303, + "grad_norm": 3.0145161151885986, + "learning_rate": 7.010431818542298e-06, + "loss": 1.0216, + "step": 9709 + }, + { + "epoch": 0.6088155997241206, + "grad_norm": 3.3973841667175293, + "learning_rate": 7.008493989401039e-06, + "loss": 1.1439, + "step": 9710 + }, + { + "epoch": 0.608878299579911, + "grad_norm": 3.3576974868774414, + "learning_rate": 7.00655628362962e-06, + "loss": 1.1594, + "step": 9711 + }, + { + "epoch": 0.6089409994357013, + "grad_norm": 3.4485089778900146, + "learning_rate": 7.004618701307948e-06, + "loss": 1.0594, + "step": 9712 + }, + { + "epoch": 0.6090036992914917, + "grad_norm": 3.4699206352233887, + "learning_rate": 7.002681242515934e-06, + "loss": 1.0221, + "step": 9713 + }, + { + "epoch": 0.609066399147282, + "grad_norm": 2.94488787651062, + "learning_rate": 7.000743907333473e-06, + "loss": 1.214, + "step": 9714 + }, + { + "epoch": 0.6091290990030723, + "grad_norm": 3.283539295196533, + "learning_rate": 6.998806695840468e-06, + "loss": 1.1061, + "step": 9715 + }, + { + "epoch": 0.6091917988588627, + "grad_norm": 2.837921619415283, + "learning_rate": 6.996869608116803e-06, + "loss": 1.1191, + "step": 9716 + }, + { + "epoch": 0.609254498714653, + "grad_norm": 3.353006601333618, + "learning_rate": 6.9949326442423674e-06, + "loss": 1.0478, + "step": 9717 + }, + { + "epoch": 0.6093171985704433, + "grad_norm": 3.2504501342773438, + "learning_rate": 6.992995804297043e-06, + "loss": 1.1641, + "step": 9718 + }, + { + "epoch": 0.6093798984262336, + "grad_norm": 3.0964913368225098, + "learning_rate": 6.9910590883607e-06, + "loss": 1.1071, + "step": 9719 + }, + { + "epoch": 0.609442598282024, + "grad_norm": 3.390484571456909, + "learning_rate": 6.989122496513214e-06, + "loss": 0.8502, + "step": 9720 + }, + { + "epoch": 0.6095052981378143, + "grad_norm": 3.1463568210601807, + "learning_rate": 6.987186028834445e-06, + "loss": 1.1354, + "step": 9721 + }, + { + "epoch": 0.6095679979936046, + "grad_norm": 2.8752455711364746, + "learning_rate": 6.9852496854042585e-06, + "loss": 1.1498, + "step": 9722 + }, + { + "epoch": 0.6096306978493949, + "grad_norm": 2.98022198677063, + "learning_rate": 6.983313466302505e-06, + "loss": 1.0389, + "step": 9723 + }, + { + "epoch": 0.6096933977051853, + "grad_norm": 3.080482244491577, + "learning_rate": 6.981377371609037e-06, + "loss": 1.1588, + "step": 9724 + }, + { + "epoch": 0.6097560975609756, + "grad_norm": 3.171132802963257, + "learning_rate": 6.979441401403697e-06, + "loss": 1.2566, + "step": 9725 + }, + { + "epoch": 0.6098187974167659, + "grad_norm": 3.2602767944335938, + "learning_rate": 6.977505555766326e-06, + "loss": 1.1818, + "step": 
9726 + }, + { + "epoch": 0.6098814972725562, + "grad_norm": 3.4923362731933594, + "learning_rate": 6.975569834776757e-06, + "loss": 1.1112, + "step": 9727 + }, + { + "epoch": 0.6099441971283466, + "grad_norm": 3.302116632461548, + "learning_rate": 6.97363423851482e-06, + "loss": 1.1957, + "step": 9728 + }, + { + "epoch": 0.6100068969841369, + "grad_norm": 3.26492977142334, + "learning_rate": 6.971698767060342e-06, + "loss": 1.1575, + "step": 9729 + }, + { + "epoch": 0.6100695968399272, + "grad_norm": 3.3650362491607666, + "learning_rate": 6.969763420493135e-06, + "loss": 0.9796, + "step": 9730 + }, + { + "epoch": 0.6101322966957176, + "grad_norm": 3.172274351119995, + "learning_rate": 6.9678281988930205e-06, + "loss": 0.9783, + "step": 9731 + }, + { + "epoch": 0.6101949965515079, + "grad_norm": 3.1119773387908936, + "learning_rate": 6.9658931023398e-06, + "loss": 1.3113, + "step": 9732 + }, + { + "epoch": 0.6102576964072982, + "grad_norm": 3.0583536624908447, + "learning_rate": 6.963958130913285e-06, + "loss": 1.187, + "step": 9733 + }, + { + "epoch": 0.6103203962630886, + "grad_norm": 3.2245869636535645, + "learning_rate": 6.962023284693265e-06, + "loss": 0.9451, + "step": 9734 + }, + { + "epoch": 0.610383096118879, + "grad_norm": 3.1413073539733887, + "learning_rate": 6.96008856375954e-06, + "loss": 1.2161, + "step": 9735 + }, + { + "epoch": 0.6104457959746693, + "grad_norm": 3.2483232021331787, + "learning_rate": 6.958153968191894e-06, + "loss": 1.1643, + "step": 9736 + }, + { + "epoch": 0.6105084958304596, + "grad_norm": 3.2618842124938965, + "learning_rate": 6.95621949807011e-06, + "loss": 1.0475, + "step": 9737 + }, + { + "epoch": 0.61057119568625, + "grad_norm": 3.0072245597839355, + "learning_rate": 6.9542851534739695e-06, + "loss": 1.1965, + "step": 9738 + }, + { + "epoch": 0.6106338955420403, + "grad_norm": 3.012209415435791, + "learning_rate": 6.9523509344832406e-06, + "loss": 1.0703, + "step": 9739 + }, + { + "epoch": 0.6106965953978306, + "grad_norm": 3.144300937652588, + "learning_rate": 6.950416841177694e-06, + "loss": 1.137, + "step": 9740 + }, + { + "epoch": 0.6107592952536209, + "grad_norm": 3.152272939682007, + "learning_rate": 6.948482873637088e-06, + "loss": 1.0645, + "step": 9741 + }, + { + "epoch": 0.6108219951094113, + "grad_norm": 3.3366897106170654, + "learning_rate": 6.946549031941183e-06, + "loss": 1.1161, + "step": 9742 + }, + { + "epoch": 0.6108846949652016, + "grad_norm": 3.2976326942443848, + "learning_rate": 6.94461531616973e-06, + "loss": 1.1663, + "step": 9743 + }, + { + "epoch": 0.6109473948209919, + "grad_norm": 3.14235782623291, + "learning_rate": 6.942681726402474e-06, + "loss": 0.9832, + "step": 9744 + }, + { + "epoch": 0.6110100946767822, + "grad_norm": 3.2060389518737793, + "learning_rate": 6.940748262719155e-06, + "loss": 1.1062, + "step": 9745 + }, + { + "epoch": 0.6110727945325726, + "grad_norm": 3.426535129547119, + "learning_rate": 6.938814925199514e-06, + "loss": 1.048, + "step": 9746 + }, + { + "epoch": 0.6111354943883629, + "grad_norm": 3.338770866394043, + "learning_rate": 6.936881713923278e-06, + "loss": 1.0204, + "step": 9747 + }, + { + "epoch": 0.6111981942441532, + "grad_norm": 3.2180662155151367, + "learning_rate": 6.934948628970173e-06, + "loss": 1.1287, + "step": 9748 + }, + { + "epoch": 0.6112608940999436, + "grad_norm": 3.2863645553588867, + "learning_rate": 6.933015670419922e-06, + "loss": 0.9183, + "step": 9749 + }, + { + "epoch": 0.6113235939557339, + "grad_norm": 3.10782790184021, + "learning_rate": 6.931082838352236e-06, + 
"loss": 1.1161, + "step": 9750 + }, + { + "epoch": 0.6113862938115242, + "grad_norm": 3.3313822746276855, + "learning_rate": 6.929150132846831e-06, + "loss": 1.1345, + "step": 9751 + }, + { + "epoch": 0.6114489936673145, + "grad_norm": 3.2473018169403076, + "learning_rate": 6.927217553983408e-06, + "loss": 1.2157, + "step": 9752 + }, + { + "epoch": 0.6115116935231049, + "grad_norm": 2.9249887466430664, + "learning_rate": 6.9252851018416675e-06, + "loss": 1.2202, + "step": 9753 + }, + { + "epoch": 0.6115743933788952, + "grad_norm": 3.5229921340942383, + "learning_rate": 6.923352776501302e-06, + "loss": 1.036, + "step": 9754 + }, + { + "epoch": 0.6116370932346855, + "grad_norm": 3.3075008392333984, + "learning_rate": 6.921420578042004e-06, + "loss": 0.983, + "step": 9755 + }, + { + "epoch": 0.6116997930904758, + "grad_norm": 3.2459771633148193, + "learning_rate": 6.919488506543455e-06, + "loss": 1.0214, + "step": 9756 + }, + { + "epoch": 0.6117624929462663, + "grad_norm": 3.122753381729126, + "learning_rate": 6.917556562085335e-06, + "loss": 1.0794, + "step": 9757 + }, + { + "epoch": 0.6118251928020566, + "grad_norm": 3.3992345333099365, + "learning_rate": 6.9156247447473155e-06, + "loss": 1.2386, + "step": 9758 + }, + { + "epoch": 0.6118878926578469, + "grad_norm": 2.6615989208221436, + "learning_rate": 6.913693054609065e-06, + "loss": 1.1526, + "step": 9759 + }, + { + "epoch": 0.6119505925136373, + "grad_norm": 3.3198184967041016, + "learning_rate": 6.911761491750253e-06, + "loss": 1.0326, + "step": 9760 + }, + { + "epoch": 0.6120132923694276, + "grad_norm": 3.334016799926758, + "learning_rate": 6.909830056250527e-06, + "loss": 1.0803, + "step": 9761 + }, + { + "epoch": 0.6120759922252179, + "grad_norm": 3.151092529296875, + "learning_rate": 6.907898748189548e-06, + "loss": 1.0638, + "step": 9762 + }, + { + "epoch": 0.6121386920810082, + "grad_norm": 3.307795286178589, + "learning_rate": 6.905967567646956e-06, + "loss": 1.1126, + "step": 9763 + }, + { + "epoch": 0.6122013919367986, + "grad_norm": 3.160348415374756, + "learning_rate": 6.9040365147024e-06, + "loss": 1.0898, + "step": 9764 + }, + { + "epoch": 0.6122640917925889, + "grad_norm": 2.9667277336120605, + "learning_rate": 6.902105589435511e-06, + "loss": 1.1366, + "step": 9765 + }, + { + "epoch": 0.6123267916483792, + "grad_norm": 3.177119493484497, + "learning_rate": 6.900174791925924e-06, + "loss": 1.2314, + "step": 9766 + }, + { + "epoch": 0.6123894915041695, + "grad_norm": 3.354419469833374, + "learning_rate": 6.898244122253264e-06, + "loss": 1.1894, + "step": 9767 + }, + { + "epoch": 0.6124521913599599, + "grad_norm": 3.111196279525757, + "learning_rate": 6.89631358049715e-06, + "loss": 1.1378, + "step": 9768 + }, + { + "epoch": 0.6125148912157502, + "grad_norm": 3.496854782104492, + "learning_rate": 6.894383166737202e-06, + "loss": 1.1306, + "step": 9769 + }, + { + "epoch": 0.6125775910715405, + "grad_norm": 3.2413763999938965, + "learning_rate": 6.892452881053027e-06, + "loss": 1.0292, + "step": 9770 + }, + { + "epoch": 0.6126402909273309, + "grad_norm": 3.1741042137145996, + "learning_rate": 6.890522723524231e-06, + "loss": 1.1791, + "step": 9771 + }, + { + "epoch": 0.6127029907831212, + "grad_norm": 3.092654228210449, + "learning_rate": 6.888592694230413e-06, + "loss": 1.1663, + "step": 9772 + }, + { + "epoch": 0.6127656906389115, + "grad_norm": 3.2484629154205322, + "learning_rate": 6.886662793251169e-06, + "loss": 1.1792, + "step": 9773 + }, + { + "epoch": 0.6128283904947018, + "grad_norm": 3.2421820163726807, + 
"learning_rate": 6.884733020666086e-06, + "loss": 1.0867, + "step": 9774 + }, + { + "epoch": 0.6128910903504922, + "grad_norm": 3.584338903427124, + "learning_rate": 6.882803376554752e-06, + "loss": 0.8583, + "step": 9775 + }, + { + "epoch": 0.6129537902062825, + "grad_norm": 3.63917875289917, + "learning_rate": 6.880873860996739e-06, + "loss": 0.8582, + "step": 9776 + }, + { + "epoch": 0.6130164900620728, + "grad_norm": 2.981321334838867, + "learning_rate": 6.878944474071628e-06, + "loss": 1.0934, + "step": 9777 + }, + { + "epoch": 0.6130791899178631, + "grad_norm": 2.8041770458221436, + "learning_rate": 6.8770152158589806e-06, + "loss": 1.0829, + "step": 9778 + }, + { + "epoch": 0.6131418897736535, + "grad_norm": 3.151007890701294, + "learning_rate": 6.875086086438363e-06, + "loss": 1.14, + "step": 9779 + }, + { + "epoch": 0.6132045896294438, + "grad_norm": 3.2441511154174805, + "learning_rate": 6.873157085889333e-06, + "loss": 1.1452, + "step": 9780 + }, + { + "epoch": 0.6132672894852342, + "grad_norm": 3.2150819301605225, + "learning_rate": 6.871228214291437e-06, + "loss": 1.1262, + "step": 9781 + }, + { + "epoch": 0.6133299893410246, + "grad_norm": 3.259586811065674, + "learning_rate": 6.869299471724231e-06, + "loss": 1.153, + "step": 9782 + }, + { + "epoch": 0.6133926891968149, + "grad_norm": 3.4903483390808105, + "learning_rate": 6.867370858267248e-06, + "loss": 1.066, + "step": 9783 + }, + { + "epoch": 0.6134553890526052, + "grad_norm": 2.9512147903442383, + "learning_rate": 6.865442374000031e-06, + "loss": 1.098, + "step": 9784 + }, + { + "epoch": 0.6135180889083955, + "grad_norm": 3.2011983394622803, + "learning_rate": 6.863514019002105e-06, + "loss": 1.1938, + "step": 9785 + }, + { + "epoch": 0.6135807887641859, + "grad_norm": 3.1058647632598877, + "learning_rate": 6.8615857933529986e-06, + "loss": 1.0295, + "step": 9786 + }, + { + "epoch": 0.6136434886199762, + "grad_norm": 3.1753616333007812, + "learning_rate": 6.859657697132229e-06, + "loss": 1.0807, + "step": 9787 + }, + { + "epoch": 0.6137061884757665, + "grad_norm": 3.6944260597229004, + "learning_rate": 6.857729730419316e-06, + "loss": 0.9142, + "step": 9788 + }, + { + "epoch": 0.6137688883315568, + "grad_norm": 3.1850788593292236, + "learning_rate": 6.855801893293765e-06, + "loss": 1.135, + "step": 9789 + }, + { + "epoch": 0.6138315881873472, + "grad_norm": 3.458299160003662, + "learning_rate": 6.853874185835081e-06, + "loss": 1.1047, + "step": 9790 + }, + { + "epoch": 0.6138942880431375, + "grad_norm": 3.141002655029297, + "learning_rate": 6.851946608122764e-06, + "loss": 1.0635, + "step": 9791 + }, + { + "epoch": 0.6139569878989278, + "grad_norm": 2.8683664798736572, + "learning_rate": 6.850019160236305e-06, + "loss": 1.1326, + "step": 9792 + }, + { + "epoch": 0.6140196877547182, + "grad_norm": 3.189953565597534, + "learning_rate": 6.848091842255195e-06, + "loss": 0.9446, + "step": 9793 + }, + { + "epoch": 0.6140823876105085, + "grad_norm": 2.978322982788086, + "learning_rate": 6.846164654258913e-06, + "loss": 1.0532, + "step": 9794 + }, + { + "epoch": 0.6141450874662988, + "grad_norm": 3.3999550342559814, + "learning_rate": 6.844237596326941e-06, + "loss": 1.0668, + "step": 9795 + }, + { + "epoch": 0.6142077873220891, + "grad_norm": 3.0791549682617188, + "learning_rate": 6.842310668538745e-06, + "loss": 1.1382, + "step": 9796 + }, + { + "epoch": 0.6142704871778795, + "grad_norm": 3.3781559467315674, + "learning_rate": 6.840383870973799e-06, + "loss": 1.134, + "step": 9797 + }, + { + "epoch": 0.6143331870336698, + 
"grad_norm": 3.070728063583374, + "learning_rate": 6.838457203711557e-06, + "loss": 0.9922, + "step": 9798 + }, + { + "epoch": 0.6143958868894601, + "grad_norm": 3.1717705726623535, + "learning_rate": 6.836530666831478e-06, + "loss": 1.1231, + "step": 9799 + }, + { + "epoch": 0.6144585867452504, + "grad_norm": 3.2761101722717285, + "learning_rate": 6.834604260413014e-06, + "loss": 1.1304, + "step": 9800 + }, + { + "epoch": 0.6145212866010408, + "grad_norm": 3.571911573410034, + "learning_rate": 6.832677984535607e-06, + "loss": 1.2466, + "step": 9801 + }, + { + "epoch": 0.6145839864568311, + "grad_norm": 3.2041425704956055, + "learning_rate": 6.8307518392786995e-06, + "loss": 1.2136, + "step": 9802 + }, + { + "epoch": 0.6146466863126214, + "grad_norm": 3.155026912689209, + "learning_rate": 6.828825824721723e-06, + "loss": 1.1899, + "step": 9803 + }, + { + "epoch": 0.6147093861684119, + "grad_norm": 3.115508556365967, + "learning_rate": 6.826899940944111e-06, + "loss": 1.0274, + "step": 9804 + }, + { + "epoch": 0.6147720860242022, + "grad_norm": 3.1328117847442627, + "learning_rate": 6.824974188025282e-06, + "loss": 1.0075, + "step": 9805 + }, + { + "epoch": 0.6148347858799925, + "grad_norm": 3.445101737976074, + "learning_rate": 6.823048566044656e-06, + "loss": 1.0934, + "step": 9806 + }, + { + "epoch": 0.6148974857357828, + "grad_norm": 3.167844533920288, + "learning_rate": 6.821123075081646e-06, + "loss": 1.0253, + "step": 9807 + }, + { + "epoch": 0.6149601855915732, + "grad_norm": 3.177121877670288, + "learning_rate": 6.81919771521566e-06, + "loss": 1.0828, + "step": 9808 + }, + { + "epoch": 0.6150228854473635, + "grad_norm": 3.2875332832336426, + "learning_rate": 6.817272486526098e-06, + "loss": 0.9433, + "step": 9809 + }, + { + "epoch": 0.6150855853031538, + "grad_norm": 3.031458854675293, + "learning_rate": 6.8153473890923576e-06, + "loss": 1.2807, + "step": 9810 + }, + { + "epoch": 0.6151482851589442, + "grad_norm": 3.2401249408721924, + "learning_rate": 6.8134224229938315e-06, + "loss": 1.2696, + "step": 9811 + }, + { + "epoch": 0.6152109850147345, + "grad_norm": 3.2982096672058105, + "learning_rate": 6.811497588309901e-06, + "loss": 1.1799, + "step": 9812 + }, + { + "epoch": 0.6152736848705248, + "grad_norm": 3.2495968341827393, + "learning_rate": 6.809572885119952e-06, + "loss": 1.0595, + "step": 9813 + }, + { + "epoch": 0.6153363847263151, + "grad_norm": 3.374720335006714, + "learning_rate": 6.807648313503355e-06, + "loss": 1.1184, + "step": 9814 + }, + { + "epoch": 0.6153990845821055, + "grad_norm": 3.223017930984497, + "learning_rate": 6.8057238735394825e-06, + "loss": 1.0157, + "step": 9815 + }, + { + "epoch": 0.6154617844378958, + "grad_norm": 3.3154900074005127, + "learning_rate": 6.803799565307696e-06, + "loss": 1.2395, + "step": 9816 + }, + { + "epoch": 0.6155244842936861, + "grad_norm": 3.4450531005859375, + "learning_rate": 6.801875388887356e-06, + "loss": 1.0791, + "step": 9817 + }, + { + "epoch": 0.6155871841494764, + "grad_norm": 2.99824595451355, + "learning_rate": 6.799951344357813e-06, + "loss": 1.2616, + "step": 9818 + }, + { + "epoch": 0.6156498840052668, + "grad_norm": 3.0387942790985107, + "learning_rate": 6.798027431798415e-06, + "loss": 1.2435, + "step": 9819 + }, + { + "epoch": 0.6157125838610571, + "grad_norm": 3.1762735843658447, + "learning_rate": 6.796103651288508e-06, + "loss": 1.1552, + "step": 9820 + }, + { + "epoch": 0.6157752837168474, + "grad_norm": 3.01540207862854, + "learning_rate": 6.794180002907426e-06, + "loss": 1.102, + "step": 9821 + }, + { 
+ "epoch": 0.6158379835726377, + "grad_norm": 3.26645827293396, + "learning_rate": 6.7922564867345005e-06, + "loss": 1.028, + "step": 9822 + }, + { + "epoch": 0.6159006834284281, + "grad_norm": 3.3534157276153564, + "learning_rate": 6.790333102849057e-06, + "loss": 1.2781, + "step": 9823 + }, + { + "epoch": 0.6159633832842184, + "grad_norm": 3.2605183124542236, + "learning_rate": 6.788409851330419e-06, + "loss": 0.9143, + "step": 9824 + }, + { + "epoch": 0.6160260831400087, + "grad_norm": 3.613154172897339, + "learning_rate": 6.786486732257895e-06, + "loss": 1.1964, + "step": 9825 + }, + { + "epoch": 0.6160887829957991, + "grad_norm": 3.548774003982544, + "learning_rate": 6.7845637457108016e-06, + "loss": 1.2427, + "step": 9826 + }, + { + "epoch": 0.6161514828515895, + "grad_norm": 3.1781835556030273, + "learning_rate": 6.782640891768439e-06, + "loss": 0.7496, + "step": 9827 + }, + { + "epoch": 0.6162141827073798, + "grad_norm": 3.178940534591675, + "learning_rate": 6.780718170510107e-06, + "loss": 1.1906, + "step": 9828 + }, + { + "epoch": 0.6162768825631701, + "grad_norm": 3.054971933364868, + "learning_rate": 6.778795582015096e-06, + "loss": 1.2182, + "step": 9829 + }, + { + "epoch": 0.6163395824189605, + "grad_norm": 3.1656453609466553, + "learning_rate": 6.7768731263626965e-06, + "loss": 0.9253, + "step": 9830 + }, + { + "epoch": 0.6164022822747508, + "grad_norm": 3.221836566925049, + "learning_rate": 6.774950803632192e-06, + "loss": 1.0659, + "step": 9831 + }, + { + "epoch": 0.6164649821305411, + "grad_norm": 2.862368583679199, + "learning_rate": 6.773028613902855e-06, + "loss": 1.1659, + "step": 9832 + }, + { + "epoch": 0.6165276819863315, + "grad_norm": 2.9747097492218018, + "learning_rate": 6.771106557253961e-06, + "loss": 1.1328, + "step": 9833 + }, + { + "epoch": 0.6165903818421218, + "grad_norm": 3.1298139095306396, + "learning_rate": 6.769184633764772e-06, + "loss": 1.0305, + "step": 9834 + }, + { + "epoch": 0.6166530816979121, + "grad_norm": 3.1670854091644287, + "learning_rate": 6.76726284351455e-06, + "loss": 1.1363, + "step": 9835 + }, + { + "epoch": 0.6167157815537024, + "grad_norm": 3.228689670562744, + "learning_rate": 6.76534118658255e-06, + "loss": 1.1691, + "step": 9836 + }, + { + "epoch": 0.6167784814094928, + "grad_norm": 3.4829649925231934, + "learning_rate": 6.763419663048021e-06, + "loss": 1.0367, + "step": 9837 + }, + { + "epoch": 0.6168411812652831, + "grad_norm": 3.1632585525512695, + "learning_rate": 6.761498272990205e-06, + "loss": 1.0695, + "step": 9838 + }, + { + "epoch": 0.6169038811210734, + "grad_norm": 3.4253995418548584, + "learning_rate": 6.759577016488343e-06, + "loss": 1.1261, + "step": 9839 + }, + { + "epoch": 0.6169665809768637, + "grad_norm": 3.4052891731262207, + "learning_rate": 6.757655893621666e-06, + "loss": 1.1321, + "step": 9840 + }, + { + "epoch": 0.6170292808326541, + "grad_norm": 3.4932875633239746, + "learning_rate": 6.755734904469401e-06, + "loss": 0.8983, + "step": 9841 + }, + { + "epoch": 0.6170919806884444, + "grad_norm": 3.4990792274475098, + "learning_rate": 6.753814049110772e-06, + "loss": 1.0581, + "step": 9842 + }, + { + "epoch": 0.6171546805442347, + "grad_norm": 3.2355246543884277, + "learning_rate": 6.751893327624991e-06, + "loss": 1.0719, + "step": 9843 + }, + { + "epoch": 0.617217380400025, + "grad_norm": 3.359004020690918, + "learning_rate": 6.749972740091273e-06, + "loss": 1.0617, + "step": 9844 + }, + { + "epoch": 0.6172800802558154, + "grad_norm": 3.1684632301330566, + "learning_rate": 6.74805228658882e-06, + 
"loss": 1.1541, + "step": 9845 + }, + { + "epoch": 0.6173427801116057, + "grad_norm": 3.457571268081665, + "learning_rate": 6.746131967196834e-06, + "loss": 0.8601, + "step": 9846 + }, + { + "epoch": 0.617405479967396, + "grad_norm": 3.4584877490997314, + "learning_rate": 6.744211781994506e-06, + "loss": 1.096, + "step": 9847 + }, + { + "epoch": 0.6174681798231864, + "grad_norm": 3.289987802505493, + "learning_rate": 6.742291731061027e-06, + "loss": 1.0257, + "step": 9848 + }, + { + "epoch": 0.6175308796789767, + "grad_norm": 3.5264484882354736, + "learning_rate": 6.740371814475579e-06, + "loss": 0.9964, + "step": 9849 + }, + { + "epoch": 0.6175935795347671, + "grad_norm": 3.361180543899536, + "learning_rate": 6.738452032317339e-06, + "loss": 1.0407, + "step": 9850 + }, + { + "epoch": 0.6176562793905574, + "grad_norm": 2.931689500808716, + "learning_rate": 6.736532384665482e-06, + "loss": 1.0149, + "step": 9851 + }, + { + "epoch": 0.6177189792463478, + "grad_norm": 3.230398416519165, + "learning_rate": 6.734612871599169e-06, + "loss": 1.1843, + "step": 9852 + }, + { + "epoch": 0.6177816791021381, + "grad_norm": 3.1926381587982178, + "learning_rate": 6.732693493197566e-06, + "loss": 1.1282, + "step": 9853 + }, + { + "epoch": 0.6178443789579284, + "grad_norm": 3.2187516689300537, + "learning_rate": 6.730774249539824e-06, + "loss": 1.0603, + "step": 9854 + }, + { + "epoch": 0.6179070788137188, + "grad_norm": 3.0784857273101807, + "learning_rate": 6.728855140705097e-06, + "loss": 1.1111, + "step": 9855 + }, + { + "epoch": 0.6179697786695091, + "grad_norm": 3.3331613540649414, + "learning_rate": 6.726936166772526e-06, + "loss": 1.0604, + "step": 9856 + }, + { + "epoch": 0.6180324785252994, + "grad_norm": 3.4219589233398438, + "learning_rate": 6.725017327821252e-06, + "loss": 1.0407, + "step": 9857 + }, + { + "epoch": 0.6180951783810897, + "grad_norm": 2.8590383529663086, + "learning_rate": 6.723098623930403e-06, + "loss": 1.1507, + "step": 9858 + }, + { + "epoch": 0.6181578782368801, + "grad_norm": 3.3949527740478516, + "learning_rate": 6.721180055179113e-06, + "loss": 0.9392, + "step": 9859 + }, + { + "epoch": 0.6182205780926704, + "grad_norm": 3.2000744342803955, + "learning_rate": 6.719261621646498e-06, + "loss": 1.2556, + "step": 9860 + }, + { + "epoch": 0.6182832779484607, + "grad_norm": 3.6927223205566406, + "learning_rate": 6.717343323411678e-06, + "loss": 1.0126, + "step": 9861 + }, + { + "epoch": 0.618345977804251, + "grad_norm": 3.189131021499634, + "learning_rate": 6.715425160553764e-06, + "loss": 0.9671, + "step": 9862 + }, + { + "epoch": 0.6184086776600414, + "grad_norm": 3.524611711502075, + "learning_rate": 6.7135071331518575e-06, + "loss": 0.9532, + "step": 9863 + }, + { + "epoch": 0.6184713775158317, + "grad_norm": 3.530067205429077, + "learning_rate": 6.7115892412850635e-06, + "loss": 1.0524, + "step": 9864 + }, + { + "epoch": 0.618534077371622, + "grad_norm": 3.1110236644744873, + "learning_rate": 6.7096714850324694e-06, + "loss": 0.9839, + "step": 9865 + }, + { + "epoch": 0.6185967772274124, + "grad_norm": 3.4167118072509766, + "learning_rate": 6.70775386447317e-06, + "loss": 1.1639, + "step": 9866 + }, + { + "epoch": 0.6186594770832027, + "grad_norm": 3.36922550201416, + "learning_rate": 6.7058363796862425e-06, + "loss": 1.0082, + "step": 9867 + }, + { + "epoch": 0.618722176938993, + "grad_norm": 3.070460319519043, + "learning_rate": 6.7039190307507694e-06, + "loss": 1.1321, + "step": 9868 + }, + { + "epoch": 0.6187848767947833, + "grad_norm": 3.194938898086548, + 
"learning_rate": 6.7020018177458156e-06, + "loss": 1.1989, + "step": 9869 + }, + { + "epoch": 0.6188475766505737, + "grad_norm": 3.191505193710327, + "learning_rate": 6.700084740750452e-06, + "loss": 1.312, + "step": 9870 + }, + { + "epoch": 0.618910276506364, + "grad_norm": 3.4283394813537598, + "learning_rate": 6.69816779984374e-06, + "loss": 1.0431, + "step": 9871 + }, + { + "epoch": 0.6189729763621543, + "grad_norm": 3.132067918777466, + "learning_rate": 6.69625099510473e-06, + "loss": 1.1245, + "step": 9872 + }, + { + "epoch": 0.6190356762179448, + "grad_norm": 3.0511112213134766, + "learning_rate": 6.694334326612475e-06, + "loss": 1.0303, + "step": 9873 + }, + { + "epoch": 0.6190983760737351, + "grad_norm": 3.3849692344665527, + "learning_rate": 6.692417794446016e-06, + "loss": 1.1552, + "step": 9874 + }, + { + "epoch": 0.6191610759295254, + "grad_norm": 3.2105462551116943, + "learning_rate": 6.6905013986843915e-06, + "loss": 1.2069, + "step": 9875 + }, + { + "epoch": 0.6192237757853157, + "grad_norm": 3.2412831783294678, + "learning_rate": 6.688585139406634e-06, + "loss": 1.087, + "step": 9876 + }, + { + "epoch": 0.6192864756411061, + "grad_norm": 3.4973392486572266, + "learning_rate": 6.686669016691769e-06, + "loss": 0.9742, + "step": 9877 + }, + { + "epoch": 0.6193491754968964, + "grad_norm": 3.4642679691314697, + "learning_rate": 6.684753030618819e-06, + "loss": 0.9937, + "step": 9878 + }, + { + "epoch": 0.6194118753526867, + "grad_norm": 3.0983216762542725, + "learning_rate": 6.682837181266799e-06, + "loss": 1.1241, + "step": 9879 + }, + { + "epoch": 0.619474575208477, + "grad_norm": 3.1262080669403076, + "learning_rate": 6.680921468714718e-06, + "loss": 1.0303, + "step": 9880 + }, + { + "epoch": 0.6195372750642674, + "grad_norm": 2.9567158222198486, + "learning_rate": 6.67900589304158e-06, + "loss": 1.1268, + "step": 9881 + }, + { + "epoch": 0.6195999749200577, + "grad_norm": 2.9580061435699463, + "learning_rate": 6.677090454326387e-06, + "loss": 1.0301, + "step": 9882 + }, + { + "epoch": 0.619662674775848, + "grad_norm": 3.3833911418914795, + "learning_rate": 6.675175152648126e-06, + "loss": 1.1563, + "step": 9883 + }, + { + "epoch": 0.6197253746316383, + "grad_norm": 3.283799648284912, + "learning_rate": 6.673259988085788e-06, + "loss": 1.1286, + "step": 9884 + }, + { + "epoch": 0.6197880744874287, + "grad_norm": 3.2661561965942383, + "learning_rate": 6.671344960718352e-06, + "loss": 1.0806, + "step": 9885 + }, + { + "epoch": 0.619850774343219, + "grad_norm": 3.265749216079712, + "learning_rate": 6.669430070624799e-06, + "loss": 1.1021, + "step": 9886 + }, + { + "epoch": 0.6199134741990093, + "grad_norm": 3.424480676651001, + "learning_rate": 6.66751531788409e-06, + "loss": 1.1868, + "step": 9887 + }, + { + "epoch": 0.6199761740547997, + "grad_norm": 3.225724220275879, + "learning_rate": 6.665600702575199e-06, + "loss": 1.0003, + "step": 9888 + }, + { + "epoch": 0.62003887391059, + "grad_norm": 3.4112026691436768, + "learning_rate": 6.663686224777078e-06, + "loss": 1.2144, + "step": 9889 + }, + { + "epoch": 0.6201015737663803, + "grad_norm": 3.400078535079956, + "learning_rate": 6.661771884568684e-06, + "loss": 1.1513, + "step": 9890 + }, + { + "epoch": 0.6201642736221706, + "grad_norm": 2.778141975402832, + "learning_rate": 6.659857682028963e-06, + "loss": 1.1516, + "step": 9891 + }, + { + "epoch": 0.620226973477961, + "grad_norm": 2.986947536468506, + "learning_rate": 6.657943617236856e-06, + "loss": 1.1146, + "step": 9892 + }, + { + "epoch": 0.6202896733337513, + 
"grad_norm": 3.4976141452789307, + "learning_rate": 6.6560296902713015e-06, + "loss": 1.0287, + "step": 9893 + }, + { + "epoch": 0.6203523731895416, + "grad_norm": 3.3792500495910645, + "learning_rate": 6.654115901211229e-06, + "loss": 1.2966, + "step": 9894 + }, + { + "epoch": 0.6204150730453319, + "grad_norm": 3.383014678955078, + "learning_rate": 6.6522022501355625e-06, + "loss": 1.0529, + "step": 9895 + }, + { + "epoch": 0.6204777729011224, + "grad_norm": 3.174513339996338, + "learning_rate": 6.65028873712322e-06, + "loss": 1.0805, + "step": 9896 + }, + { + "epoch": 0.6205404727569127, + "grad_norm": 3.2575132846832275, + "learning_rate": 6.648375362253119e-06, + "loss": 1.0696, + "step": 9897 + }, + { + "epoch": 0.620603172612703, + "grad_norm": 3.345599889755249, + "learning_rate": 6.646462125604162e-06, + "loss": 1.0943, + "step": 9898 + }, + { + "epoch": 0.6206658724684934, + "grad_norm": 2.9274041652679443, + "learning_rate": 6.644549027255255e-06, + "loss": 0.9831, + "step": 9899 + }, + { + "epoch": 0.6207285723242837, + "grad_norm": 2.9618325233459473, + "learning_rate": 6.642636067285291e-06, + "loss": 1.0146, + "step": 9900 + }, + { + "epoch": 0.620791272180074, + "grad_norm": 3.2012434005737305, + "learning_rate": 6.640723245773162e-06, + "loss": 1.1187, + "step": 9901 + }, + { + "epoch": 0.6208539720358643, + "grad_norm": 3.1306722164154053, + "learning_rate": 6.638810562797755e-06, + "loss": 1.1283, + "step": 9902 + }, + { + "epoch": 0.6209166718916547, + "grad_norm": 3.0828874111175537, + "learning_rate": 6.636898018437945e-06, + "loss": 1.0572, + "step": 9903 + }, + { + "epoch": 0.620979371747445, + "grad_norm": 3.5930604934692383, + "learning_rate": 6.634985612772611e-06, + "loss": 0.9184, + "step": 9904 + }, + { + "epoch": 0.6210420716032353, + "grad_norm": 3.0633246898651123, + "learning_rate": 6.633073345880614e-06, + "loss": 1.1122, + "step": 9905 + }, + { + "epoch": 0.6211047714590257, + "grad_norm": 3.4268715381622314, + "learning_rate": 6.631161217840821e-06, + "loss": 1.1011, + "step": 9906 + }, + { + "epoch": 0.621167471314816, + "grad_norm": 3.5986239910125732, + "learning_rate": 6.629249228732084e-06, + "loss": 1.0571, + "step": 9907 + }, + { + "epoch": 0.6212301711706063, + "grad_norm": 3.20955753326416, + "learning_rate": 6.627337378633256e-06, + "loss": 0.958, + "step": 9908 + }, + { + "epoch": 0.6212928710263966, + "grad_norm": 3.1788833141326904, + "learning_rate": 6.625425667623185e-06, + "loss": 1.1425, + "step": 9909 + }, + { + "epoch": 0.621355570882187, + "grad_norm": 3.1226861476898193, + "learning_rate": 6.623514095780705e-06, + "loss": 1.1584, + "step": 9910 + }, + { + "epoch": 0.6214182707379773, + "grad_norm": 2.9747798442840576, + "learning_rate": 6.62160266318465e-06, + "loss": 1.2592, + "step": 9911 + }, + { + "epoch": 0.6214809705937676, + "grad_norm": 3.4556663036346436, + "learning_rate": 6.619691369913852e-06, + "loss": 0.9731, + "step": 9912 + }, + { + "epoch": 0.6215436704495579, + "grad_norm": 3.017075777053833, + "learning_rate": 6.617780216047127e-06, + "loss": 1.1958, + "step": 9913 + }, + { + "epoch": 0.6216063703053483, + "grad_norm": 3.0327250957489014, + "learning_rate": 6.615869201663296e-06, + "loss": 1.0375, + "step": 9914 + }, + { + "epoch": 0.6216690701611386, + "grad_norm": 3.099546194076538, + "learning_rate": 6.613958326841163e-06, + "loss": 0.998, + "step": 9915 + }, + { + "epoch": 0.6217317700169289, + "grad_norm": 3.268840789794922, + "learning_rate": 6.6120475916595404e-06, + "loss": 1.163, + "step": 9916 + }, + { + 
"epoch": 0.6217944698727192, + "grad_norm": 2.942749261856079, + "learning_rate": 6.610136996197221e-06, + "loss": 1.1259, + "step": 9917 + }, + { + "epoch": 0.6218571697285096, + "grad_norm": 3.4186341762542725, + "learning_rate": 6.608226540533003e-06, + "loss": 0.9916, + "step": 9918 + }, + { + "epoch": 0.6219198695843, + "grad_norm": 3.6727724075317383, + "learning_rate": 6.606316224745666e-06, + "loss": 0.9277, + "step": 9919 + }, + { + "epoch": 0.6219825694400903, + "grad_norm": 3.044064998626709, + "learning_rate": 6.604406048913997e-06, + "loss": 1.0879, + "step": 9920 + }, + { + "epoch": 0.6220452692958807, + "grad_norm": 3.0004653930664062, + "learning_rate": 6.6024960131167725e-06, + "loss": 1.0265, + "step": 9921 + }, + { + "epoch": 0.622107969151671, + "grad_norm": 3.4069604873657227, + "learning_rate": 6.6005861174327605e-06, + "loss": 1.0143, + "step": 9922 + }, + { + "epoch": 0.6221706690074613, + "grad_norm": 3.8451876640319824, + "learning_rate": 6.598676361940726e-06, + "loss": 1.0668, + "step": 9923 + }, + { + "epoch": 0.6222333688632516, + "grad_norm": 3.204716920852661, + "learning_rate": 6.596766746719425e-06, + "loss": 1.0625, + "step": 9924 + }, + { + "epoch": 0.622296068719042, + "grad_norm": 3.4813232421875, + "learning_rate": 6.594857271847615e-06, + "loss": 1.1723, + "step": 9925 + }, + { + "epoch": 0.6223587685748323, + "grad_norm": 3.4995288848876953, + "learning_rate": 6.592947937404036e-06, + "loss": 1.127, + "step": 9926 + }, + { + "epoch": 0.6224214684306226, + "grad_norm": 3.297861337661743, + "learning_rate": 6.5910387434674365e-06, + "loss": 0.9957, + "step": 9927 + }, + { + "epoch": 0.622484168286413, + "grad_norm": 3.1755595207214355, + "learning_rate": 6.589129690116545e-06, + "loss": 1.0824, + "step": 9928 + }, + { + "epoch": 0.6225468681422033, + "grad_norm": 3.348005533218384, + "learning_rate": 6.587220777430097e-06, + "loss": 1.1018, + "step": 9929 + }, + { + "epoch": 0.6226095679979936, + "grad_norm": 3.067260265350342, + "learning_rate": 6.58531200548681e-06, + "loss": 1.1326, + "step": 9930 + }, + { + "epoch": 0.6226722678537839, + "grad_norm": 3.027320146560669, + "learning_rate": 6.583403374365406e-06, + "loss": 1.1695, + "step": 9931 + }, + { + "epoch": 0.6227349677095743, + "grad_norm": 3.1455230712890625, + "learning_rate": 6.581494884144597e-06, + "loss": 1.095, + "step": 9932 + }, + { + "epoch": 0.6227976675653646, + "grad_norm": 3.1803622245788574, + "learning_rate": 6.579586534903087e-06, + "loss": 1.1025, + "step": 9933 + }, + { + "epoch": 0.6228603674211549, + "grad_norm": 3.295138120651245, + "learning_rate": 6.57767832671958e-06, + "loss": 1.1092, + "step": 9934 + }, + { + "epoch": 0.6229230672769452, + "grad_norm": 3.2757182121276855, + "learning_rate": 6.575770259672766e-06, + "loss": 1.1197, + "step": 9935 + }, + { + "epoch": 0.6229857671327356, + "grad_norm": 3.132880449295044, + "learning_rate": 6.573862333841338e-06, + "loss": 1.1304, + "step": 9936 + }, + { + "epoch": 0.6230484669885259, + "grad_norm": 2.9821817874908447, + "learning_rate": 6.571954549303975e-06, + "loss": 1.1885, + "step": 9937 + }, + { + "epoch": 0.6231111668443162, + "grad_norm": 2.8734724521636963, + "learning_rate": 6.5700469061393576e-06, + "loss": 1.1901, + "step": 9938 + }, + { + "epoch": 0.6231738667001065, + "grad_norm": 3.288959503173828, + "learning_rate": 6.568139404426155e-06, + "loss": 0.9823, + "step": 9939 + }, + { + "epoch": 0.6232365665558969, + "grad_norm": 3.1946916580200195, + "learning_rate": 6.566232044243033e-06, + "loss": 1.1165, 
+ "step": 9940 + }, + { + "epoch": 0.6232992664116872, + "grad_norm": 2.9513871669769287, + "learning_rate": 6.564324825668651e-06, + "loss": 1.2273, + "step": 9941 + }, + { + "epoch": 0.6233619662674775, + "grad_norm": 3.139674186706543, + "learning_rate": 6.562417748781662e-06, + "loss": 1.1972, + "step": 9942 + }, + { + "epoch": 0.623424666123268, + "grad_norm": 3.110097885131836, + "learning_rate": 6.560510813660719e-06, + "loss": 1.1604, + "step": 9943 + }, + { + "epoch": 0.6234873659790583, + "grad_norm": 3.675727128982544, + "learning_rate": 6.558604020384457e-06, + "loss": 1.0015, + "step": 9944 + }, + { + "epoch": 0.6235500658348486, + "grad_norm": 3.372222423553467, + "learning_rate": 6.556697369031518e-06, + "loss": 1.0022, + "step": 9945 + }, + { + "epoch": 0.623612765690639, + "grad_norm": 3.381997585296631, + "learning_rate": 6.554790859680527e-06, + "loss": 1.0865, + "step": 9946 + }, + { + "epoch": 0.6236754655464293, + "grad_norm": 2.85204815864563, + "learning_rate": 6.552884492410114e-06, + "loss": 1.1345, + "step": 9947 + }, + { + "epoch": 0.6237381654022196, + "grad_norm": 3.09702205657959, + "learning_rate": 6.550978267298893e-06, + "loss": 1.1692, + "step": 9948 + }, + { + "epoch": 0.6238008652580099, + "grad_norm": 3.235896110534668, + "learning_rate": 6.549072184425481e-06, + "loss": 1.1307, + "step": 9949 + }, + { + "epoch": 0.6238635651138003, + "grad_norm": 3.1918022632598877, + "learning_rate": 6.547166243868481e-06, + "loss": 1.1368, + "step": 9950 + }, + { + "epoch": 0.6239262649695906, + "grad_norm": 3.413938522338867, + "learning_rate": 6.545260445706496e-06, + "loss": 1.0252, + "step": 9951 + }, + { + "epoch": 0.6239889648253809, + "grad_norm": 3.1576740741729736, + "learning_rate": 6.543354790018123e-06, + "loss": 0.9913, + "step": 9952 + }, + { + "epoch": 0.6240516646811712, + "grad_norm": 2.9076409339904785, + "learning_rate": 6.541449276881946e-06, + "loss": 1.0409, + "step": 9953 + }, + { + "epoch": 0.6241143645369616, + "grad_norm": 3.1605958938598633, + "learning_rate": 6.539543906376556e-06, + "loss": 1.0056, + "step": 9954 + }, + { + "epoch": 0.6241770643927519, + "grad_norm": 3.200779438018799, + "learning_rate": 6.537638678580523e-06, + "loss": 1.2138, + "step": 9955 + }, + { + "epoch": 0.6242397642485422, + "grad_norm": 3.36742901802063, + "learning_rate": 6.535733593572424e-06, + "loss": 0.9923, + "step": 9956 + }, + { + "epoch": 0.6243024641043325, + "grad_norm": 2.9549314975738525, + "learning_rate": 6.533828651430823e-06, + "loss": 1.1874, + "step": 9957 + }, + { + "epoch": 0.6243651639601229, + "grad_norm": 3.0099925994873047, + "learning_rate": 6.531923852234279e-06, + "loss": 1.2213, + "step": 9958 + }, + { + "epoch": 0.6244278638159132, + "grad_norm": 3.235624313354492, + "learning_rate": 6.5300191960613454e-06, + "loss": 1.0368, + "step": 9959 + }, + { + "epoch": 0.6244905636717035, + "grad_norm": 3.1687655448913574, + "learning_rate": 6.528114682990576e-06, + "loss": 1.2028, + "step": 9960 + }, + { + "epoch": 0.6245532635274939, + "grad_norm": 3.3270089626312256, + "learning_rate": 6.526210313100505e-06, + "loss": 1.1193, + "step": 9961 + }, + { + "epoch": 0.6246159633832842, + "grad_norm": 2.9849328994750977, + "learning_rate": 6.524306086469672e-06, + "loss": 0.918, + "step": 9962 + }, + { + "epoch": 0.6246786632390745, + "grad_norm": 3.1013288497924805, + "learning_rate": 6.52240200317661e-06, + "loss": 0.898, + "step": 9963 + }, + { + "epoch": 0.6247413630948648, + "grad_norm": 3.0582504272460938, + "learning_rate": 
6.5204980632998394e-06, + "loss": 1.1717, + "step": 9964 + }, + { + "epoch": 0.6248040629506552, + "grad_norm": 3.0155200958251953, + "learning_rate": 6.518594266917883e-06, + "loss": 1.1077, + "step": 9965 + }, + { + "epoch": 0.6248667628064456, + "grad_norm": 3.143564462661743, + "learning_rate": 6.516690614109248e-06, + "loss": 1.0935, + "step": 9966 + }, + { + "epoch": 0.6249294626622359, + "grad_norm": 3.3090295791625977, + "learning_rate": 6.514787104952447e-06, + "loss": 0.9687, + "step": 9967 + }, + { + "epoch": 0.6249921625180263, + "grad_norm": 3.2262730598449707, + "learning_rate": 6.512883739525976e-06, + "loss": 1.1301, + "step": 9968 + }, + { + "epoch": 0.6250548623738166, + "grad_norm": 3.3662173748016357, + "learning_rate": 6.510980517908334e-06, + "loss": 0.9591, + "step": 9969 + }, + { + "epoch": 0.6251175622296069, + "grad_norm": 3.2971291542053223, + "learning_rate": 6.509077440178004e-06, + "loss": 1.2199, + "step": 9970 + }, + { + "epoch": 0.6251802620853972, + "grad_norm": 3.180277109146118, + "learning_rate": 6.507174506413476e-06, + "loss": 1.1616, + "step": 9971 + }, + { + "epoch": 0.6252429619411876, + "grad_norm": 3.630535125732422, + "learning_rate": 6.505271716693224e-06, + "loss": 1.042, + "step": 9972 + }, + { + "epoch": 0.6253056617969779, + "grad_norm": 3.277214288711548, + "learning_rate": 6.503369071095718e-06, + "loss": 0.9976, + "step": 9973 + }, + { + "epoch": 0.6253683616527682, + "grad_norm": 3.4740474224090576, + "learning_rate": 6.501466569699424e-06, + "loss": 1.0622, + "step": 9974 + }, + { + "epoch": 0.6254310615085585, + "grad_norm": 3.25722336769104, + "learning_rate": 6.499564212582801e-06, + "loss": 1.2719, + "step": 9975 + }, + { + "epoch": 0.6254937613643489, + "grad_norm": 3.3273162841796875, + "learning_rate": 6.497661999824305e-06, + "loss": 1.0743, + "step": 9976 + }, + { + "epoch": 0.6255564612201392, + "grad_norm": 3.5682849884033203, + "learning_rate": 6.495759931502379e-06, + "loss": 1.0066, + "step": 9977 + }, + { + "epoch": 0.6256191610759295, + "grad_norm": 3.07277774810791, + "learning_rate": 6.493858007695468e-06, + "loss": 0.9769, + "step": 9978 + }, + { + "epoch": 0.6256818609317198, + "grad_norm": 2.986232042312622, + "learning_rate": 6.491956228482004e-06, + "loss": 1.0475, + "step": 9979 + }, + { + "epoch": 0.6257445607875102, + "grad_norm": 3.3327858448028564, + "learning_rate": 6.49005459394042e-06, + "loss": 0.8623, + "step": 9980 + }, + { + "epoch": 0.6258072606433005, + "grad_norm": 3.3649044036865234, + "learning_rate": 6.488153104149136e-06, + "loss": 1.2388, + "step": 9981 + }, + { + "epoch": 0.6258699604990908, + "grad_norm": 3.5158097743988037, + "learning_rate": 6.486251759186573e-06, + "loss": 1.143, + "step": 9982 + }, + { + "epoch": 0.6259326603548812, + "grad_norm": 2.931516647338867, + "learning_rate": 6.484350559131141e-06, + "loss": 1.2112, + "step": 9983 + }, + { + "epoch": 0.6259953602106715, + "grad_norm": 2.9923324584960938, + "learning_rate": 6.482449504061243e-06, + "loss": 1.1474, + "step": 9984 + }, + { + "epoch": 0.6260580600664618, + "grad_norm": 3.2151615619659424, + "learning_rate": 6.480548594055285e-06, + "loss": 1.0246, + "step": 9985 + }, + { + "epoch": 0.6261207599222521, + "grad_norm": 3.062201738357544, + "learning_rate": 6.478647829191653e-06, + "loss": 1.1278, + "step": 9986 + }, + { + "epoch": 0.6261834597780425, + "grad_norm": 3.3916358947753906, + "learning_rate": 6.476747209548742e-06, + "loss": 1.0897, + "step": 9987 + }, + { + "epoch": 0.6262461596338328, + "grad_norm": 
3.0196433067321777, + "learning_rate": 6.474846735204926e-06, + "loss": 0.9913, + "step": 9988 + }, + { + "epoch": 0.6263088594896232, + "grad_norm": 3.5253138542175293, + "learning_rate": 6.4729464062385875e-06, + "loss": 1.1299, + "step": 9989 + }, + { + "epoch": 0.6263715593454136, + "grad_norm": 3.302816152572632, + "learning_rate": 6.4710462227280914e-06, + "loss": 1.1018, + "step": 9990 + }, + { + "epoch": 0.6264342592012039, + "grad_norm": 3.49981427192688, + "learning_rate": 6.469146184751805e-06, + "loss": 0.9856, + "step": 9991 + }, + { + "epoch": 0.6264969590569942, + "grad_norm": 2.9762375354766846, + "learning_rate": 6.467246292388082e-06, + "loss": 1.1516, + "step": 9992 + }, + { + "epoch": 0.6265596589127845, + "grad_norm": 3.171863079071045, + "learning_rate": 6.4653465457152765e-06, + "loss": 1.1827, + "step": 9993 + }, + { + "epoch": 0.6266223587685749, + "grad_norm": 3.016232967376709, + "learning_rate": 6.463446944811735e-06, + "loss": 1.1791, + "step": 9994 + }, + { + "epoch": 0.6266850586243652, + "grad_norm": 3.2224624156951904, + "learning_rate": 6.461547489755795e-06, + "loss": 1.1193, + "step": 9995 + }, + { + "epoch": 0.6267477584801555, + "grad_norm": 3.2134995460510254, + "learning_rate": 6.459648180625793e-06, + "loss": 1.1384, + "step": 9996 + }, + { + "epoch": 0.6268104583359458, + "grad_norm": 3.1365292072296143, + "learning_rate": 6.457749017500052e-06, + "loss": 1.1367, + "step": 9997 + }, + { + "epoch": 0.6268731581917362, + "grad_norm": 3.4501004219055176, + "learning_rate": 6.455850000456898e-06, + "loss": 1.142, + "step": 9998 + }, + { + "epoch": 0.6269358580475265, + "grad_norm": 3.385406017303467, + "learning_rate": 6.453951129574644e-06, + "loss": 1.1866, + "step": 9999 + }, + { + "epoch": 0.6269985579033168, + "grad_norm": 3.060765504837036, + "learning_rate": 6.452052404931602e-06, + "loss": 1.1423, + "step": 10000 + }, + { + "epoch": 0.6269985579033168, + "eval_loss": 1.1103445291519165, + "eval_runtime": 144.2946, + "eval_samples_per_second": 4.366, + "eval_steps_per_second": 1.095, + "step": 10000 + }, + { + "epoch": 0.6270612577591071, + "grad_norm": 3.315847158432007, + "learning_rate": 6.45015382660607e-06, + "loss": 0.9767, + "step": 10001 + }, + { + "epoch": 0.6271239576148975, + "grad_norm": 3.199017286300659, + "learning_rate": 6.44825539467635e-06, + "loss": 1.0804, + "step": 10002 + }, + { + "epoch": 0.6271866574706878, + "grad_norm": 3.0030736923217773, + "learning_rate": 6.446357109220736e-06, + "loss": 1.3373, + "step": 10003 + }, + { + "epoch": 0.6272493573264781, + "grad_norm": 2.8246052265167236, + "learning_rate": 6.444458970317507e-06, + "loss": 1.106, + "step": 10004 + }, + { + "epoch": 0.6273120571822685, + "grad_norm": 3.2415034770965576, + "learning_rate": 6.4425609780449474e-06, + "loss": 0.9941, + "step": 10005 + }, + { + "epoch": 0.6273747570380588, + "grad_norm": 3.096151113510132, + "learning_rate": 6.440663132481326e-06, + "loss": 1.1006, + "step": 10006 + }, + { + "epoch": 0.6274374568938491, + "grad_norm": 3.117185592651367, + "learning_rate": 6.438765433704916e-06, + "loss": 1.0486, + "step": 10007 + }, + { + "epoch": 0.6275001567496394, + "grad_norm": 3.1183481216430664, + "learning_rate": 6.436867881793971e-06, + "loss": 0.9884, + "step": 10008 + }, + { + "epoch": 0.6275628566054298, + "grad_norm": 3.44386887550354, + "learning_rate": 6.434970476826754e-06, + "loss": 1.3103, + "step": 10009 + }, + { + "epoch": 0.6276255564612201, + "grad_norm": 3.0855231285095215, + "learning_rate": 6.433073218881507e-06, + 
"loss": 1.1732, + "step": 10010 + }, + { + "epoch": 0.6276882563170104, + "grad_norm": 3.267577648162842, + "learning_rate": 6.431176108036479e-06, + "loss": 1.1361, + "step": 10011 + }, + { + "epoch": 0.6277509561728009, + "grad_norm": 3.0166373252868652, + "learning_rate": 6.429279144369901e-06, + "loss": 1.0266, + "step": 10012 + }, + { + "epoch": 0.6278136560285912, + "grad_norm": 3.3111681938171387, + "learning_rate": 6.427382327960008e-06, + "loss": 1.2149, + "step": 10013 + }, + { + "epoch": 0.6278763558843815, + "grad_norm": 3.137843370437622, + "learning_rate": 6.425485658885026e-06, + "loss": 1.1125, + "step": 10014 + }, + { + "epoch": 0.6279390557401718, + "grad_norm": 3.569352388381958, + "learning_rate": 6.423589137223168e-06, + "loss": 0.9202, + "step": 10015 + }, + { + "epoch": 0.6280017555959622, + "grad_norm": 2.9221951961517334, + "learning_rate": 6.421692763052654e-06, + "loss": 1.2236, + "step": 10016 + }, + { + "epoch": 0.6280644554517525, + "grad_norm": 3.3758952617645264, + "learning_rate": 6.419796536451684e-06, + "loss": 0.9965, + "step": 10017 + }, + { + "epoch": 0.6281271553075428, + "grad_norm": 2.988309144973755, + "learning_rate": 6.417900457498463e-06, + "loss": 1.1002, + "step": 10018 + }, + { + "epoch": 0.6281898551633331, + "grad_norm": 2.928121566772461, + "learning_rate": 6.416004526271182e-06, + "loss": 1.173, + "step": 10019 + }, + { + "epoch": 0.6282525550191235, + "grad_norm": 3.0027291774749756, + "learning_rate": 6.414108742848032e-06, + "loss": 1.1977, + "step": 10020 + }, + { + "epoch": 0.6283152548749138, + "grad_norm": 3.143127679824829, + "learning_rate": 6.412213107307192e-06, + "loss": 0.9652, + "step": 10021 + }, + { + "epoch": 0.6283779547307041, + "grad_norm": 3.5940027236938477, + "learning_rate": 6.410317619726843e-06, + "loss": 1.0163, + "step": 10022 + }, + { + "epoch": 0.6284406545864945, + "grad_norm": 3.4347076416015625, + "learning_rate": 6.40842228018515e-06, + "loss": 1.0062, + "step": 10023 + }, + { + "epoch": 0.6285033544422848, + "grad_norm": 2.958139181137085, + "learning_rate": 6.406527088760279e-06, + "loss": 1.1758, + "step": 10024 + }, + { + "epoch": 0.6285660542980751, + "grad_norm": 3.0496506690979004, + "learning_rate": 6.40463204553039e-06, + "loss": 1.2502, + "step": 10025 + }, + { + "epoch": 0.6286287541538654, + "grad_norm": 3.047802686691284, + "learning_rate": 6.40273715057363e-06, + "loss": 0.9418, + "step": 10026 + }, + { + "epoch": 0.6286914540096558, + "grad_norm": 3.3057734966278076, + "learning_rate": 6.400842403968148e-06, + "loss": 0.9625, + "step": 10027 + }, + { + "epoch": 0.6287541538654461, + "grad_norm": 3.313215732574463, + "learning_rate": 6.398947805792082e-06, + "loss": 1.0157, + "step": 10028 + }, + { + "epoch": 0.6288168537212364, + "grad_norm": 3.31141996383667, + "learning_rate": 6.3970533561235685e-06, + "loss": 1.0576, + "step": 10029 + }, + { + "epoch": 0.6288795535770267, + "grad_norm": 3.4703919887542725, + "learning_rate": 6.3951590550407275e-06, + "loss": 1.1293, + "step": 10030 + }, + { + "epoch": 0.6289422534328171, + "grad_norm": 3.1458377838134766, + "learning_rate": 6.393264902621688e-06, + "loss": 0.9975, + "step": 10031 + }, + { + "epoch": 0.6290049532886074, + "grad_norm": 3.07254695892334, + "learning_rate": 6.391370898944559e-06, + "loss": 1.1483, + "step": 10032 + }, + { + "epoch": 0.6290676531443977, + "grad_norm": 2.9179680347442627, + "learning_rate": 6.3894770440874525e-06, + "loss": 1.0635, + "step": 10033 + }, + { + "epoch": 0.629130353000188, + "grad_norm": 
3.2694501876831055, + "learning_rate": 6.387583338128471e-06, + "loss": 1.1157, + "step": 10034 + }, + { + "epoch": 0.6291930528559785, + "grad_norm": 3.1735637187957764, + "learning_rate": 6.38568978114571e-06, + "loss": 1.1145, + "step": 10035 + }, + { + "epoch": 0.6292557527117688, + "grad_norm": 3.104574680328369, + "learning_rate": 6.3837963732172615e-06, + "loss": 1.2013, + "step": 10036 + }, + { + "epoch": 0.6293184525675591, + "grad_norm": 3.097115993499756, + "learning_rate": 6.3819031144212065e-06, + "loss": 1.0282, + "step": 10037 + }, + { + "epoch": 0.6293811524233495, + "grad_norm": 3.4054291248321533, + "learning_rate": 6.380010004835626e-06, + "loss": 1.0837, + "step": 10038 + }, + { + "epoch": 0.6294438522791398, + "grad_norm": 3.232577085494995, + "learning_rate": 6.37811704453859e-06, + "loss": 1.116, + "step": 10039 + }, + { + "epoch": 0.6295065521349301, + "grad_norm": 3.2914655208587646, + "learning_rate": 6.376224233608167e-06, + "loss": 1.2465, + "step": 10040 + }, + { + "epoch": 0.6295692519907204, + "grad_norm": 3.3476932048797607, + "learning_rate": 6.374331572122413e-06, + "loss": 1.044, + "step": 10041 + }, + { + "epoch": 0.6296319518465108, + "grad_norm": 2.8422060012817383, + "learning_rate": 6.372439060159385e-06, + "loss": 1.1836, + "step": 10042 + }, + { + "epoch": 0.6296946517023011, + "grad_norm": 3.61954927444458, + "learning_rate": 6.370546697797126e-06, + "loss": 0.9918, + "step": 10043 + }, + { + "epoch": 0.6297573515580914, + "grad_norm": 3.012073040008545, + "learning_rate": 6.368654485113681e-06, + "loss": 1.1265, + "step": 10044 + }, + { + "epoch": 0.6298200514138818, + "grad_norm": 3.1665403842926025, + "learning_rate": 6.366762422187086e-06, + "loss": 1.0268, + "step": 10045 + }, + { + "epoch": 0.6298827512696721, + "grad_norm": 3.398782253265381, + "learning_rate": 6.364870509095364e-06, + "loss": 0.9089, + "step": 10046 + }, + { + "epoch": 0.6299454511254624, + "grad_norm": 2.9247119426727295, + "learning_rate": 6.362978745916544e-06, + "loss": 1.0618, + "step": 10047 + }, + { + "epoch": 0.6300081509812527, + "grad_norm": 3.2375965118408203, + "learning_rate": 6.361087132728636e-06, + "loss": 1.1281, + "step": 10048 + }, + { + "epoch": 0.6300708508370431, + "grad_norm": 3.293884038925171, + "learning_rate": 6.3591956696096565e-06, + "loss": 1.0272, + "step": 10049 + }, + { + "epoch": 0.6301335506928334, + "grad_norm": 3.359851360321045, + "learning_rate": 6.357304356637606e-06, + "loss": 1.1576, + "step": 10050 + }, + { + "epoch": 0.6301962505486237, + "grad_norm": 3.210099458694458, + "learning_rate": 6.355413193890482e-06, + "loss": 1.2104, + "step": 10051 + }, + { + "epoch": 0.630258950404414, + "grad_norm": 3.1633803844451904, + "learning_rate": 6.353522181446276e-06, + "loss": 1.152, + "step": 10052 + }, + { + "epoch": 0.6303216502602044, + "grad_norm": 3.2498884201049805, + "learning_rate": 6.351631319382976e-06, + "loss": 1.1329, + "step": 10053 + }, + { + "epoch": 0.6303843501159947, + "grad_norm": 2.9411001205444336, + "learning_rate": 6.3497406077785605e-06, + "loss": 1.1716, + "step": 10054 + }, + { + "epoch": 0.630447049971785, + "grad_norm": 3.2200331687927246, + "learning_rate": 6.3478500467109995e-06, + "loss": 0.951, + "step": 10055 + }, + { + "epoch": 0.6305097498275753, + "grad_norm": 3.218526601791382, + "learning_rate": 6.345959636258265e-06, + "loss": 1.0892, + "step": 10056 + }, + { + "epoch": 0.6305724496833657, + "grad_norm": 3.531299352645874, + "learning_rate": 6.344069376498312e-06, + "loss": 0.9078, + "step": 10057 
+ }, + { + "epoch": 0.6306351495391561, + "grad_norm": 3.729872465133667, + "learning_rate": 6.3421792675091e-06, + "loss": 1.0643, + "step": 10058 + }, + { + "epoch": 0.6306978493949464, + "grad_norm": 3.0595755577087402, + "learning_rate": 6.340289309368572e-06, + "loss": 0.9898, + "step": 10059 + }, + { + "epoch": 0.6307605492507368, + "grad_norm": 3.771362543106079, + "learning_rate": 6.338399502154675e-06, + "loss": 0.8716, + "step": 10060 + }, + { + "epoch": 0.6308232491065271, + "grad_norm": 3.2304298877716064, + "learning_rate": 6.3365098459453416e-06, + "loss": 0.989, + "step": 10061 + }, + { + "epoch": 0.6308859489623174, + "grad_norm": 3.362837553024292, + "learning_rate": 6.334620340818502e-06, + "loss": 1.0814, + "step": 10062 + }, + { + "epoch": 0.6309486488181077, + "grad_norm": 3.296299695968628, + "learning_rate": 6.3327309868520795e-06, + "loss": 0.9347, + "step": 10063 + }, + { + "epoch": 0.6310113486738981, + "grad_norm": 3.398212194442749, + "learning_rate": 6.33084178412399e-06, + "loss": 1.0829, + "step": 10064 + }, + { + "epoch": 0.6310740485296884, + "grad_norm": 3.6553516387939453, + "learning_rate": 6.328952732712148e-06, + "loss": 1.0669, + "step": 10065 + }, + { + "epoch": 0.6311367483854787, + "grad_norm": 3.373263120651245, + "learning_rate": 6.3270638326944536e-06, + "loss": 1.1855, + "step": 10066 + }, + { + "epoch": 0.6311994482412691, + "grad_norm": 3.302827835083008, + "learning_rate": 6.325175084148809e-06, + "loss": 0.9645, + "step": 10067 + }, + { + "epoch": 0.6312621480970594, + "grad_norm": 3.515052556991577, + "learning_rate": 6.323286487153103e-06, + "loss": 1.2624, + "step": 10068 + }, + { + "epoch": 0.6313248479528497, + "grad_norm": 3.5124549865722656, + "learning_rate": 6.321398041785225e-06, + "loss": 1.0754, + "step": 10069 + }, + { + "epoch": 0.63138754780864, + "grad_norm": 3.1665608882904053, + "learning_rate": 6.31950974812305e-06, + "loss": 0.9702, + "step": 10070 + }, + { + "epoch": 0.6314502476644304, + "grad_norm": 2.8975207805633545, + "learning_rate": 6.317621606244455e-06, + "loss": 1.1663, + "step": 10071 + }, + { + "epoch": 0.6315129475202207, + "grad_norm": 3.3559067249298096, + "learning_rate": 6.315733616227304e-06, + "loss": 1.0119, + "step": 10072 + }, + { + "epoch": 0.631575647376011, + "grad_norm": 3.4074344635009766, + "learning_rate": 6.313845778149461e-06, + "loss": 1.1003, + "step": 10073 + }, + { + "epoch": 0.6316383472318013, + "grad_norm": 3.583678722381592, + "learning_rate": 6.311958092088779e-06, + "loss": 1.0146, + "step": 10074 + }, + { + "epoch": 0.6317010470875917, + "grad_norm": 2.9555764198303223, + "learning_rate": 6.3100705581231045e-06, + "loss": 1.1796, + "step": 10075 + }, + { + "epoch": 0.631763746943382, + "grad_norm": 3.545027494430542, + "learning_rate": 6.308183176330284e-06, + "loss": 0.9313, + "step": 10076 + }, + { + "epoch": 0.6318264467991723, + "grad_norm": 3.405585289001465, + "learning_rate": 6.306295946788149e-06, + "loss": 1.0167, + "step": 10077 + }, + { + "epoch": 0.6318891466549627, + "grad_norm": 3.0592801570892334, + "learning_rate": 6.304408869574532e-06, + "loss": 1.1917, + "step": 10078 + }, + { + "epoch": 0.631951846510753, + "grad_norm": 3.037893295288086, + "learning_rate": 6.302521944767253e-06, + "loss": 1.1399, + "step": 10079 + }, + { + "epoch": 0.6320145463665433, + "grad_norm": 3.2228946685791016, + "learning_rate": 6.300635172444132e-06, + "loss": 1.0495, + "step": 10080 + }, + { + "epoch": 0.6320772462223337, + "grad_norm": 3.527369737625122, + "learning_rate": 
6.2987485526829765e-06, + "loss": 1.188, + "step": 10081 + }, + { + "epoch": 0.6321399460781241, + "grad_norm": 3.2865376472473145, + "learning_rate": 6.296862085561595e-06, + "loss": 1.0766, + "step": 10082 + }, + { + "epoch": 0.6322026459339144, + "grad_norm": 3.256059408187866, + "learning_rate": 6.294975771157779e-06, + "loss": 1.0941, + "step": 10083 + }, + { + "epoch": 0.6322653457897047, + "grad_norm": 2.868054151535034, + "learning_rate": 6.293089609549325e-06, + "loss": 0.9212, + "step": 10084 + }, + { + "epoch": 0.632328045645495, + "grad_norm": 2.9278342723846436, + "learning_rate": 6.2912036008140185e-06, + "loss": 1.128, + "step": 10085 + }, + { + "epoch": 0.6323907455012854, + "grad_norm": 3.9027180671691895, + "learning_rate": 6.289317745029637e-06, + "loss": 1.1265, + "step": 10086 + }, + { + "epoch": 0.6324534453570757, + "grad_norm": 3.285010814666748, + "learning_rate": 6.287432042273954e-06, + "loss": 1.0651, + "step": 10087 + }, + { + "epoch": 0.632516145212866, + "grad_norm": 3.3660495281219482, + "learning_rate": 6.285546492624734e-06, + "loss": 1.1743, + "step": 10088 + }, + { + "epoch": 0.6325788450686564, + "grad_norm": 2.96510648727417, + "learning_rate": 6.283661096159744e-06, + "loss": 1.1698, + "step": 10089 + }, + { + "epoch": 0.6326415449244467, + "grad_norm": 3.251950979232788, + "learning_rate": 6.281775852956726e-06, + "loss": 1.0105, + "step": 10090 + }, + { + "epoch": 0.632704244780237, + "grad_norm": 3.4252941608428955, + "learning_rate": 6.279890763093439e-06, + "loss": 1.1558, + "step": 10091 + }, + { + "epoch": 0.6327669446360273, + "grad_norm": 3.3513271808624268, + "learning_rate": 6.278005826647617e-06, + "loss": 1.1692, + "step": 10092 + }, + { + "epoch": 0.6328296444918177, + "grad_norm": 3.4616284370422363, + "learning_rate": 6.276121043696999e-06, + "loss": 1.0676, + "step": 10093 + }, + { + "epoch": 0.632892344347608, + "grad_norm": 3.0359888076782227, + "learning_rate": 6.27423641431931e-06, + "loss": 1.0132, + "step": 10094 + }, + { + "epoch": 0.6329550442033983, + "grad_norm": 3.411609411239624, + "learning_rate": 6.272351938592274e-06, + "loss": 1.1629, + "step": 10095 + }, + { + "epoch": 0.6330177440591886, + "grad_norm": 3.3363308906555176, + "learning_rate": 6.270467616593608e-06, + "loss": 1.1395, + "step": 10096 + }, + { + "epoch": 0.633080443914979, + "grad_norm": 3.1853604316711426, + "learning_rate": 6.26858344840102e-06, + "loss": 1.1012, + "step": 10097 + }, + { + "epoch": 0.6331431437707693, + "grad_norm": 3.1881051063537598, + "learning_rate": 6.266699434092214e-06, + "loss": 1.2827, + "step": 10098 + }, + { + "epoch": 0.6332058436265596, + "grad_norm": 3.419293165206909, + "learning_rate": 6.264815573744884e-06, + "loss": 1.1766, + "step": 10099 + }, + { + "epoch": 0.63326854348235, + "grad_norm": 3.1379780769348145, + "learning_rate": 6.262931867436725e-06, + "loss": 1.1307, + "step": 10100 + }, + { + "epoch": 0.6333312433381403, + "grad_norm": 3.232933521270752, + "learning_rate": 6.261048315245419e-06, + "loss": 1.0224, + "step": 10101 + }, + { + "epoch": 0.6333939431939306, + "grad_norm": 3.224032402038574, + "learning_rate": 6.259164917248645e-06, + "loss": 1.2738, + "step": 10102 + }, + { + "epoch": 0.6334566430497209, + "grad_norm": 3.1940042972564697, + "learning_rate": 6.2572816735240696e-06, + "loss": 1.2077, + "step": 10103 + }, + { + "epoch": 0.6335193429055113, + "grad_norm": 3.253397226333618, + "learning_rate": 6.255398584149366e-06, + "loss": 1.0544, + "step": 10104 + }, + { + "epoch": 0.6335820427613017, + 
"grad_norm": 3.0733299255371094, + "learning_rate": 6.253515649202184e-06, + "loss": 1.1474, + "step": 10105 + }, + { + "epoch": 0.633644742617092, + "grad_norm": 3.116682767868042, + "learning_rate": 6.2516328687601805e-06, + "loss": 1.1779, + "step": 10106 + }, + { + "epoch": 0.6337074424728824, + "grad_norm": 3.0536396503448486, + "learning_rate": 6.249750242901005e-06, + "loss": 1.0219, + "step": 10107 + }, + { + "epoch": 0.6337701423286727, + "grad_norm": 3.381244421005249, + "learning_rate": 6.2478677717022894e-06, + "loss": 1.0945, + "step": 10108 + }, + { + "epoch": 0.633832842184463, + "grad_norm": 3.005819320678711, + "learning_rate": 6.245985455241673e-06, + "loss": 1.0697, + "step": 10109 + }, + { + "epoch": 0.6338955420402533, + "grad_norm": 2.9674487113952637, + "learning_rate": 6.24410329359678e-06, + "loss": 1.1434, + "step": 10110 + }, + { + "epoch": 0.6339582418960437, + "grad_norm": 3.3498988151550293, + "learning_rate": 6.24222128684523e-06, + "loss": 1.0923, + "step": 10111 + }, + { + "epoch": 0.634020941751834, + "grad_norm": 3.4237260818481445, + "learning_rate": 6.240339435064638e-06, + "loss": 1.1839, + "step": 10112 + }, + { + "epoch": 0.6340836416076243, + "grad_norm": 3.0963315963745117, + "learning_rate": 6.238457738332615e-06, + "loss": 1.1501, + "step": 10113 + }, + { + "epoch": 0.6341463414634146, + "grad_norm": 2.8107240200042725, + "learning_rate": 6.236576196726756e-06, + "loss": 1.2252, + "step": 10114 + }, + { + "epoch": 0.634209041319205, + "grad_norm": 3.2799136638641357, + "learning_rate": 6.2346948103246595e-06, + "loss": 1.0995, + "step": 10115 + }, + { + "epoch": 0.6342717411749953, + "grad_norm": 2.8796820640563965, + "learning_rate": 6.232813579203915e-06, + "loss": 1.0974, + "step": 10116 + }, + { + "epoch": 0.6343344410307856, + "grad_norm": 3.2277004718780518, + "learning_rate": 6.230932503442101e-06, + "loss": 1.1488, + "step": 10117 + }, + { + "epoch": 0.634397140886576, + "grad_norm": 3.0904202461242676, + "learning_rate": 6.229051583116796e-06, + "loss": 1.1757, + "step": 10118 + }, + { + "epoch": 0.6344598407423663, + "grad_norm": 3.360837936401367, + "learning_rate": 6.227170818305568e-06, + "loss": 1.116, + "step": 10119 + }, + { + "epoch": 0.6345225405981566, + "grad_norm": 3.530846357345581, + "learning_rate": 6.225290209085982e-06, + "loss": 1.1294, + "step": 10120 + }, + { + "epoch": 0.6345852404539469, + "grad_norm": 3.030189037322998, + "learning_rate": 6.223409755535591e-06, + "loss": 1.0334, + "step": 10121 + }, + { + "epoch": 0.6346479403097373, + "grad_norm": 3.078890562057495, + "learning_rate": 6.2215294577319475e-06, + "loss": 0.9336, + "step": 10122 + }, + { + "epoch": 0.6347106401655276, + "grad_norm": 3.2626407146453857, + "learning_rate": 6.219649315752594e-06, + "loss": 1.0836, + "step": 10123 + }, + { + "epoch": 0.6347733400213179, + "grad_norm": 3.2196359634399414, + "learning_rate": 6.2177693296750675e-06, + "loss": 1.2995, + "step": 10124 + }, + { + "epoch": 0.6348360398771082, + "grad_norm": 3.4261205196380615, + "learning_rate": 6.215889499576898e-06, + "loss": 1.0372, + "step": 10125 + }, + { + "epoch": 0.6348987397328986, + "grad_norm": 2.99908447265625, + "learning_rate": 6.214009825535612e-06, + "loss": 1.0645, + "step": 10126 + }, + { + "epoch": 0.6349614395886889, + "grad_norm": 3.3014307022094727, + "learning_rate": 6.212130307628726e-06, + "loss": 1.2398, + "step": 10127 + }, + { + "epoch": 0.6350241394444793, + "grad_norm": 3.378509998321533, + "learning_rate": 6.210250945933751e-06, + "loss": 0.9979, + 
"step": 10128 + }, + { + "epoch": 0.6350868393002697, + "grad_norm": 3.5456855297088623, + "learning_rate": 6.208371740528194e-06, + "loss": 1.1287, + "step": 10129 + }, + { + "epoch": 0.63514953915606, + "grad_norm": 3.6691982746124268, + "learning_rate": 6.20649269148955e-06, + "loss": 1.1927, + "step": 10130 + }, + { + "epoch": 0.6352122390118503, + "grad_norm": 3.024803876876831, + "learning_rate": 6.204613798895315e-06, + "loss": 0.9691, + "step": 10131 + }, + { + "epoch": 0.6352749388676406, + "grad_norm": 3.5145201683044434, + "learning_rate": 6.202735062822971e-06, + "loss": 1.2154, + "step": 10132 + }, + { + "epoch": 0.635337638723431, + "grad_norm": 3.278595209121704, + "learning_rate": 6.20085648335e-06, + "loss": 1.1247, + "step": 10133 + }, + { + "epoch": 0.6354003385792213, + "grad_norm": 3.0920143127441406, + "learning_rate": 6.198978060553873e-06, + "loss": 1.021, + "step": 10134 + }, + { + "epoch": 0.6354630384350116, + "grad_norm": 2.9054062366485596, + "learning_rate": 6.197099794512056e-06, + "loss": 1.1927, + "step": 10135 + }, + { + "epoch": 0.635525738290802, + "grad_norm": 3.144688606262207, + "learning_rate": 6.195221685302011e-06, + "loss": 1.1355, + "step": 10136 + }, + { + "epoch": 0.6355884381465923, + "grad_norm": 3.234285593032837, + "learning_rate": 6.193343733001188e-06, + "loss": 0.8837, + "step": 10137 + }, + { + "epoch": 0.6356511380023826, + "grad_norm": 3.383957624435425, + "learning_rate": 6.191465937687038e-06, + "loss": 1.1295, + "step": 10138 + }, + { + "epoch": 0.6357138378581729, + "grad_norm": 2.909234046936035, + "learning_rate": 6.189588299436997e-06, + "loss": 0.9917, + "step": 10139 + }, + { + "epoch": 0.6357765377139633, + "grad_norm": 3.100818395614624, + "learning_rate": 6.187710818328503e-06, + "loss": 0.987, + "step": 10140 + }, + { + "epoch": 0.6358392375697536, + "grad_norm": 3.127135753631592, + "learning_rate": 6.18583349443898e-06, + "loss": 0.9871, + "step": 10141 + }, + { + "epoch": 0.6359019374255439, + "grad_norm": 3.1789350509643555, + "learning_rate": 6.1839563278458506e-06, + "loss": 1.1411, + "step": 10142 + }, + { + "epoch": 0.6359646372813342, + "grad_norm": 3.487429141998291, + "learning_rate": 6.1820793186265285e-06, + "loss": 0.9499, + "step": 10143 + }, + { + "epoch": 0.6360273371371246, + "grad_norm": 3.385505199432373, + "learning_rate": 6.180202466858423e-06, + "loss": 1.2304, + "step": 10144 + }, + { + "epoch": 0.6360900369929149, + "grad_norm": 3.220308303833008, + "learning_rate": 6.178325772618933e-06, + "loss": 1.1464, + "step": 10145 + }, + { + "epoch": 0.6361527368487052, + "grad_norm": 3.090691089630127, + "learning_rate": 6.176449235985456e-06, + "loss": 1.1148, + "step": 10146 + }, + { + "epoch": 0.6362154367044955, + "grad_norm": 3.0201141834259033, + "learning_rate": 6.174572857035379e-06, + "loss": 1.0207, + "step": 10147 + }, + { + "epoch": 0.6362781365602859, + "grad_norm": 3.364670753479004, + "learning_rate": 6.172696635846085e-06, + "loss": 1.0647, + "step": 10148 + }, + { + "epoch": 0.6363408364160762, + "grad_norm": 3.2944412231445312, + "learning_rate": 6.170820572494948e-06, + "loss": 1.073, + "step": 10149 + }, + { + "epoch": 0.6364035362718665, + "grad_norm": 3.404794931411743, + "learning_rate": 6.168944667059338e-06, + "loss": 1.2159, + "step": 10150 + }, + { + "epoch": 0.636466236127657, + "grad_norm": 3.235642194747925, + "learning_rate": 6.167068919616619e-06, + "loss": 1.0013, + "step": 10151 + }, + { + "epoch": 0.6365289359834473, + "grad_norm": 3.5383567810058594, + "learning_rate": 
6.165193330244144e-06, + "loss": 1.1065, + "step": 10152 + }, + { + "epoch": 0.6365916358392376, + "grad_norm": 3.121166706085205, + "learning_rate": 6.163317899019263e-06, + "loss": 1.0868, + "step": 10153 + }, + { + "epoch": 0.6366543356950279, + "grad_norm": 3.19498348236084, + "learning_rate": 6.161442626019319e-06, + "loss": 1.0577, + "step": 10154 + }, + { + "epoch": 0.6367170355508183, + "grad_norm": 3.381574869155884, + "learning_rate": 6.159567511321651e-06, + "loss": 0.973, + "step": 10155 + }, + { + "epoch": 0.6367797354066086, + "grad_norm": 2.951184034347534, + "learning_rate": 6.157692555003585e-06, + "loss": 1.161, + "step": 10156 + }, + { + "epoch": 0.6368424352623989, + "grad_norm": 3.28328537940979, + "learning_rate": 6.155817757142444e-06, + "loss": 1.1414, + "step": 10157 + }, + { + "epoch": 0.6369051351181892, + "grad_norm": 3.3195760250091553, + "learning_rate": 6.1539431178155505e-06, + "loss": 1.1933, + "step": 10158 + }, + { + "epoch": 0.6369678349739796, + "grad_norm": 3.2926595211029053, + "learning_rate": 6.152068637100208e-06, + "loss": 1.295, + "step": 10159 + }, + { + "epoch": 0.6370305348297699, + "grad_norm": 2.966952323913574, + "learning_rate": 6.150194315073724e-06, + "loss": 1.1521, + "step": 10160 + }, + { + "epoch": 0.6370932346855602, + "grad_norm": 3.0852560997009277, + "learning_rate": 6.148320151813394e-06, + "loss": 1.2894, + "step": 10161 + }, + { + "epoch": 0.6371559345413506, + "grad_norm": 3.7112691402435303, + "learning_rate": 6.14644614739651e-06, + "loss": 1.0364, + "step": 10162 + }, + { + "epoch": 0.6372186343971409, + "grad_norm": 3.2338812351226807, + "learning_rate": 6.1445723019003535e-06, + "loss": 1.1298, + "step": 10163 + }, + { + "epoch": 0.6372813342529312, + "grad_norm": 3.7342402935028076, + "learning_rate": 6.142698615402205e-06, + "loss": 1.2119, + "step": 10164 + }, + { + "epoch": 0.6373440341087215, + "grad_norm": 3.0255935192108154, + "learning_rate": 6.140825087979334e-06, + "loss": 1.175, + "step": 10165 + }, + { + "epoch": 0.6374067339645119, + "grad_norm": 3.4019734859466553, + "learning_rate": 6.138951719709003e-06, + "loss": 1.0005, + "step": 10166 + }, + { + "epoch": 0.6374694338203022, + "grad_norm": 3.1139700412750244, + "learning_rate": 6.137078510668475e-06, + "loss": 1.0532, + "step": 10167 + }, + { + "epoch": 0.6375321336760925, + "grad_norm": 2.914983034133911, + "learning_rate": 6.135205460934996e-06, + "loss": 1.2476, + "step": 10168 + }, + { + "epoch": 0.6375948335318828, + "grad_norm": 3.4585180282592773, + "learning_rate": 6.133332570585813e-06, + "loss": 1.0933, + "step": 10169 + }, + { + "epoch": 0.6376575333876732, + "grad_norm": 3.4381203651428223, + "learning_rate": 6.131459839698163e-06, + "loss": 1.1492, + "step": 10170 + }, + { + "epoch": 0.6377202332434635, + "grad_norm": 3.0838394165039062, + "learning_rate": 6.12958726834928e-06, + "loss": 1.1183, + "step": 10171 + }, + { + "epoch": 0.6377829330992538, + "grad_norm": 3.1407582759857178, + "learning_rate": 6.1277148566163845e-06, + "loss": 1.2847, + "step": 10172 + }, + { + "epoch": 0.6378456329550442, + "grad_norm": 3.153144121170044, + "learning_rate": 6.1258426045767e-06, + "loss": 1.067, + "step": 10173 + }, + { + "epoch": 0.6379083328108346, + "grad_norm": 3.721931219100952, + "learning_rate": 6.123970512307433e-06, + "loss": 0.9197, + "step": 10174 + }, + { + "epoch": 0.6379710326666249, + "grad_norm": 3.1531002521514893, + "learning_rate": 6.1220985798857934e-06, + "loss": 0.99, + "step": 10175 + }, + { + "epoch": 0.6380337325224152, + 
"grad_norm": 3.316289186477661, + "learning_rate": 6.120226807388976e-06, + "loss": 1.0741, + "step": 10176 + }, + { + "epoch": 0.6380964323782056, + "grad_norm": 3.091071605682373, + "learning_rate": 6.118355194894174e-06, + "loss": 1.1142, + "step": 10177 + }, + { + "epoch": 0.6381591322339959, + "grad_norm": 3.2504844665527344, + "learning_rate": 6.1164837424785765e-06, + "loss": 1.1665, + "step": 10178 + }, + { + "epoch": 0.6382218320897862, + "grad_norm": 3.319690227508545, + "learning_rate": 6.114612450219356e-06, + "loss": 1.085, + "step": 10179 + }, + { + "epoch": 0.6382845319455765, + "grad_norm": 3.136791467666626, + "learning_rate": 6.11274131819369e-06, + "loss": 1.0922, + "step": 10180 + }, + { + "epoch": 0.6383472318013669, + "grad_norm": 2.9714975357055664, + "learning_rate": 6.11087034647874e-06, + "loss": 1.1407, + "step": 10181 + }, + { + "epoch": 0.6384099316571572, + "grad_norm": 2.9717888832092285, + "learning_rate": 6.10899953515167e-06, + "loss": 1.1624, + "step": 10182 + }, + { + "epoch": 0.6384726315129475, + "grad_norm": 3.3909518718719482, + "learning_rate": 6.107128884289625e-06, + "loss": 1.0586, + "step": 10183 + }, + { + "epoch": 0.6385353313687379, + "grad_norm": 3.241807222366333, + "learning_rate": 6.105258393969757e-06, + "loss": 1.118, + "step": 10184 + }, + { + "epoch": 0.6385980312245282, + "grad_norm": 3.123690605163574, + "learning_rate": 6.103388064269202e-06, + "loss": 1.0293, + "step": 10185 + }, + { + "epoch": 0.6386607310803185, + "grad_norm": 3.461704969406128, + "learning_rate": 6.101517895265094e-06, + "loss": 1.2154, + "step": 10186 + }, + { + "epoch": 0.6387234309361088, + "grad_norm": 3.4866268634796143, + "learning_rate": 6.099647887034558e-06, + "loss": 1.035, + "step": 10187 + }, + { + "epoch": 0.6387861307918992, + "grad_norm": 3.163069009780884, + "learning_rate": 6.0977780396547115e-06, + "loss": 1.1847, + "step": 10188 + }, + { + "epoch": 0.6388488306476895, + "grad_norm": 3.2428221702575684, + "learning_rate": 6.0959083532026735e-06, + "loss": 0.9915, + "step": 10189 + }, + { + "epoch": 0.6389115305034798, + "grad_norm": 3.809239387512207, + "learning_rate": 6.0940388277555415e-06, + "loss": 1.0352, + "step": 10190 + }, + { + "epoch": 0.6389742303592701, + "grad_norm": 3.158332586288452, + "learning_rate": 6.09216946339042e-06, + "loss": 0.9937, + "step": 10191 + }, + { + "epoch": 0.6390369302150605, + "grad_norm": 3.74198317527771, + "learning_rate": 6.0903002601844e-06, + "loss": 1.1668, + "step": 10192 + }, + { + "epoch": 0.6390996300708508, + "grad_norm": 3.1735308170318604, + "learning_rate": 6.088431218214571e-06, + "loss": 1.0185, + "step": 10193 + }, + { + "epoch": 0.6391623299266411, + "grad_norm": 3.5373892784118652, + "learning_rate": 6.086562337558006e-06, + "loss": 1.0169, + "step": 10194 + }, + { + "epoch": 0.6392250297824315, + "grad_norm": 3.28214955329895, + "learning_rate": 6.084693618291784e-06, + "loss": 1.0812, + "step": 10195 + }, + { + "epoch": 0.6392877296382218, + "grad_norm": 3.3823492527008057, + "learning_rate": 6.082825060492965e-06, + "loss": 1.0594, + "step": 10196 + }, + { + "epoch": 0.6393504294940122, + "grad_norm": 2.988968849182129, + "learning_rate": 6.080956664238613e-06, + "loss": 1.172, + "step": 10197 + }, + { + "epoch": 0.6394131293498025, + "grad_norm": 3.029355525970459, + "learning_rate": 6.0790884296057796e-06, + "loss": 1.1108, + "step": 10198 + }, + { + "epoch": 0.6394758292055929, + "grad_norm": 2.94805645942688, + "learning_rate": 6.07722035667151e-06, + "loss": 1.1409, + "step": 
10199 + }, + { + "epoch": 0.6395385290613832, + "grad_norm": 3.3491084575653076, + "learning_rate": 6.075352445512845e-06, + "loss": 0.9274, + "step": 10200 + }, + { + "epoch": 0.6396012289171735, + "grad_norm": 3.1453943252563477, + "learning_rate": 6.073484696206815e-06, + "loss": 1.2353, + "step": 10201 + }, + { + "epoch": 0.6396639287729639, + "grad_norm": 3.2617626190185547, + "learning_rate": 6.07161710883045e-06, + "loss": 1.2677, + "step": 10202 + }, + { + "epoch": 0.6397266286287542, + "grad_norm": 3.3048479557037354, + "learning_rate": 6.069749683460765e-06, + "loss": 1.2222, + "step": 10203 + }, + { + "epoch": 0.6397893284845445, + "grad_norm": 3.167675733566284, + "learning_rate": 6.067882420174777e-06, + "loss": 1.2439, + "step": 10204 + }, + { + "epoch": 0.6398520283403348, + "grad_norm": 3.18033766746521, + "learning_rate": 6.0660153190494864e-06, + "loss": 1.1578, + "step": 10205 + }, + { + "epoch": 0.6399147281961252, + "grad_norm": 3.1588058471679688, + "learning_rate": 6.064148380161898e-06, + "loss": 1.0014, + "step": 10206 + }, + { + "epoch": 0.6399774280519155, + "grad_norm": 3.014369010925293, + "learning_rate": 6.062281603589002e-06, + "loss": 1.07, + "step": 10207 + }, + { + "epoch": 0.6400401279077058, + "grad_norm": 3.485145092010498, + "learning_rate": 6.060414989407783e-06, + "loss": 0.9982, + "step": 10208 + }, + { + "epoch": 0.6401028277634961, + "grad_norm": 3.0532419681549072, + "learning_rate": 6.058548537695225e-06, + "loss": 1.1307, + "step": 10209 + }, + { + "epoch": 0.6401655276192865, + "grad_norm": 3.1913483142852783, + "learning_rate": 6.0566822485282935e-06, + "loss": 1.1069, + "step": 10210 + }, + { + "epoch": 0.6402282274750768, + "grad_norm": 2.973536491394043, + "learning_rate": 6.054816121983962e-06, + "loss": 1.1814, + "step": 10211 + }, + { + "epoch": 0.6402909273308671, + "grad_norm": 3.2033679485321045, + "learning_rate": 6.052950158139184e-06, + "loss": 1.0174, + "step": 10212 + }, + { + "epoch": 0.6403536271866574, + "grad_norm": 3.351447582244873, + "learning_rate": 6.051084357070916e-06, + "loss": 1.1339, + "step": 10213 + }, + { + "epoch": 0.6404163270424478, + "grad_norm": 3.220198392868042, + "learning_rate": 6.049218718856098e-06, + "loss": 1.0466, + "step": 10214 + }, + { + "epoch": 0.6404790268982381, + "grad_norm": 3.033998727798462, + "learning_rate": 6.047353243571677e-06, + "loss": 1.0914, + "step": 10215 + }, + { + "epoch": 0.6405417267540284, + "grad_norm": 3.4163215160369873, + "learning_rate": 6.0454879312945755e-06, + "loss": 1.2943, + "step": 10216 + }, + { + "epoch": 0.6406044266098188, + "grad_norm": 3.530787229537964, + "learning_rate": 6.043622782101727e-06, + "loss": 0.9762, + "step": 10217 + }, + { + "epoch": 0.6406671264656091, + "grad_norm": 3.073404550552368, + "learning_rate": 6.041757796070049e-06, + "loss": 1.204, + "step": 10218 + }, + { + "epoch": 0.6407298263213994, + "grad_norm": 3.050628185272217, + "learning_rate": 6.03989297327645e-06, + "loss": 1.1069, + "step": 10219 + }, + { + "epoch": 0.6407925261771898, + "grad_norm": 3.144646167755127, + "learning_rate": 6.03802831379784e-06, + "loss": 0.9432, + "step": 10220 + }, + { + "epoch": 0.6408552260329802, + "grad_norm": 3.2888455390930176, + "learning_rate": 6.036163817711114e-06, + "loss": 1.0197, + "step": 10221 + }, + { + "epoch": 0.6409179258887705, + "grad_norm": 3.5116207599639893, + "learning_rate": 6.0342994850931665e-06, + "loss": 0.9339, + "step": 10222 + }, + { + "epoch": 0.6409806257445608, + "grad_norm": 3.356218099594116, + 
"learning_rate": 6.032435316020879e-06, + "loss": 1.0759, + "step": 10223 + }, + { + "epoch": 0.6410433256003512, + "grad_norm": 3.2015230655670166, + "learning_rate": 6.030571310571135e-06, + "loss": 1.0961, + "step": 10224 + }, + { + "epoch": 0.6411060254561415, + "grad_norm": 3.50238037109375, + "learning_rate": 6.028707468820801e-06, + "loss": 1.0361, + "step": 10225 + }, + { + "epoch": 0.6411687253119318, + "grad_norm": 3.4995601177215576, + "learning_rate": 6.026843790846748e-06, + "loss": 1.0381, + "step": 10226 + }, + { + "epoch": 0.6412314251677221, + "grad_norm": 3.2148361206054688, + "learning_rate": 6.024980276725827e-06, + "loss": 1.1533, + "step": 10227 + }, + { + "epoch": 0.6412941250235125, + "grad_norm": 3.074213981628418, + "learning_rate": 6.0231169265348934e-06, + "loss": 1.1093, + "step": 10228 + }, + { + "epoch": 0.6413568248793028, + "grad_norm": 2.9695401191711426, + "learning_rate": 6.021253740350793e-06, + "loss": 1.0843, + "step": 10229 + }, + { + "epoch": 0.6414195247350931, + "grad_norm": 3.171022891998291, + "learning_rate": 6.019390718250361e-06, + "loss": 1.0839, + "step": 10230 + }, + { + "epoch": 0.6414822245908834, + "grad_norm": 3.6685731410980225, + "learning_rate": 6.01752786031043e-06, + "loss": 0.9314, + "step": 10231 + }, + { + "epoch": 0.6415449244466738, + "grad_norm": 3.3702781200408936, + "learning_rate": 6.015665166607824e-06, + "loss": 1.0244, + "step": 10232 + }, + { + "epoch": 0.6416076243024641, + "grad_norm": 3.5055196285247803, + "learning_rate": 6.013802637219361e-06, + "loss": 0.8909, + "step": 10233 + }, + { + "epoch": 0.6416703241582544, + "grad_norm": 3.4327948093414307, + "learning_rate": 6.01194027222185e-06, + "loss": 1.0908, + "step": 10234 + }, + { + "epoch": 0.6417330240140448, + "grad_norm": 3.468870162963867, + "learning_rate": 6.010078071692098e-06, + "loss": 1.1923, + "step": 10235 + }, + { + "epoch": 0.6417957238698351, + "grad_norm": 3.247189521789551, + "learning_rate": 6.0082160357069e-06, + "loss": 1.2004, + "step": 10236 + }, + { + "epoch": 0.6418584237256254, + "grad_norm": 3.928647518157959, + "learning_rate": 6.006354164343047e-06, + "loss": 1.1608, + "step": 10237 + }, + { + "epoch": 0.6419211235814157, + "grad_norm": 3.44557523727417, + "learning_rate": 6.004492457677322e-06, + "loss": 1.1623, + "step": 10238 + }, + { + "epoch": 0.6419838234372061, + "grad_norm": 3.0997445583343506, + "learning_rate": 6.002630915786502e-06, + "loss": 1.1543, + "step": 10239 + }, + { + "epoch": 0.6420465232929964, + "grad_norm": 2.9227211475372314, + "learning_rate": 6.00076953874736e-06, + "loss": 1.0937, + "step": 10240 + }, + { + "epoch": 0.6421092231487867, + "grad_norm": 3.193230390548706, + "learning_rate": 5.998908326636655e-06, + "loss": 1.0869, + "step": 10241 + }, + { + "epoch": 0.642171923004577, + "grad_norm": 3.75260591506958, + "learning_rate": 5.997047279531145e-06, + "loss": 1.1827, + "step": 10242 + }, + { + "epoch": 0.6422346228603674, + "grad_norm": 3.2134740352630615, + "learning_rate": 5.99518639750758e-06, + "loss": 1.1254, + "step": 10243 + }, + { + "epoch": 0.6422973227161578, + "grad_norm": 3.3940131664276123, + "learning_rate": 5.9933256806427056e-06, + "loss": 0.9782, + "step": 10244 + }, + { + "epoch": 0.6423600225719481, + "grad_norm": 2.919872522354126, + "learning_rate": 5.991465129013252e-06, + "loss": 1.0594, + "step": 10245 + }, + { + "epoch": 0.6424227224277385, + "grad_norm": 3.1901369094848633, + "learning_rate": 5.9896047426959535e-06, + "loss": 0.9956, + "step": 10246 + }, + { + "epoch": 
0.6424854222835288, + "grad_norm": 3.3497650623321533, + "learning_rate": 5.987744521767529e-06, + "loss": 1.1081, + "step": 10247 + }, + { + "epoch": 0.6425481221393191, + "grad_norm": 3.443962335586548, + "learning_rate": 5.985884466304695e-06, + "loss": 1.1499, + "step": 10248 + }, + { + "epoch": 0.6426108219951094, + "grad_norm": 3.1183886528015137, + "learning_rate": 5.984024576384163e-06, + "loss": 1.0769, + "step": 10249 + }, + { + "epoch": 0.6426735218508998, + "grad_norm": 3.0530219078063965, + "learning_rate": 5.9821648520826315e-06, + "loss": 1.2066, + "step": 10250 + }, + { + "epoch": 0.6427362217066901, + "grad_norm": 3.2098145484924316, + "learning_rate": 5.980305293476798e-06, + "loss": 1.0994, + "step": 10251 + }, + { + "epoch": 0.6427989215624804, + "grad_norm": 3.148994207382202, + "learning_rate": 5.978445900643348e-06, + "loss": 1.038, + "step": 10252 + }, + { + "epoch": 0.6428616214182707, + "grad_norm": 3.1526846885681152, + "learning_rate": 5.976586673658969e-06, + "loss": 1.1064, + "step": 10253 + }, + { + "epoch": 0.6429243212740611, + "grad_norm": 3.5128285884857178, + "learning_rate": 5.9747276126003265e-06, + "loss": 1.0976, + "step": 10254 + }, + { + "epoch": 0.6429870211298514, + "grad_norm": 3.566089153289795, + "learning_rate": 5.972868717544096e-06, + "loss": 1.0999, + "step": 10255 + }, + { + "epoch": 0.6430497209856417, + "grad_norm": 3.2321083545684814, + "learning_rate": 5.971009988566932e-06, + "loss": 1.0696, + "step": 10256 + }, + { + "epoch": 0.643112420841432, + "grad_norm": 3.3484160900115967, + "learning_rate": 5.969151425745496e-06, + "loss": 1.0811, + "step": 10257 + }, + { + "epoch": 0.6431751206972224, + "grad_norm": 3.6063179969787598, + "learning_rate": 5.967293029156429e-06, + "loss": 0.9855, + "step": 10258 + }, + { + "epoch": 0.6432378205530127, + "grad_norm": 3.896852970123291, + "learning_rate": 5.965434798876372e-06, + "loss": 1.0093, + "step": 10259 + }, + { + "epoch": 0.643300520408803, + "grad_norm": 3.2120280265808105, + "learning_rate": 5.963576734981964e-06, + "loss": 1.1728, + "step": 10260 + }, + { + "epoch": 0.6433632202645934, + "grad_norm": 3.378493070602417, + "learning_rate": 5.961718837549825e-06, + "loss": 1.0185, + "step": 10261 + }, + { + "epoch": 0.6434259201203837, + "grad_norm": 3.2574734687805176, + "learning_rate": 5.959861106656579e-06, + "loss": 0.9645, + "step": 10262 + }, + { + "epoch": 0.643488619976174, + "grad_norm": 3.112872362136841, + "learning_rate": 5.958003542378836e-06, + "loss": 1.2316, + "step": 10263 + }, + { + "epoch": 0.6435513198319643, + "grad_norm": 3.403876304626465, + "learning_rate": 5.956146144793206e-06, + "loss": 1.1227, + "step": 10264 + }, + { + "epoch": 0.6436140196877547, + "grad_norm": 3.4209530353546143, + "learning_rate": 5.954288913976282e-06, + "loss": 0.9875, + "step": 10265 + }, + { + "epoch": 0.643676719543545, + "grad_norm": 3.0777485370635986, + "learning_rate": 5.952431850004663e-06, + "loss": 1.0853, + "step": 10266 + }, + { + "epoch": 0.6437394193993354, + "grad_norm": 3.213702440261841, + "learning_rate": 5.950574952954928e-06, + "loss": 1.0252, + "step": 10267 + }, + { + "epoch": 0.6438021192551258, + "grad_norm": 3.673875570297241, + "learning_rate": 5.948718222903662e-06, + "loss": 1.1699, + "step": 10268 + }, + { + "epoch": 0.6438648191109161, + "grad_norm": 3.204735517501831, + "learning_rate": 5.946861659927432e-06, + "loss": 1.2132, + "step": 10269 + }, + { + "epoch": 0.6439275189667064, + "grad_norm": 3.4728317260742188, + "learning_rate": 
5.945005264102803e-06, + "loss": 1.0431, + "step": 10270 + }, + { + "epoch": 0.6439902188224967, + "grad_norm": 3.325852155685425, + "learning_rate": 5.943149035506337e-06, + "loss": 1.0775, + "step": 10271 + }, + { + "epoch": 0.6440529186782871, + "grad_norm": 3.584749937057495, + "learning_rate": 5.941292974214578e-06, + "loss": 1.0644, + "step": 10272 + }, + { + "epoch": 0.6441156185340774, + "grad_norm": 3.4089529514312744, + "learning_rate": 5.939437080304078e-06, + "loss": 1.0609, + "step": 10273 + }, + { + "epoch": 0.6441783183898677, + "grad_norm": 3.562856674194336, + "learning_rate": 5.937581353851367e-06, + "loss": 1.0555, + "step": 10274 + }, + { + "epoch": 0.644241018245658, + "grad_norm": 3.4891505241394043, + "learning_rate": 5.9357257949329805e-06, + "loss": 0.9924, + "step": 10275 + }, + { + "epoch": 0.6443037181014484, + "grad_norm": 3.1721620559692383, + "learning_rate": 5.933870403625438e-06, + "loss": 1.0327, + "step": 10276 + }, + { + "epoch": 0.6443664179572387, + "grad_norm": 3.9592700004577637, + "learning_rate": 5.93201518000526e-06, + "loss": 1.0882, + "step": 10277 + }, + { + "epoch": 0.644429117813029, + "grad_norm": 3.248495101928711, + "learning_rate": 5.930160124148952e-06, + "loss": 0.9647, + "step": 10278 + }, + { + "epoch": 0.6444918176688194, + "grad_norm": 2.9771909713745117, + "learning_rate": 5.928305236133016e-06, + "loss": 1.1337, + "step": 10279 + }, + { + "epoch": 0.6445545175246097, + "grad_norm": 3.2239232063293457, + "learning_rate": 5.926450516033954e-06, + "loss": 1.1424, + "step": 10280 + }, + { + "epoch": 0.6446172173804, + "grad_norm": 2.928879737854004, + "learning_rate": 5.924595963928248e-06, + "loss": 0.9285, + "step": 10281 + }, + { + "epoch": 0.6446799172361903, + "grad_norm": 3.148930788040161, + "learning_rate": 5.922741579892384e-06, + "loss": 1.1607, + "step": 10282 + }, + { + "epoch": 0.6447426170919807, + "grad_norm": 3.119950532913208, + "learning_rate": 5.920887364002833e-06, + "loss": 1.1385, + "step": 10283 + }, + { + "epoch": 0.644805316947771, + "grad_norm": 3.151738405227661, + "learning_rate": 5.919033316336068e-06, + "loss": 1.0596, + "step": 10284 + }, + { + "epoch": 0.6448680168035613, + "grad_norm": 3.3389503955841064, + "learning_rate": 5.917179436968545e-06, + "loss": 1.0744, + "step": 10285 + }, + { + "epoch": 0.6449307166593516, + "grad_norm": 3.4028823375701904, + "learning_rate": 5.915325725976723e-06, + "loss": 1.0548, + "step": 10286 + }, + { + "epoch": 0.644993416515142, + "grad_norm": 3.19010591506958, + "learning_rate": 5.913472183437043e-06, + "loss": 1.0756, + "step": 10287 + }, + { + "epoch": 0.6450561163709323, + "grad_norm": 3.590054988861084, + "learning_rate": 5.911618809425952e-06, + "loss": 1.1639, + "step": 10288 + }, + { + "epoch": 0.6451188162267226, + "grad_norm": 3.1639676094055176, + "learning_rate": 5.909765604019877e-06, + "loss": 0.99, + "step": 10289 + }, + { + "epoch": 0.6451815160825131, + "grad_norm": 3.353036880493164, + "learning_rate": 5.907912567295248e-06, + "loss": 1.1863, + "step": 10290 + }, + { + "epoch": 0.6452442159383034, + "grad_norm": 3.0310451984405518, + "learning_rate": 5.906059699328483e-06, + "loss": 1.0887, + "step": 10291 + }, + { + "epoch": 0.6453069157940937, + "grad_norm": 3.1161606311798096, + "learning_rate": 5.904207000195996e-06, + "loss": 1.0771, + "step": 10292 + }, + { + "epoch": 0.645369615649884, + "grad_norm": 3.4660885334014893, + "learning_rate": 5.9023544699741916e-06, + "loss": 1.081, + "step": 10293 + }, + { + "epoch": 0.6454323155056744, + 
"grad_norm": 3.203097105026245, + "learning_rate": 5.900502108739466e-06, + "loss": 0.994, + "step": 10294 + }, + { + "epoch": 0.6454950153614647, + "grad_norm": 2.9330034255981445, + "learning_rate": 5.898649916568214e-06, + "loss": 1.2164, + "step": 10295 + }, + { + "epoch": 0.645557715217255, + "grad_norm": 3.410308599472046, + "learning_rate": 5.896797893536817e-06, + "loss": 1.0748, + "step": 10296 + }, + { + "epoch": 0.6456204150730454, + "grad_norm": 3.35781192779541, + "learning_rate": 5.894946039721656e-06, + "loss": 0.913, + "step": 10297 + }, + { + "epoch": 0.6456831149288357, + "grad_norm": 3.351032018661499, + "learning_rate": 5.893094355199098e-06, + "loss": 1.0036, + "step": 10298 + }, + { + "epoch": 0.645745814784626, + "grad_norm": 3.4379916191101074, + "learning_rate": 5.891242840045508e-06, + "loss": 1.0819, + "step": 10299 + }, + { + "epoch": 0.6458085146404163, + "grad_norm": 3.4087655544281006, + "learning_rate": 5.889391494337244e-06, + "loss": 1.0511, + "step": 10300 + }, + { + "epoch": 0.6458712144962067, + "grad_norm": 3.316953420639038, + "learning_rate": 5.887540318150653e-06, + "loss": 1.0754, + "step": 10301 + }, + { + "epoch": 0.645933914351997, + "grad_norm": 3.2947099208831787, + "learning_rate": 5.885689311562082e-06, + "loss": 1.1168, + "step": 10302 + }, + { + "epoch": 0.6459966142077873, + "grad_norm": 3.0948598384857178, + "learning_rate": 5.883838474647858e-06, + "loss": 1.1383, + "step": 10303 + }, + { + "epoch": 0.6460593140635776, + "grad_norm": 3.1757307052612305, + "learning_rate": 5.881987807484319e-06, + "loss": 1.024, + "step": 10304 + }, + { + "epoch": 0.646122013919368, + "grad_norm": 3.064906358718872, + "learning_rate": 5.880137310147782e-06, + "loss": 1.0943, + "step": 10305 + }, + { + "epoch": 0.6461847137751583, + "grad_norm": 2.775890350341797, + "learning_rate": 5.878286982714563e-06, + "loss": 1.0714, + "step": 10306 + }, + { + "epoch": 0.6462474136309486, + "grad_norm": 3.096721649169922, + "learning_rate": 5.876436825260967e-06, + "loss": 1.0827, + "step": 10307 + }, + { + "epoch": 0.646310113486739, + "grad_norm": 3.257810354232788, + "learning_rate": 5.8745868378632986e-06, + "loss": 0.9074, + "step": 10308 + }, + { + "epoch": 0.6463728133425293, + "grad_norm": 3.1671440601348877, + "learning_rate": 5.872737020597847e-06, + "loss": 1.066, + "step": 10309 + }, + { + "epoch": 0.6464355131983196, + "grad_norm": 3.290849447250366, + "learning_rate": 5.8708873735409e-06, + "loss": 1.11, + "step": 10310 + }, + { + "epoch": 0.6464982130541099, + "grad_norm": 3.457350254058838, + "learning_rate": 5.869037896768741e-06, + "loss": 1.0657, + "step": 10311 + }, + { + "epoch": 0.6465609129099003, + "grad_norm": 2.978851556777954, + "learning_rate": 5.8671885903576375e-06, + "loss": 0.9397, + "step": 10312 + }, + { + "epoch": 0.6466236127656907, + "grad_norm": 2.9817779064178467, + "learning_rate": 5.865339454383858e-06, + "loss": 1.2171, + "step": 10313 + }, + { + "epoch": 0.646686312621481, + "grad_norm": 3.3317761421203613, + "learning_rate": 5.86349048892366e-06, + "loss": 1.1574, + "step": 10314 + }, + { + "epoch": 0.6467490124772713, + "grad_norm": 3.4405934810638428, + "learning_rate": 5.861641694053295e-06, + "loss": 1.1481, + "step": 10315 + }, + { + "epoch": 0.6468117123330617, + "grad_norm": 2.957209587097168, + "learning_rate": 5.859793069849007e-06, + "loss": 1.1529, + "step": 10316 + }, + { + "epoch": 0.646874412188852, + "grad_norm": 3.11751389503479, + "learning_rate": 5.857944616387034e-06, + "loss": 1.1058, + "step": 10317 + 
}, + { + "epoch": 0.6469371120446423, + "grad_norm": 3.0961380004882812, + "learning_rate": 5.856096333743604e-06, + "loss": 1.1547, + "step": 10318 + }, + { + "epoch": 0.6469998119004327, + "grad_norm": 3.428875684738159, + "learning_rate": 5.8542482219949445e-06, + "loss": 1.0927, + "step": 10319 + }, + { + "epoch": 0.647062511756223, + "grad_norm": 3.1600563526153564, + "learning_rate": 5.852400281217268e-06, + "loss": 1.1587, + "step": 10320 + }, + { + "epoch": 0.6471252116120133, + "grad_norm": 2.758554697036743, + "learning_rate": 5.8505525114867845e-06, + "loss": 1.1622, + "step": 10321 + }, + { + "epoch": 0.6471879114678036, + "grad_norm": 3.051469564437866, + "learning_rate": 5.848704912879699e-06, + "loss": 1.0819, + "step": 10322 + }, + { + "epoch": 0.647250611323594, + "grad_norm": 3.3561501502990723, + "learning_rate": 5.846857485472202e-06, + "loss": 1.2193, + "step": 10323 + }, + { + "epoch": 0.6473133111793843, + "grad_norm": 3.196938991546631, + "learning_rate": 5.845010229340484e-06, + "loss": 1.1729, + "step": 10324 + }, + { + "epoch": 0.6473760110351746, + "grad_norm": 2.915321111679077, + "learning_rate": 5.843163144560726e-06, + "loss": 1.0241, + "step": 10325 + }, + { + "epoch": 0.6474387108909649, + "grad_norm": 3.095533847808838, + "learning_rate": 5.841316231209102e-06, + "loss": 1.0502, + "step": 10326 + }, + { + "epoch": 0.6475014107467553, + "grad_norm": 2.9823527336120605, + "learning_rate": 5.8394694893617774e-06, + "loss": 1.0776, + "step": 10327 + }, + { + "epoch": 0.6475641106025456, + "grad_norm": 3.209254026412964, + "learning_rate": 5.837622919094914e-06, + "loss": 1.1751, + "step": 10328 + }, + { + "epoch": 0.6476268104583359, + "grad_norm": 3.232928514480591, + "learning_rate": 5.835776520484664e-06, + "loss": 1.2594, + "step": 10329 + }, + { + "epoch": 0.6476895103141262, + "grad_norm": 3.7044050693511963, + "learning_rate": 5.833930293607167e-06, + "loss": 1.0532, + "step": 10330 + }, + { + "epoch": 0.6477522101699166, + "grad_norm": 2.9943366050720215, + "learning_rate": 5.832084238538571e-06, + "loss": 1.1245, + "step": 10331 + }, + { + "epoch": 0.6478149100257069, + "grad_norm": 2.976597785949707, + "learning_rate": 5.8302383553550025e-06, + "loss": 1.1988, + "step": 10332 + }, + { + "epoch": 0.6478776098814972, + "grad_norm": 3.2583258152008057, + "learning_rate": 5.8283926441325835e-06, + "loss": 1.1311, + "step": 10333 + }, + { + "epoch": 0.6479403097372876, + "grad_norm": 3.1585922241210938, + "learning_rate": 5.8265471049474375e-06, + "loss": 1.031, + "step": 10334 + }, + { + "epoch": 0.6480030095930779, + "grad_norm": 3.1482656002044678, + "learning_rate": 5.824701737875671e-06, + "loss": 1.0626, + "step": 10335 + }, + { + "epoch": 0.6480657094488683, + "grad_norm": 3.135918617248535, + "learning_rate": 5.8228565429933866e-06, + "loss": 1.0398, + "step": 10336 + }, + { + "epoch": 0.6481284093046586, + "grad_norm": 3.5387837886810303, + "learning_rate": 5.821011520376678e-06, + "loss": 1.1212, + "step": 10337 + }, + { + "epoch": 0.648191109160449, + "grad_norm": 3.0226829051971436, + "learning_rate": 5.81916667010164e-06, + "loss": 0.969, + "step": 10338 + }, + { + "epoch": 0.6482538090162393, + "grad_norm": 3.418828248977661, + "learning_rate": 5.8173219922443516e-06, + "loss": 1.1382, + "step": 10339 + }, + { + "epoch": 0.6483165088720296, + "grad_norm": 3.0010628700256348, + "learning_rate": 5.815477486880882e-06, + "loss": 1.2011, + "step": 10340 + }, + { + "epoch": 0.64837920872782, + "grad_norm": 3.154789924621582, + "learning_rate": 
5.813633154087308e-06, + "loss": 1.111, + "step": 10341 + }, + { + "epoch": 0.6484419085836103, + "grad_norm": 3.343615770339966, + "learning_rate": 5.811788993939687e-06, + "loss": 0.9797, + "step": 10342 + }, + { + "epoch": 0.6485046084394006, + "grad_norm": 3.458745241165161, + "learning_rate": 5.809945006514069e-06, + "loss": 1.1438, + "step": 10343 + }, + { + "epoch": 0.6485673082951909, + "grad_norm": 2.9424502849578857, + "learning_rate": 5.8081011918865e-06, + "loss": 1.1272, + "step": 10344 + }, + { + "epoch": 0.6486300081509813, + "grad_norm": 3.3386952877044678, + "learning_rate": 5.8062575501330225e-06, + "loss": 1.1456, + "step": 10345 + }, + { + "epoch": 0.6486927080067716, + "grad_norm": 3.26237416267395, + "learning_rate": 5.804414081329668e-06, + "loss": 0.9381, + "step": 10346 + }, + { + "epoch": 0.6487554078625619, + "grad_norm": 3.058377265930176, + "learning_rate": 5.802570785552459e-06, + "loss": 1.0219, + "step": 10347 + }, + { + "epoch": 0.6488181077183522, + "grad_norm": 3.2788941860198975, + "learning_rate": 5.80072766287741e-06, + "loss": 1.0946, + "step": 10348 + }, + { + "epoch": 0.6488808075741426, + "grad_norm": 2.9861655235290527, + "learning_rate": 5.798884713380542e-06, + "loss": 1.2175, + "step": 10349 + }, + { + "epoch": 0.6489435074299329, + "grad_norm": 3.359684944152832, + "learning_rate": 5.797041937137848e-06, + "loss": 0.894, + "step": 10350 + }, + { + "epoch": 0.6490062072857232, + "grad_norm": 3.0373382568359375, + "learning_rate": 5.795199334225328e-06, + "loss": 0.9256, + "step": 10351 + }, + { + "epoch": 0.6490689071415136, + "grad_norm": 3.6913301944732666, + "learning_rate": 5.7933569047189716e-06, + "loss": 1.074, + "step": 10352 + }, + { + "epoch": 0.6491316069973039, + "grad_norm": 3.159964084625244, + "learning_rate": 5.791514648694763e-06, + "loss": 0.9971, + "step": 10353 + }, + { + "epoch": 0.6491943068530942, + "grad_norm": 3.235368251800537, + "learning_rate": 5.789672566228672e-06, + "loss": 1.1988, + "step": 10354 + }, + { + "epoch": 0.6492570067088845, + "grad_norm": 2.960326671600342, + "learning_rate": 5.787830657396667e-06, + "loss": 1.0779, + "step": 10355 + }, + { + "epoch": 0.6493197065646749, + "grad_norm": 3.2344775199890137, + "learning_rate": 5.785988922274711e-06, + "loss": 1.1255, + "step": 10356 + }, + { + "epoch": 0.6493824064204652, + "grad_norm": 3.13084077835083, + "learning_rate": 5.7841473609387565e-06, + "loss": 1.157, + "step": 10357 + }, + { + "epoch": 0.6494451062762555, + "grad_norm": 3.3117620944976807, + "learning_rate": 5.78230597346475e-06, + "loss": 0.9635, + "step": 10358 + }, + { + "epoch": 0.649507806132046, + "grad_norm": 3.4138894081115723, + "learning_rate": 5.780464759928623e-06, + "loss": 1.2465, + "step": 10359 + }, + { + "epoch": 0.6495705059878363, + "grad_norm": 3.2122714519500732, + "learning_rate": 5.77862372040632e-06, + "loss": 1.1307, + "step": 10360 + }, + { + "epoch": 0.6496332058436266, + "grad_norm": 3.196967363357544, + "learning_rate": 5.776782854973758e-06, + "loss": 1.0813, + "step": 10361 + }, + { + "epoch": 0.6496959056994169, + "grad_norm": 3.4685685634613037, + "learning_rate": 5.774942163706852e-06, + "loss": 1.1061, + "step": 10362 + }, + { + "epoch": 0.6497586055552073, + "grad_norm": 3.1231255531311035, + "learning_rate": 5.773101646681519e-06, + "loss": 0.9853, + "step": 10363 + }, + { + "epoch": 0.6498213054109976, + "grad_norm": 3.1620631217956543, + "learning_rate": 5.7712613039736585e-06, + "loss": 1.2254, + "step": 10364 + }, + { + "epoch": 0.6498840052667879, + 
"grad_norm": 2.9972646236419678, + "learning_rate": 5.769421135659168e-06, + "loss": 1.144, + "step": 10365 + }, + { + "epoch": 0.6499467051225782, + "grad_norm": 3.2005715370178223, + "learning_rate": 5.76758114181393e-06, + "loss": 1.0996, + "step": 10366 + }, + { + "epoch": 0.6500094049783686, + "grad_norm": 3.4801151752471924, + "learning_rate": 5.765741322513833e-06, + "loss": 1.2158, + "step": 10367 + }, + { + "epoch": 0.6500721048341589, + "grad_norm": 3.140625, + "learning_rate": 5.763901677834751e-06, + "loss": 1.1316, + "step": 10368 + }, + { + "epoch": 0.6501348046899492, + "grad_norm": 3.5874576568603516, + "learning_rate": 5.762062207852548e-06, + "loss": 1.0805, + "step": 10369 + }, + { + "epoch": 0.6501975045457395, + "grad_norm": 3.369380235671997, + "learning_rate": 5.76022291264308e-06, + "loss": 1.0073, + "step": 10370 + }, + { + "epoch": 0.6502602044015299, + "grad_norm": 3.323267698287964, + "learning_rate": 5.758383792282208e-06, + "loss": 1.0954, + "step": 10371 + }, + { + "epoch": 0.6503229042573202, + "grad_norm": 3.249764919281006, + "learning_rate": 5.756544846845775e-06, + "loss": 1.0558, + "step": 10372 + }, + { + "epoch": 0.6503856041131105, + "grad_norm": 3.439969301223755, + "learning_rate": 5.754706076409613e-06, + "loss": 1.0389, + "step": 10373 + }, + { + "epoch": 0.6504483039689009, + "grad_norm": 3.802201271057129, + "learning_rate": 5.752867481049562e-06, + "loss": 1.2013, + "step": 10374 + }, + { + "epoch": 0.6505110038246912, + "grad_norm": 3.2939231395721436, + "learning_rate": 5.751029060841441e-06, + "loss": 1.2302, + "step": 10375 + }, + { + "epoch": 0.6505737036804815, + "grad_norm": 3.018012762069702, + "learning_rate": 5.749190815861068e-06, + "loss": 1.1591, + "step": 10376 + }, + { + "epoch": 0.6506364035362718, + "grad_norm": 3.222081184387207, + "learning_rate": 5.747352746184246e-06, + "loss": 0.9369, + "step": 10377 + }, + { + "epoch": 0.6506991033920622, + "grad_norm": 3.346916675567627, + "learning_rate": 5.745514851886788e-06, + "loss": 1.166, + "step": 10378 + }, + { + "epoch": 0.6507618032478525, + "grad_norm": 3.1923749446868896, + "learning_rate": 5.743677133044483e-06, + "loss": 1.1003, + "step": 10379 + }, + { + "epoch": 0.6508245031036428, + "grad_norm": 2.914720296859741, + "learning_rate": 5.741839589733118e-06, + "loss": 1.0639, + "step": 10380 + }, + { + "epoch": 0.6508872029594331, + "grad_norm": 3.0869977474212646, + "learning_rate": 5.740002222028471e-06, + "loss": 1.1728, + "step": 10381 + }, + { + "epoch": 0.6509499028152236, + "grad_norm": 3.494297504425049, + "learning_rate": 5.738165030006322e-06, + "loss": 1.1422, + "step": 10382 + }, + { + "epoch": 0.6510126026710139, + "grad_norm": 3.3115200996398926, + "learning_rate": 5.736328013742434e-06, + "loss": 1.0953, + "step": 10383 + }, + { + "epoch": 0.6510753025268042, + "grad_norm": 2.783238649368286, + "learning_rate": 5.734491173312559e-06, + "loss": 1.1898, + "step": 10384 + }, + { + "epoch": 0.6511380023825946, + "grad_norm": 2.997711658477783, + "learning_rate": 5.732654508792461e-06, + "loss": 1.1146, + "step": 10385 + }, + { + "epoch": 0.6512007022383849, + "grad_norm": 2.945932388305664, + "learning_rate": 5.730818020257876e-06, + "loss": 1.0919, + "step": 10386 + }, + { + "epoch": 0.6512634020941752, + "grad_norm": 3.170175552368164, + "learning_rate": 5.728981707784543e-06, + "loss": 1.0807, + "step": 10387 + }, + { + "epoch": 0.6513261019499655, + "grad_norm": 3.1817502975463867, + "learning_rate": 5.727145571448185e-06, + "loss": 0.991, + "step": 10388 + 
}, + { + "epoch": 0.6513888018057559, + "grad_norm": 3.4374067783355713, + "learning_rate": 5.725309611324537e-06, + "loss": 1.0752, + "step": 10389 + }, + { + "epoch": 0.6514515016615462, + "grad_norm": 3.183912515640259, + "learning_rate": 5.723473827489301e-06, + "loss": 1.1228, + "step": 10390 + }, + { + "epoch": 0.6515142015173365, + "grad_norm": 3.036808490753174, + "learning_rate": 5.721638220018195e-06, + "loss": 1.0775, + "step": 10391 + }, + { + "epoch": 0.6515769013731268, + "grad_norm": 3.411525249481201, + "learning_rate": 5.719802788986913e-06, + "loss": 1.0767, + "step": 10392 + }, + { + "epoch": 0.6516396012289172, + "grad_norm": 3.1899049282073975, + "learning_rate": 5.717967534471155e-06, + "loss": 1.1831, + "step": 10393 + }, + { + "epoch": 0.6517023010847075, + "grad_norm": 3.642240047454834, + "learning_rate": 5.716132456546602e-06, + "loss": 1.0032, + "step": 10394 + }, + { + "epoch": 0.6517650009404978, + "grad_norm": 2.830488920211792, + "learning_rate": 5.714297555288935e-06, + "loss": 1.2279, + "step": 10395 + }, + { + "epoch": 0.6518277007962882, + "grad_norm": 2.942002058029175, + "learning_rate": 5.712462830773817e-06, + "loss": 1.022, + "step": 10396 + }, + { + "epoch": 0.6518904006520785, + "grad_norm": 3.268582820892334, + "learning_rate": 5.710628283076927e-06, + "loss": 1.0934, + "step": 10397 + }, + { + "epoch": 0.6519531005078688, + "grad_norm": 3.025052070617676, + "learning_rate": 5.708793912273911e-06, + "loss": 1.2275, + "step": 10398 + }, + { + "epoch": 0.6520158003636591, + "grad_norm": 3.188107490539551, + "learning_rate": 5.706959718440423e-06, + "loss": 1.1268, + "step": 10399 + }, + { + "epoch": 0.6520785002194495, + "grad_norm": 3.1877310276031494, + "learning_rate": 5.705125701652099e-06, + "loss": 1.1769, + "step": 10400 + }, + { + "epoch": 0.6521412000752398, + "grad_norm": 3.296293258666992, + "learning_rate": 5.703291861984581e-06, + "loss": 1.1234, + "step": 10401 + }, + { + "epoch": 0.6522038999310301, + "grad_norm": 2.848318338394165, + "learning_rate": 5.701458199513497e-06, + "loss": 1.0084, + "step": 10402 + }, + { + "epoch": 0.6522665997868204, + "grad_norm": 3.214459180831909, + "learning_rate": 5.69962471431446e-06, + "loss": 1.0551, + "step": 10403 + }, + { + "epoch": 0.6523292996426108, + "grad_norm": 3.0179836750030518, + "learning_rate": 5.697791406463091e-06, + "loss": 1.07, + "step": 10404 + }, + { + "epoch": 0.6523919994984011, + "grad_norm": 3.197788715362549, + "learning_rate": 5.695958276034992e-06, + "loss": 1.1425, + "step": 10405 + }, + { + "epoch": 0.6524546993541915, + "grad_norm": 3.087392807006836, + "learning_rate": 5.694125323105761e-06, + "loss": 1.245, + "step": 10406 + }, + { + "epoch": 0.6525173992099819, + "grad_norm": 3.059055805206299, + "learning_rate": 5.692292547750989e-06, + "loss": 1.0522, + "step": 10407 + }, + { + "epoch": 0.6525800990657722, + "grad_norm": 2.9736478328704834, + "learning_rate": 5.690459950046261e-06, + "loss": 1.2639, + "step": 10408 + }, + { + "epoch": 0.6526427989215625, + "grad_norm": 3.511594533920288, + "learning_rate": 5.688627530067154e-06, + "loss": 1.0262, + "step": 10409 + }, + { + "epoch": 0.6527054987773528, + "grad_norm": 3.172421932220459, + "learning_rate": 5.686795287889235e-06, + "loss": 1.049, + "step": 10410 + }, + { + "epoch": 0.6527681986331432, + "grad_norm": 3.3932180404663086, + "learning_rate": 5.684963223588064e-06, + "loss": 1.0117, + "step": 10411 + }, + { + "epoch": 0.6528308984889335, + "grad_norm": 3.3131816387176514, + "learning_rate": 
5.6831313372392024e-06, + "loss": 1.0762, + "step": 10412 + }, + { + "epoch": 0.6528935983447238, + "grad_norm": 3.2784810066223145, + "learning_rate": 5.681299628918192e-06, + "loss": 1.1015, + "step": 10413 + }, + { + "epoch": 0.6529562982005142, + "grad_norm": 3.1135871410369873, + "learning_rate": 5.6794680987005705e-06, + "loss": 1.0205, + "step": 10414 + }, + { + "epoch": 0.6530189980563045, + "grad_norm": 3.019117832183838, + "learning_rate": 5.677636746661876e-06, + "loss": 1.0479, + "step": 10415 + }, + { + "epoch": 0.6530816979120948, + "grad_norm": 3.049804210662842, + "learning_rate": 5.6758055728776306e-06, + "loss": 0.947, + "step": 10416 + }, + { + "epoch": 0.6531443977678851, + "grad_norm": 3.496898889541626, + "learning_rate": 5.673974577423354e-06, + "loss": 1.1034, + "step": 10417 + }, + { + "epoch": 0.6532070976236755, + "grad_norm": 3.009047508239746, + "learning_rate": 5.67214376037455e-06, + "loss": 1.1168, + "step": 10418 + }, + { + "epoch": 0.6532697974794658, + "grad_norm": 3.2709758281707764, + "learning_rate": 5.67031312180673e-06, + "loss": 1.2818, + "step": 10419 + }, + { + "epoch": 0.6533324973352561, + "grad_norm": 3.235748052597046, + "learning_rate": 5.668482661795386e-06, + "loss": 1.1433, + "step": 10420 + }, + { + "epoch": 0.6533951971910464, + "grad_norm": 3.561054229736328, + "learning_rate": 5.666652380416002e-06, + "loss": 1.1181, + "step": 10421 + }, + { + "epoch": 0.6534578970468368, + "grad_norm": 3.1603989601135254, + "learning_rate": 5.664822277744067e-06, + "loss": 1.1944, + "step": 10422 + }, + { + "epoch": 0.6535205969026271, + "grad_norm": 3.605726480484009, + "learning_rate": 5.662992353855051e-06, + "loss": 1.1117, + "step": 10423 + }, + { + "epoch": 0.6535832967584174, + "grad_norm": 3.0616350173950195, + "learning_rate": 5.66116260882442e-06, + "loss": 1.0109, + "step": 10424 + }, + { + "epoch": 0.6536459966142077, + "grad_norm": 3.0004518032073975, + "learning_rate": 5.659333042727628e-06, + "loss": 1.2706, + "step": 10425 + }, + { + "epoch": 0.6537086964699981, + "grad_norm": 2.973285436630249, + "learning_rate": 5.657503655640135e-06, + "loss": 1.1435, + "step": 10426 + }, + { + "epoch": 0.6537713963257884, + "grad_norm": 3.296265125274658, + "learning_rate": 5.6556744476373805e-06, + "loss": 1.1152, + "step": 10427 + }, + { + "epoch": 0.6538340961815787, + "grad_norm": 3.149290084838867, + "learning_rate": 5.6538454187948025e-06, + "loss": 1.0876, + "step": 10428 + }, + { + "epoch": 0.6538967960373692, + "grad_norm": 3.5678863525390625, + "learning_rate": 5.652016569187823e-06, + "loss": 0.9958, + "step": 10429 + }, + { + "epoch": 0.6539594958931595, + "grad_norm": 3.2107343673706055, + "learning_rate": 5.650187898891877e-06, + "loss": 1.052, + "step": 10430 + }, + { + "epoch": 0.6540221957489498, + "grad_norm": 3.058105707168579, + "learning_rate": 5.648359407982371e-06, + "loss": 1.0381, + "step": 10431 + }, + { + "epoch": 0.6540848956047401, + "grad_norm": 3.3926138877868652, + "learning_rate": 5.646531096534708e-06, + "loss": 0.9793, + "step": 10432 + }, + { + "epoch": 0.6541475954605305, + "grad_norm": 3.2621257305145264, + "learning_rate": 5.644702964624298e-06, + "loss": 1.141, + "step": 10433 + }, + { + "epoch": 0.6542102953163208, + "grad_norm": 3.4208710193634033, + "learning_rate": 5.642875012326527e-06, + "loss": 1.1819, + "step": 10434 + }, + { + "epoch": 0.6542729951721111, + "grad_norm": 2.8761773109436035, + "learning_rate": 5.641047239716781e-06, + "loss": 0.9914, + "step": 10435 + }, + { + "epoch": 
0.6543356950279015, + "grad_norm": 3.2342400550842285, + "learning_rate": 5.639219646870433e-06, + "loss": 1.1389, + "step": 10436 + }, + { + "epoch": 0.6543983948836918, + "grad_norm": 2.889740467071533, + "learning_rate": 5.637392233862862e-06, + "loss": 0.9982, + "step": 10437 + }, + { + "epoch": 0.6544610947394821, + "grad_norm": 3.179049253463745, + "learning_rate": 5.635565000769425e-06, + "loss": 1.2874, + "step": 10438 + }, + { + "epoch": 0.6545237945952724, + "grad_norm": 3.5506534576416016, + "learning_rate": 5.633737947665479e-06, + "loss": 0.9475, + "step": 10439 + }, + { + "epoch": 0.6545864944510628, + "grad_norm": 3.117652177810669, + "learning_rate": 5.631911074626366e-06, + "loss": 0.9969, + "step": 10440 + }, + { + "epoch": 0.6546491943068531, + "grad_norm": 3.2343554496765137, + "learning_rate": 5.630084381727434e-06, + "loss": 0.9257, + "step": 10441 + }, + { + "epoch": 0.6547118941626434, + "grad_norm": 3.2974321842193604, + "learning_rate": 5.628257869044014e-06, + "loss": 0.9788, + "step": 10442 + }, + { + "epoch": 0.6547745940184337, + "grad_norm": 3.4192967414855957, + "learning_rate": 5.626431536651427e-06, + "loss": 1.0182, + "step": 10443 + }, + { + "epoch": 0.6548372938742241, + "grad_norm": 3.1953225135803223, + "learning_rate": 5.624605384624997e-06, + "loss": 1.1037, + "step": 10444 + }, + { + "epoch": 0.6548999937300144, + "grad_norm": 3.3051700592041016, + "learning_rate": 5.622779413040034e-06, + "loss": 1.0535, + "step": 10445 + }, + { + "epoch": 0.6549626935858047, + "grad_norm": 3.290735960006714, + "learning_rate": 5.620953621971837e-06, + "loss": 1.0695, + "step": 10446 + }, + { + "epoch": 0.655025393441595, + "grad_norm": 3.1908888816833496, + "learning_rate": 5.6191280114957e-06, + "loss": 1.183, + "step": 10447 + }, + { + "epoch": 0.6550880932973854, + "grad_norm": 2.9134609699249268, + "learning_rate": 5.617302581686921e-06, + "loss": 1.1477, + "step": 10448 + }, + { + "epoch": 0.6551507931531757, + "grad_norm": 3.351120948791504, + "learning_rate": 5.615477332620774e-06, + "loss": 1.1154, + "step": 10449 + }, + { + "epoch": 0.655213493008966, + "grad_norm": 3.092517137527466, + "learning_rate": 5.613652264372533e-06, + "loss": 1.1718, + "step": 10450 + }, + { + "epoch": 0.6552761928647564, + "grad_norm": 3.376924991607666, + "learning_rate": 5.611827377017461e-06, + "loss": 1.1247, + "step": 10451 + }, + { + "epoch": 0.6553388927205468, + "grad_norm": 3.702293872833252, + "learning_rate": 5.610002670630822e-06, + "loss": 1.0576, + "step": 10452 + }, + { + "epoch": 0.6554015925763371, + "grad_norm": 3.668400287628174, + "learning_rate": 5.608178145287866e-06, + "loss": 1.0761, + "step": 10453 + }, + { + "epoch": 0.6554642924321274, + "grad_norm": 3.79475998878479, + "learning_rate": 5.60635380106383e-06, + "loss": 0.979, + "step": 10454 + }, + { + "epoch": 0.6555269922879178, + "grad_norm": 3.2877357006073, + "learning_rate": 5.60452963803396e-06, + "loss": 1.0925, + "step": 10455 + }, + { + "epoch": 0.6555896921437081, + "grad_norm": 3.1125855445861816, + "learning_rate": 5.60270565627348e-06, + "loss": 1.1801, + "step": 10456 + }, + { + "epoch": 0.6556523919994984, + "grad_norm": 3.309922456741333, + "learning_rate": 5.60088185585761e-06, + "loss": 1.1146, + "step": 10457 + }, + { + "epoch": 0.6557150918552888, + "grad_norm": 3.3562495708465576, + "learning_rate": 5.599058236861559e-06, + "loss": 1.0888, + "step": 10458 + }, + { + "epoch": 0.6557777917110791, + "grad_norm": 3.617208957672119, + "learning_rate": 5.597234799360545e-06, + "loss": 
1.0612, + "step": 10459 + }, + { + "epoch": 0.6558404915668694, + "grad_norm": 3.252471685409546, + "learning_rate": 5.595411543429758e-06, + "loss": 1.1841, + "step": 10460 + }, + { + "epoch": 0.6559031914226597, + "grad_norm": 3.274935007095337, + "learning_rate": 5.593588469144393e-06, + "loss": 0.9745, + "step": 10461 + }, + { + "epoch": 0.6559658912784501, + "grad_norm": 3.0467522144317627, + "learning_rate": 5.591765576579627e-06, + "loss": 1.2323, + "step": 10462 + }, + { + "epoch": 0.6560285911342404, + "grad_norm": 3.204113245010376, + "learning_rate": 5.589942865810646e-06, + "loss": 1.1617, + "step": 10463 + }, + { + "epoch": 0.6560912909900307, + "grad_norm": 3.4729959964752197, + "learning_rate": 5.588120336912613e-06, + "loss": 1.0714, + "step": 10464 + }, + { + "epoch": 0.656153990845821, + "grad_norm": 3.068758010864258, + "learning_rate": 5.5862979899606866e-06, + "loss": 1.1168, + "step": 10465 + }, + { + "epoch": 0.6562166907016114, + "grad_norm": 3.363892078399658, + "learning_rate": 5.584475825030027e-06, + "loss": 1.0627, + "step": 10466 + }, + { + "epoch": 0.6562793905574017, + "grad_norm": 3.3219404220581055, + "learning_rate": 5.582653842195777e-06, + "loss": 1.0063, + "step": 10467 + }, + { + "epoch": 0.656342090413192, + "grad_norm": 3.2615625858306885, + "learning_rate": 5.580832041533077e-06, + "loss": 1.1001, + "step": 10468 + }, + { + "epoch": 0.6564047902689824, + "grad_norm": 3.5587189197540283, + "learning_rate": 5.579010423117052e-06, + "loss": 1.1421, + "step": 10469 + }, + { + "epoch": 0.6564674901247727, + "grad_norm": 3.2877633571624756, + "learning_rate": 5.577188987022835e-06, + "loss": 1.2477, + "step": 10470 + }, + { + "epoch": 0.656530189980563, + "grad_norm": 2.6757516860961914, + "learning_rate": 5.575367733325535e-06, + "loss": 1.0546, + "step": 10471 + }, + { + "epoch": 0.6565928898363533, + "grad_norm": 3.367380380630493, + "learning_rate": 5.57354666210026e-06, + "loss": 0.9885, + "step": 10472 + }, + { + "epoch": 0.6566555896921437, + "grad_norm": 3.0127949714660645, + "learning_rate": 5.571725773422119e-06, + "loss": 1.1387, + "step": 10473 + }, + { + "epoch": 0.656718289547934, + "grad_norm": 3.5069119930267334, + "learning_rate": 5.569905067366199e-06, + "loss": 1.0503, + "step": 10474 + }, + { + "epoch": 0.6567809894037244, + "grad_norm": 3.3708126544952393, + "learning_rate": 5.5680845440075885e-06, + "loss": 1.118, + "step": 10475 + }, + { + "epoch": 0.6568436892595148, + "grad_norm": 3.472055435180664, + "learning_rate": 5.566264203421361e-06, + "loss": 1.1117, + "step": 10476 + }, + { + "epoch": 0.6569063891153051, + "grad_norm": 3.694863796234131, + "learning_rate": 5.564444045682594e-06, + "loss": 1.0722, + "step": 10477 + }, + { + "epoch": 0.6569690889710954, + "grad_norm": 3.0589914321899414, + "learning_rate": 5.562624070866349e-06, + "loss": 1.1499, + "step": 10478 + }, + { + "epoch": 0.6570317888268857, + "grad_norm": 3.0871758460998535, + "learning_rate": 5.560804279047682e-06, + "loss": 1.1887, + "step": 10479 + }, + { + "epoch": 0.6570944886826761, + "grad_norm": 3.3414061069488525, + "learning_rate": 5.5589846703016345e-06, + "loss": 1.1942, + "step": 10480 + }, + { + "epoch": 0.6571571885384664, + "grad_norm": 3.416633129119873, + "learning_rate": 5.55716524470326e-06, + "loss": 1.0848, + "step": 10481 + }, + { + "epoch": 0.6572198883942567, + "grad_norm": 3.322493314743042, + "learning_rate": 5.555346002327582e-06, + "loss": 1.0495, + "step": 10482 + }, + { + "epoch": 0.657282588250047, + "grad_norm": 
3.1160812377929688, + "learning_rate": 5.553526943249626e-06, + "loss": 1.0811, + "step": 10483 + }, + { + "epoch": 0.6573452881058374, + "grad_norm": 3.0108721256256104, + "learning_rate": 5.551708067544418e-06, + "loss": 1.2488, + "step": 10484 + }, + { + "epoch": 0.6574079879616277, + "grad_norm": 3.0567359924316406, + "learning_rate": 5.549889375286963e-06, + "loss": 1.1576, + "step": 10485 + }, + { + "epoch": 0.657470687817418, + "grad_norm": 2.8855152130126953, + "learning_rate": 5.548070866552264e-06, + "loss": 1.0332, + "step": 10486 + }, + { + "epoch": 0.6575333876732083, + "grad_norm": 3.008845567703247, + "learning_rate": 5.546252541415313e-06, + "loss": 1.0061, + "step": 10487 + }, + { + "epoch": 0.6575960875289987, + "grad_norm": 3.326906681060791, + "learning_rate": 5.544434399951106e-06, + "loss": 1.1787, + "step": 10488 + }, + { + "epoch": 0.657658787384789, + "grad_norm": 3.1076791286468506, + "learning_rate": 5.542616442234618e-06, + "loss": 1.0805, + "step": 10489 + }, + { + "epoch": 0.6577214872405793, + "grad_norm": 3.490325689315796, + "learning_rate": 5.540798668340823e-06, + "loss": 1.0618, + "step": 10490 + }, + { + "epoch": 0.6577841870963697, + "grad_norm": 2.974147319793701, + "learning_rate": 5.538981078344683e-06, + "loss": 0.9745, + "step": 10491 + }, + { + "epoch": 0.65784688695216, + "grad_norm": 3.1549582481384277, + "learning_rate": 5.537163672321161e-06, + "loss": 0.9791, + "step": 10492 + }, + { + "epoch": 0.6579095868079503, + "grad_norm": 3.3088817596435547, + "learning_rate": 5.535346450345204e-06, + "loss": 1.0226, + "step": 10493 + }, + { + "epoch": 0.6579722866637406, + "grad_norm": 3.5563457012176514, + "learning_rate": 5.533529412491749e-06, + "loss": 1.0778, + "step": 10494 + }, + { + "epoch": 0.658034986519531, + "grad_norm": 3.4260618686676025, + "learning_rate": 5.531712558835742e-06, + "loss": 0.9459, + "step": 10495 + }, + { + "epoch": 0.6580976863753213, + "grad_norm": 3.229663610458374, + "learning_rate": 5.529895889452104e-06, + "loss": 1.2869, + "step": 10496 + }, + { + "epoch": 0.6581603862311116, + "grad_norm": 3.014742851257324, + "learning_rate": 5.528079404415755e-06, + "loss": 1.1775, + "step": 10497 + }, + { + "epoch": 0.658223086086902, + "grad_norm": 3.5384366512298584, + "learning_rate": 5.526263103801603e-06, + "loss": 1.0601, + "step": 10498 + }, + { + "epoch": 0.6582857859426924, + "grad_norm": 3.035388708114624, + "learning_rate": 5.524446987684557e-06, + "loss": 1.2013, + "step": 10499 + }, + { + "epoch": 0.6583484857984827, + "grad_norm": 3.1182565689086914, + "learning_rate": 5.522631056139516e-06, + "loss": 1.2566, + "step": 10500 + }, + { + "epoch": 0.658411185654273, + "grad_norm": 3.369271755218506, + "learning_rate": 5.5208153092413655e-06, + "loss": 0.9058, + "step": 10501 + }, + { + "epoch": 0.6584738855100634, + "grad_norm": 3.4728622436523438, + "learning_rate": 5.518999747064981e-06, + "loss": 0.9571, + "step": 10502 + }, + { + "epoch": 0.6585365853658537, + "grad_norm": 3.4539198875427246, + "learning_rate": 5.517184369685249e-06, + "loss": 0.9122, + "step": 10503 + }, + { + "epoch": 0.658599285221644, + "grad_norm": 3.22058367729187, + "learning_rate": 5.515369177177028e-06, + "loss": 1.2148, + "step": 10504 + }, + { + "epoch": 0.6586619850774343, + "grad_norm": 3.241835117340088, + "learning_rate": 5.513554169615174e-06, + "loss": 1.191, + "step": 10505 + }, + { + "epoch": 0.6587246849332247, + "grad_norm": 3.442044496536255, + "learning_rate": 5.511739347074546e-06, + "loss": 1.0154, + "step": 10506 + }, 
+ { + "epoch": 0.658787384789015, + "grad_norm": 3.3316457271575928, + "learning_rate": 5.5099247096299834e-06, + "loss": 1.0748, + "step": 10507 + }, + { + "epoch": 0.6588500846448053, + "grad_norm": 3.217472553253174, + "learning_rate": 5.508110257356323e-06, + "loss": 1.1951, + "step": 10508 + }, + { + "epoch": 0.6589127845005956, + "grad_norm": 3.0095598697662354, + "learning_rate": 5.5062959903283855e-06, + "loss": 1.1028, + "step": 10509 + }, + { + "epoch": 0.658975484356386, + "grad_norm": 3.5060651302337646, + "learning_rate": 5.5044819086210035e-06, + "loss": 1.0685, + "step": 10510 + }, + { + "epoch": 0.6590381842121763, + "grad_norm": 3.3183207511901855, + "learning_rate": 5.502668012308983e-06, + "loss": 1.1784, + "step": 10511 + }, + { + "epoch": 0.6591008840679666, + "grad_norm": 3.0460891723632812, + "learning_rate": 5.500854301467131e-06, + "loss": 1.2394, + "step": 10512 + }, + { + "epoch": 0.659163583923757, + "grad_norm": 2.961156129837036, + "learning_rate": 5.499040776170239e-06, + "loss": 0.9239, + "step": 10513 + }, + { + "epoch": 0.6592262837795473, + "grad_norm": 3.402672052383423, + "learning_rate": 5.497227436493107e-06, + "loss": 1.1968, + "step": 10514 + }, + { + "epoch": 0.6592889836353376, + "grad_norm": 3.2369892597198486, + "learning_rate": 5.495414282510511e-06, + "loss": 1.1432, + "step": 10515 + }, + { + "epoch": 0.6593516834911279, + "grad_norm": 3.1694254875183105, + "learning_rate": 5.4936013142972236e-06, + "loss": 0.9808, + "step": 10516 + }, + { + "epoch": 0.6594143833469183, + "grad_norm": 3.0692758560180664, + "learning_rate": 5.4917885319280174e-06, + "loss": 1.0073, + "step": 10517 + }, + { + "epoch": 0.6594770832027086, + "grad_norm": 3.2652721405029297, + "learning_rate": 5.489975935477651e-06, + "loss": 1.0617, + "step": 10518 + }, + { + "epoch": 0.6595397830584989, + "grad_norm": 3.510155200958252, + "learning_rate": 5.488163525020873e-06, + "loss": 1.0104, + "step": 10519 + }, + { + "epoch": 0.6596024829142892, + "grad_norm": 3.6226584911346436, + "learning_rate": 5.486351300632424e-06, + "loss": 1.136, + "step": 10520 + }, + { + "epoch": 0.6596651827700797, + "grad_norm": 3.309175968170166, + "learning_rate": 5.484539262387048e-06, + "loss": 1.0223, + "step": 10521 + }, + { + "epoch": 0.65972788262587, + "grad_norm": 3.4341094493865967, + "learning_rate": 5.482727410359472e-06, + "loss": 0.9404, + "step": 10522 + }, + { + "epoch": 0.6597905824816603, + "grad_norm": 3.499265193939209, + "learning_rate": 5.480915744624409e-06, + "loss": 1.0311, + "step": 10523 + }, + { + "epoch": 0.6598532823374507, + "grad_norm": 3.045057773590088, + "learning_rate": 5.479104265256583e-06, + "loss": 1.0705, + "step": 10524 + }, + { + "epoch": 0.659915982193241, + "grad_norm": 3.3291656970977783, + "learning_rate": 5.477292972330695e-06, + "loss": 1.037, + "step": 10525 + }, + { + "epoch": 0.6599786820490313, + "grad_norm": 3.0614171028137207, + "learning_rate": 5.475481865921441e-06, + "loss": 1.2346, + "step": 10526 + }, + { + "epoch": 0.6600413819048216, + "grad_norm": 3.1753323078155518, + "learning_rate": 5.473670946103508e-06, + "loss": 1.0546, + "step": 10527 + }, + { + "epoch": 0.660104081760612, + "grad_norm": 3.339674234390259, + "learning_rate": 5.471860212951588e-06, + "loss": 1.174, + "step": 10528 + }, + { + "epoch": 0.6601667816164023, + "grad_norm": 3.2562594413757324, + "learning_rate": 5.470049666540352e-06, + "loss": 1.1038, + "step": 10529 + }, + { + "epoch": 0.6602294814721926, + "grad_norm": 3.407419443130493, + "learning_rate": 
5.4682393069444626e-06, + "loss": 1.2734, + "step": 10530 + }, + { + "epoch": 0.660292181327983, + "grad_norm": 3.348510265350342, + "learning_rate": 5.46642913423858e-06, + "loss": 1.1207, + "step": 10531 + }, + { + "epoch": 0.6603548811837733, + "grad_norm": 3.3657708168029785, + "learning_rate": 5.4646191484973605e-06, + "loss": 1.1501, + "step": 10532 + }, + { + "epoch": 0.6604175810395636, + "grad_norm": 3.285571813583374, + "learning_rate": 5.462809349795446e-06, + "loss": 1.1352, + "step": 10533 + }, + { + "epoch": 0.6604802808953539, + "grad_norm": 3.118091583251953, + "learning_rate": 5.4609997382074684e-06, + "loss": 1.0612, + "step": 10534 + }, + { + "epoch": 0.6605429807511443, + "grad_norm": 3.0286242961883545, + "learning_rate": 5.459190313808064e-06, + "loss": 1.1381, + "step": 10535 + }, + { + "epoch": 0.6606056806069346, + "grad_norm": 2.968540668487549, + "learning_rate": 5.457381076671848e-06, + "loss": 1.196, + "step": 10536 + }, + { + "epoch": 0.6606683804627249, + "grad_norm": 3.0823287963867188, + "learning_rate": 5.4555720268734365e-06, + "loss": 1.0366, + "step": 10537 + }, + { + "epoch": 0.6607310803185152, + "grad_norm": 3.1900570392608643, + "learning_rate": 5.453763164487429e-06, + "loss": 1.1705, + "step": 10538 + }, + { + "epoch": 0.6607937801743056, + "grad_norm": 3.4090776443481445, + "learning_rate": 5.4519544895884304e-06, + "loss": 1.2045, + "step": 10539 + }, + { + "epoch": 0.6608564800300959, + "grad_norm": 3.071141004562378, + "learning_rate": 5.450146002251027e-06, + "loss": 1.132, + "step": 10540 + }, + { + "epoch": 0.6609191798858862, + "grad_norm": 3.318979263305664, + "learning_rate": 5.448337702549802e-06, + "loss": 1.12, + "step": 10541 + }, + { + "epoch": 0.6609818797416765, + "grad_norm": 3.4295122623443604, + "learning_rate": 5.446529590559324e-06, + "loss": 1.0086, + "step": 10542 + }, + { + "epoch": 0.6610445795974669, + "grad_norm": 2.911186695098877, + "learning_rate": 5.444721666354169e-06, + "loss": 1.2616, + "step": 10543 + }, + { + "epoch": 0.6611072794532573, + "grad_norm": 3.2112793922424316, + "learning_rate": 5.442913930008891e-06, + "loss": 1.0806, + "step": 10544 + }, + { + "epoch": 0.6611699793090476, + "grad_norm": 2.963204860687256, + "learning_rate": 5.441106381598038e-06, + "loss": 1.0562, + "step": 10545 + }, + { + "epoch": 0.661232679164838, + "grad_norm": 3.30016827583313, + "learning_rate": 5.4392990211961604e-06, + "loss": 1.1836, + "step": 10546 + }, + { + "epoch": 0.6612953790206283, + "grad_norm": 3.62215518951416, + "learning_rate": 5.437491848877791e-06, + "loss": 1.0413, + "step": 10547 + }, + { + "epoch": 0.6613580788764186, + "grad_norm": 3.013028383255005, + "learning_rate": 5.435684864717458e-06, + "loss": 1.1355, + "step": 10548 + }, + { + "epoch": 0.661420778732209, + "grad_norm": 2.8437063694000244, + "learning_rate": 5.4338780687896755e-06, + "loss": 1.0573, + "step": 10549 + }, + { + "epoch": 0.6614834785879993, + "grad_norm": 2.9715983867645264, + "learning_rate": 5.432071461168964e-06, + "loss": 1.1919, + "step": 10550 + }, + { + "epoch": 0.6615461784437896, + "grad_norm": 3.208627939224243, + "learning_rate": 5.430265041929825e-06, + "loss": 1.0723, + "step": 10551 + }, + { + "epoch": 0.6616088782995799, + "grad_norm": 3.148531913757324, + "learning_rate": 5.4284588111467565e-06, + "loss": 0.981, + "step": 10552 + }, + { + "epoch": 0.6616715781553703, + "grad_norm": 3.3544631004333496, + "learning_rate": 5.4266527688942405e-06, + "loss": 1.0827, + "step": 10553 + }, + { + "epoch": 0.6617342780111606, 
+ "grad_norm": 3.375331163406372, + "learning_rate": 5.42484691524677e-06, + "loss": 1.177, + "step": 10554 + }, + { + "epoch": 0.6617969778669509, + "grad_norm": 2.9249837398529053, + "learning_rate": 5.423041250278811e-06, + "loss": 1.0904, + "step": 10555 + }, + { + "epoch": 0.6618596777227412, + "grad_norm": 3.177800416946411, + "learning_rate": 5.421235774064828e-06, + "loss": 1.266, + "step": 10556 + }, + { + "epoch": 0.6619223775785316, + "grad_norm": 3.2197749614715576, + "learning_rate": 5.419430486679285e-06, + "loss": 1.0102, + "step": 10557 + }, + { + "epoch": 0.6619850774343219, + "grad_norm": 3.2056984901428223, + "learning_rate": 5.4176253881966275e-06, + "loss": 1.1949, + "step": 10558 + }, + { + "epoch": 0.6620477772901122, + "grad_norm": 3.2022898197174072, + "learning_rate": 5.415820478691301e-06, + "loss": 1.1677, + "step": 10559 + }, + { + "epoch": 0.6621104771459025, + "grad_norm": 3.1380085945129395, + "learning_rate": 5.414015758237734e-06, + "loss": 1.1165, + "step": 10560 + }, + { + "epoch": 0.6621731770016929, + "grad_norm": 3.22953200340271, + "learning_rate": 5.41221122691036e-06, + "loss": 1.271, + "step": 10561 + }, + { + "epoch": 0.6622358768574832, + "grad_norm": 3.258842945098877, + "learning_rate": 5.410406884783597e-06, + "loss": 1.0586, + "step": 10562 + }, + { + "epoch": 0.6622985767132735, + "grad_norm": 3.162393569946289, + "learning_rate": 5.408602731931853e-06, + "loss": 0.9954, + "step": 10563 + }, + { + "epoch": 0.6623612765690639, + "grad_norm": 3.1391704082489014, + "learning_rate": 5.40679876842953e-06, + "loss": 1.0786, + "step": 10564 + }, + { + "epoch": 0.6624239764248542, + "grad_norm": 3.2885046005249023, + "learning_rate": 5.404994994351029e-06, + "loss": 1.0306, + "step": 10565 + }, + { + "epoch": 0.6624866762806445, + "grad_norm": 2.9874861240386963, + "learning_rate": 5.403191409770736e-06, + "loss": 1.062, + "step": 10566 + }, + { + "epoch": 0.6625493761364348, + "grad_norm": 3.458341360092163, + "learning_rate": 5.401388014763027e-06, + "loss": 0.8066, + "step": 10567 + }, + { + "epoch": 0.6626120759922253, + "grad_norm": 3.271042823791504, + "learning_rate": 5.39958480940228e-06, + "loss": 1.049, + "step": 10568 + }, + { + "epoch": 0.6626747758480156, + "grad_norm": 3.3830795288085938, + "learning_rate": 5.397781793762856e-06, + "loss": 1.2036, + "step": 10569 + }, + { + "epoch": 0.6627374757038059, + "grad_norm": 3.051724433898926, + "learning_rate": 5.395978967919112e-06, + "loss": 1.0647, + "step": 10570 + }, + { + "epoch": 0.6628001755595962, + "grad_norm": 3.3879058361053467, + "learning_rate": 5.3941763319453936e-06, + "loss": 1.2395, + "step": 10571 + }, + { + "epoch": 0.6628628754153866, + "grad_norm": 3.3692569732666016, + "learning_rate": 5.392373885916047e-06, + "loss": 1.0688, + "step": 10572 + }, + { + "epoch": 0.6629255752711769, + "grad_norm": 3.4893059730529785, + "learning_rate": 5.390571629905404e-06, + "loss": 0.999, + "step": 10573 + }, + { + "epoch": 0.6629882751269672, + "grad_norm": 2.9660210609436035, + "learning_rate": 5.388769563987786e-06, + "loss": 1.0036, + "step": 10574 + }, + { + "epoch": 0.6630509749827576, + "grad_norm": 2.8700003623962402, + "learning_rate": 5.38696768823751e-06, + "loss": 1.0697, + "step": 10575 + }, + { + "epoch": 0.6631136748385479, + "grad_norm": 3.1192281246185303, + "learning_rate": 5.385166002728892e-06, + "loss": 1.0131, + "step": 10576 + }, + { + "epoch": 0.6631763746943382, + "grad_norm": 3.0969276428222656, + "learning_rate": 5.3833645075362295e-06, + "loss": 0.9708, + 
"step": 10577 + }, + { + "epoch": 0.6632390745501285, + "grad_norm": 3.0665249824523926, + "learning_rate": 5.381563202733813e-06, + "loss": 1.0192, + "step": 10578 + }, + { + "epoch": 0.6633017744059189, + "grad_norm": 3.483731985092163, + "learning_rate": 5.379762088395935e-06, + "loss": 1.0088, + "step": 10579 + }, + { + "epoch": 0.6633644742617092, + "grad_norm": 2.9191322326660156, + "learning_rate": 5.3779611645968696e-06, + "loss": 1.0812, + "step": 10580 + }, + { + "epoch": 0.6634271741174995, + "grad_norm": 3.023737668991089, + "learning_rate": 5.3761604314108885e-06, + "loss": 1.084, + "step": 10581 + }, + { + "epoch": 0.6634898739732898, + "grad_norm": 3.186194658279419, + "learning_rate": 5.3743598889122485e-06, + "loss": 1.1608, + "step": 10582 + }, + { + "epoch": 0.6635525738290802, + "grad_norm": 3.247446298599243, + "learning_rate": 5.372559537175212e-06, + "loss": 1.1572, + "step": 10583 + }, + { + "epoch": 0.6636152736848705, + "grad_norm": 3.004187822341919, + "learning_rate": 5.370759376274024e-06, + "loss": 1.0662, + "step": 10584 + }, + { + "epoch": 0.6636779735406608, + "grad_norm": 3.2001185417175293, + "learning_rate": 5.368959406282917e-06, + "loss": 1.1406, + "step": 10585 + }, + { + "epoch": 0.6637406733964512, + "grad_norm": 3.0654499530792236, + "learning_rate": 5.3671596272761304e-06, + "loss": 1.2092, + "step": 10586 + }, + { + "epoch": 0.6638033732522415, + "grad_norm": 3.481685161590576, + "learning_rate": 5.365360039327883e-06, + "loss": 1.0358, + "step": 10587 + }, + { + "epoch": 0.6638660731080318, + "grad_norm": 3.190424680709839, + "learning_rate": 5.363560642512389e-06, + "loss": 1.1196, + "step": 10588 + }, + { + "epoch": 0.6639287729638221, + "grad_norm": 3.457348585128784, + "learning_rate": 5.361761436903854e-06, + "loss": 0.87, + "step": 10589 + }, + { + "epoch": 0.6639914728196125, + "grad_norm": 3.378230094909668, + "learning_rate": 5.359962422576484e-06, + "loss": 1.0389, + "step": 10590 + }, + { + "epoch": 0.6640541726754029, + "grad_norm": 3.098067283630371, + "learning_rate": 5.358163599604464e-06, + "loss": 1.1198, + "step": 10591 + }, + { + "epoch": 0.6641168725311932, + "grad_norm": 3.1074626445770264, + "learning_rate": 5.3563649680619825e-06, + "loss": 0.9445, + "step": 10592 + }, + { + "epoch": 0.6641795723869836, + "grad_norm": 3.3009095191955566, + "learning_rate": 5.354566528023207e-06, + "loss": 1.0842, + "step": 10593 + }, + { + "epoch": 0.6642422722427739, + "grad_norm": 3.394300937652588, + "learning_rate": 5.352768279562315e-06, + "loss": 1.1027, + "step": 10594 + }, + { + "epoch": 0.6643049720985642, + "grad_norm": 3.0560190677642822, + "learning_rate": 5.350970222753462e-06, + "loss": 1.1234, + "step": 10595 + }, + { + "epoch": 0.6643676719543545, + "grad_norm": 3.1647472381591797, + "learning_rate": 5.3491723576707975e-06, + "loss": 1.1368, + "step": 10596 + }, + { + "epoch": 0.6644303718101449, + "grad_norm": 3.194413185119629, + "learning_rate": 5.347374684388471e-06, + "loss": 1.0316, + "step": 10597 + }, + { + "epoch": 0.6644930716659352, + "grad_norm": 3.01580548286438, + "learning_rate": 5.345577202980615e-06, + "loss": 1.0639, + "step": 10598 + }, + { + "epoch": 0.6645557715217255, + "grad_norm": 3.4119598865509033, + "learning_rate": 5.343779913521359e-06, + "loss": 1.0748, + "step": 10599 + }, + { + "epoch": 0.6646184713775158, + "grad_norm": 3.0811541080474854, + "learning_rate": 5.341982816084819e-06, + "loss": 1.0987, + "step": 10600 + }, + { + "epoch": 0.6646811712333062, + "grad_norm": 3.857017993927002, + 
"learning_rate": 5.340185910745115e-06, + "loss": 1.2219, + "step": 10601 + }, + { + "epoch": 0.6647438710890965, + "grad_norm": 3.461688280105591, + "learning_rate": 5.338389197576347e-06, + "loss": 1.0595, + "step": 10602 + }, + { + "epoch": 0.6648065709448868, + "grad_norm": 3.415849208831787, + "learning_rate": 5.336592676652612e-06, + "loss": 0.9083, + "step": 10603 + }, + { + "epoch": 0.6648692708006771, + "grad_norm": 3.134913206100464, + "learning_rate": 5.334796348047995e-06, + "loss": 1.1541, + "step": 10604 + }, + { + "epoch": 0.6649319706564675, + "grad_norm": 2.990360736846924, + "learning_rate": 5.333000211836585e-06, + "loss": 1.1682, + "step": 10605 + }, + { + "epoch": 0.6649946705122578, + "grad_norm": 3.6217334270477295, + "learning_rate": 5.331204268092449e-06, + "loss": 1.0617, + "step": 10606 + }, + { + "epoch": 0.6650573703680481, + "grad_norm": 3.2859046459198, + "learning_rate": 5.329408516889649e-06, + "loss": 1.1499, + "step": 10607 + }, + { + "epoch": 0.6651200702238385, + "grad_norm": 3.0513076782226562, + "learning_rate": 5.32761295830225e-06, + "loss": 1.1302, + "step": 10608 + }, + { + "epoch": 0.6651827700796288, + "grad_norm": 3.532027244567871, + "learning_rate": 5.325817592404297e-06, + "loss": 0.9502, + "step": 10609 + }, + { + "epoch": 0.6652454699354191, + "grad_norm": 3.40156626701355, + "learning_rate": 5.324022419269831e-06, + "loss": 1.2671, + "step": 10610 + }, + { + "epoch": 0.6653081697912094, + "grad_norm": 3.234879732131958, + "learning_rate": 5.32222743897288e-06, + "loss": 1.0968, + "step": 10611 + }, + { + "epoch": 0.6653708696469998, + "grad_norm": 3.4069344997406006, + "learning_rate": 5.320432651587477e-06, + "loss": 1.0449, + "step": 10612 + }, + { + "epoch": 0.6654335695027901, + "grad_norm": 3.4003689289093018, + "learning_rate": 5.318638057187637e-06, + "loss": 1.1246, + "step": 10613 + }, + { + "epoch": 0.6654962693585805, + "grad_norm": 3.8236570358276367, + "learning_rate": 5.316843655847368e-06, + "loss": 0.924, + "step": 10614 + }, + { + "epoch": 0.6655589692143709, + "grad_norm": 3.1518192291259766, + "learning_rate": 5.3150494476406655e-06, + "loss": 0.9699, + "step": 10615 + }, + { + "epoch": 0.6656216690701612, + "grad_norm": 3.5314273834228516, + "learning_rate": 5.313255432641533e-06, + "loss": 0.9929, + "step": 10616 + }, + { + "epoch": 0.6656843689259515, + "grad_norm": 2.976710796356201, + "learning_rate": 5.311461610923951e-06, + "loss": 1.0434, + "step": 10617 + }, + { + "epoch": 0.6657470687817418, + "grad_norm": 2.9314780235290527, + "learning_rate": 5.309667982561895e-06, + "loss": 1.0716, + "step": 10618 + }, + { + "epoch": 0.6658097686375322, + "grad_norm": 3.2415432929992676, + "learning_rate": 5.307874547629339e-06, + "loss": 1.2338, + "step": 10619 + }, + { + "epoch": 0.6658724684933225, + "grad_norm": 2.98350191116333, + "learning_rate": 5.306081306200242e-06, + "loss": 1.2288, + "step": 10620 + }, + { + "epoch": 0.6659351683491128, + "grad_norm": 3.57680606842041, + "learning_rate": 5.3042882583485555e-06, + "loss": 1.2042, + "step": 10621 + }, + { + "epoch": 0.6659978682049031, + "grad_norm": 3.3575687408447266, + "learning_rate": 5.302495404148223e-06, + "loss": 1.0791, + "step": 10622 + }, + { + "epoch": 0.6660605680606935, + "grad_norm": 2.92527437210083, + "learning_rate": 5.300702743673188e-06, + "loss": 1.088, + "step": 10623 + }, + { + "epoch": 0.6661232679164838, + "grad_norm": 2.9565744400024414, + "learning_rate": 5.2989102769973785e-06, + "loss": 1.0969, + "step": 10624 + }, + { + "epoch": 
0.6661859677722741, + "grad_norm": 3.2361979484558105, + "learning_rate": 5.297118004194714e-06, + "loss": 1.2132, + "step": 10625 + }, + { + "epoch": 0.6662486676280645, + "grad_norm": 3.695530652999878, + "learning_rate": 5.295325925339105e-06, + "loss": 0.9626, + "step": 10626 + }, + { + "epoch": 0.6663113674838548, + "grad_norm": 3.304067611694336, + "learning_rate": 5.293534040504464e-06, + "loss": 0.9632, + "step": 10627 + }, + { + "epoch": 0.6663740673396451, + "grad_norm": 2.967451333999634, + "learning_rate": 5.2917423497646834e-06, + "loss": 1.202, + "step": 10628 + }, + { + "epoch": 0.6664367671954354, + "grad_norm": 2.94107723236084, + "learning_rate": 5.2899508531936526e-06, + "loss": 1.063, + "step": 10629 + }, + { + "epoch": 0.6664994670512258, + "grad_norm": 3.1837615966796875, + "learning_rate": 5.2881595508652565e-06, + "loss": 1.0338, + "step": 10630 + }, + { + "epoch": 0.6665621669070161, + "grad_norm": 3.454002857208252, + "learning_rate": 5.286368442853367e-06, + "loss": 1.0303, + "step": 10631 + }, + { + "epoch": 0.6666248667628064, + "grad_norm": 3.216582775115967, + "learning_rate": 5.284577529231847e-06, + "loss": 1.0701, + "step": 10632 + }, + { + "epoch": 0.6666875666185967, + "grad_norm": 3.2291371822357178, + "learning_rate": 5.282786810074554e-06, + "loss": 0.9752, + "step": 10633 + }, + { + "epoch": 0.6667502664743871, + "grad_norm": 3.170440673828125, + "learning_rate": 5.280996285455342e-06, + "loss": 1.1254, + "step": 10634 + }, + { + "epoch": 0.6668129663301774, + "grad_norm": 3.471094846725464, + "learning_rate": 5.279205955448048e-06, + "loss": 1.0852, + "step": 10635 + }, + { + "epoch": 0.6668756661859677, + "grad_norm": 3.219698667526245, + "learning_rate": 5.277415820126503e-06, + "loss": 1.086, + "step": 10636 + }, + { + "epoch": 0.6669383660417582, + "grad_norm": 3.1295177936553955, + "learning_rate": 5.275625879564541e-06, + "loss": 1.1075, + "step": 10637 + }, + { + "epoch": 0.6670010658975485, + "grad_norm": 3.558588743209839, + "learning_rate": 5.273836133835975e-06, + "loss": 0.8559, + "step": 10638 + }, + { + "epoch": 0.6670637657533388, + "grad_norm": 3.4948408603668213, + "learning_rate": 5.272046583014612e-06, + "loss": 1.0847, + "step": 10639 + }, + { + "epoch": 0.6671264656091291, + "grad_norm": 2.927227020263672, + "learning_rate": 5.270257227174249e-06, + "loss": 1.0593, + "step": 10640 + }, + { + "epoch": 0.6671891654649195, + "grad_norm": 3.1533002853393555, + "learning_rate": 5.26846806638869e-06, + "loss": 1.1903, + "step": 10641 + }, + { + "epoch": 0.6672518653207098, + "grad_norm": 3.173297643661499, + "learning_rate": 5.266679100731714e-06, + "loss": 1.086, + "step": 10642 + }, + { + "epoch": 0.6673145651765001, + "grad_norm": 3.5223257541656494, + "learning_rate": 5.2648903302771e-06, + "loss": 1.0124, + "step": 10643 + }, + { + "epoch": 0.6673772650322904, + "grad_norm": 3.3536295890808105, + "learning_rate": 5.26310175509861e-06, + "loss": 1.2071, + "step": 10644 + }, + { + "epoch": 0.6674399648880808, + "grad_norm": 3.6164608001708984, + "learning_rate": 5.2613133752700145e-06, + "loss": 1.0346, + "step": 10645 + }, + { + "epoch": 0.6675026647438711, + "grad_norm": 3.3266007900238037, + "learning_rate": 5.259525190865062e-06, + "loss": 1.1601, + "step": 10646 + }, + { + "epoch": 0.6675653645996614, + "grad_norm": 3.4925947189331055, + "learning_rate": 5.257737201957496e-06, + "loss": 1.1852, + "step": 10647 + }, + { + "epoch": 0.6676280644554518, + "grad_norm": 3.1337621212005615, + "learning_rate": 5.255949408621056e-06, + 
"loss": 1.1479, + "step": 10648 + }, + { + "epoch": 0.6676907643112421, + "grad_norm": 3.0039587020874023, + "learning_rate": 5.254161810929471e-06, + "loss": 1.1715, + "step": 10649 + }, + { + "epoch": 0.6677534641670324, + "grad_norm": 2.862135171890259, + "learning_rate": 5.25237440895646e-06, + "loss": 1.1213, + "step": 10650 + }, + { + "epoch": 0.6678161640228227, + "grad_norm": 3.6409268379211426, + "learning_rate": 5.250587202775732e-06, + "loss": 1.272, + "step": 10651 + }, + { + "epoch": 0.6678788638786131, + "grad_norm": 3.4482834339141846, + "learning_rate": 5.248800192460999e-06, + "loss": 1.1489, + "step": 10652 + }, + { + "epoch": 0.6679415637344034, + "grad_norm": 2.9151346683502197, + "learning_rate": 5.2470133780859535e-06, + "loss": 1.1486, + "step": 10653 + }, + { + "epoch": 0.6680042635901937, + "grad_norm": 3.2709665298461914, + "learning_rate": 5.245226759724283e-06, + "loss": 1.2432, + "step": 10654 + }, + { + "epoch": 0.668066963445984, + "grad_norm": 3.348846673965454, + "learning_rate": 5.243440337449666e-06, + "loss": 1.0354, + "step": 10655 + }, + { + "epoch": 0.6681296633017744, + "grad_norm": 3.12796688079834, + "learning_rate": 5.241654111335781e-06, + "loss": 1.0153, + "step": 10656 + }, + { + "epoch": 0.6681923631575647, + "grad_norm": 3.3274245262145996, + "learning_rate": 5.2398680814562875e-06, + "loss": 0.9968, + "step": 10657 + }, + { + "epoch": 0.668255063013355, + "grad_norm": 2.92645263671875, + "learning_rate": 5.238082247884838e-06, + "loss": 1.1577, + "step": 10658 + }, + { + "epoch": 0.6683177628691453, + "grad_norm": 3.3951256275177, + "learning_rate": 5.236296610695088e-06, + "loss": 0.9906, + "step": 10659 + }, + { + "epoch": 0.6683804627249358, + "grad_norm": 3.3733325004577637, + "learning_rate": 5.234511169960677e-06, + "loss": 1.082, + "step": 10660 + }, + { + "epoch": 0.6684431625807261, + "grad_norm": 3.0732245445251465, + "learning_rate": 5.23272592575523e-06, + "loss": 1.1025, + "step": 10661 + }, + { + "epoch": 0.6685058624365164, + "grad_norm": 3.2284882068634033, + "learning_rate": 5.230940878152371e-06, + "loss": 1.0244, + "step": 10662 + }, + { + "epoch": 0.6685685622923068, + "grad_norm": 2.9696171283721924, + "learning_rate": 5.229156027225724e-06, + "loss": 1.2003, + "step": 10663 + }, + { + "epoch": 0.6686312621480971, + "grad_norm": 3.2022507190704346, + "learning_rate": 5.2273713730488886e-06, + "loss": 1.0622, + "step": 10664 + }, + { + "epoch": 0.6686939620038874, + "grad_norm": 3.2516238689422607, + "learning_rate": 5.225586915695468e-06, + "loss": 1.0512, + "step": 10665 + }, + { + "epoch": 0.6687566618596777, + "grad_norm": 3.1645288467407227, + "learning_rate": 5.223802655239045e-06, + "loss": 1.0562, + "step": 10666 + }, + { + "epoch": 0.6688193617154681, + "grad_norm": 3.8756797313690186, + "learning_rate": 5.222018591753215e-06, + "loss": 1.0211, + "step": 10667 + }, + { + "epoch": 0.6688820615712584, + "grad_norm": 3.266103744506836, + "learning_rate": 5.220234725311547e-06, + "loss": 1.1095, + "step": 10668 + }, + { + "epoch": 0.6689447614270487, + "grad_norm": 3.1093201637268066, + "learning_rate": 5.218451055987603e-06, + "loss": 1.1575, + "step": 10669 + }, + { + "epoch": 0.669007461282839, + "grad_norm": 3.38667893409729, + "learning_rate": 5.21666758385495e-06, + "loss": 1.1438, + "step": 10670 + }, + { + "epoch": 0.6690701611386294, + "grad_norm": 3.547478199005127, + "learning_rate": 5.214884308987136e-06, + "loss": 1.0942, + "step": 10671 + }, + { + "epoch": 0.6691328609944197, + "grad_norm": 
3.274028778076172, + "learning_rate": 5.213101231457702e-06, + "loss": 1.1815, + "step": 10672 + }, + { + "epoch": 0.66919556085021, + "grad_norm": 3.0988447666168213, + "learning_rate": 5.2113183513401775e-06, + "loss": 1.0453, + "step": 10673 + }, + { + "epoch": 0.6692582607060004, + "grad_norm": 3.7410027980804443, + "learning_rate": 5.209535668708097e-06, + "loss": 0.9433, + "step": 10674 + }, + { + "epoch": 0.6693209605617907, + "grad_norm": 3.568603754043579, + "learning_rate": 5.207753183634976e-06, + "loss": 0.9156, + "step": 10675 + }, + { + "epoch": 0.669383660417581, + "grad_norm": 3.201312780380249, + "learning_rate": 5.205970896194321e-06, + "loss": 1.1164, + "step": 10676 + }, + { + "epoch": 0.6694463602733713, + "grad_norm": 2.990257978439331, + "learning_rate": 5.2041888064596335e-06, + "loss": 1.1032, + "step": 10677 + }, + { + "epoch": 0.6695090601291617, + "grad_norm": 3.4195196628570557, + "learning_rate": 5.202406914504412e-06, + "loss": 1.2387, + "step": 10678 + }, + { + "epoch": 0.669571759984952, + "grad_norm": 3.2957797050476074, + "learning_rate": 5.200625220402139e-06, + "loss": 1.1057, + "step": 10679 + }, + { + "epoch": 0.6696344598407423, + "grad_norm": 3.396575689315796, + "learning_rate": 5.198843724226288e-06, + "loss": 0.945, + "step": 10680 + }, + { + "epoch": 0.6696971596965327, + "grad_norm": 3.4575929641723633, + "learning_rate": 5.1970624260503346e-06, + "loss": 1.1839, + "step": 10681 + }, + { + "epoch": 0.669759859552323, + "grad_norm": 2.9002833366394043, + "learning_rate": 5.1952813259477375e-06, + "loss": 1.1217, + "step": 10682 + }, + { + "epoch": 0.6698225594081134, + "grad_norm": 3.1189470291137695, + "learning_rate": 5.193500423991948e-06, + "loss": 1.1184, + "step": 10683 + }, + { + "epoch": 0.6698852592639037, + "grad_norm": 3.2918450832366943, + "learning_rate": 5.191719720256407e-06, + "loss": 1.0059, + "step": 10684 + }, + { + "epoch": 0.6699479591196941, + "grad_norm": 3.160783052444458, + "learning_rate": 5.189939214814558e-06, + "loss": 1.1666, + "step": 10685 + }, + { + "epoch": 0.6700106589754844, + "grad_norm": 3.5450260639190674, + "learning_rate": 5.188158907739826e-06, + "loss": 0.998, + "step": 10686 + }, + { + "epoch": 0.6700733588312747, + "grad_norm": 3.574434757232666, + "learning_rate": 5.1863787991056266e-06, + "loss": 1.1962, + "step": 10687 + }, + { + "epoch": 0.670136058687065, + "grad_norm": 3.411588430404663, + "learning_rate": 5.184598888985382e-06, + "loss": 1.0648, + "step": 10688 + }, + { + "epoch": 0.6701987585428554, + "grad_norm": 3.6256470680236816, + "learning_rate": 5.182819177452487e-06, + "loss": 1.1747, + "step": 10689 + }, + { + "epoch": 0.6702614583986457, + "grad_norm": 3.3053436279296875, + "learning_rate": 5.18103966458034e-06, + "loss": 1.1049, + "step": 10690 + }, + { + "epoch": 0.670324158254436, + "grad_norm": 3.124746799468994, + "learning_rate": 5.179260350442323e-06, + "loss": 1.1538, + "step": 10691 + }, + { + "epoch": 0.6703868581102264, + "grad_norm": 3.0620930194854736, + "learning_rate": 5.177481235111825e-06, + "loss": 1.2238, + "step": 10692 + }, + { + "epoch": 0.6704495579660167, + "grad_norm": 3.2102630138397217, + "learning_rate": 5.1757023186622125e-06, + "loss": 0.9506, + "step": 10693 + }, + { + "epoch": 0.670512257821807, + "grad_norm": 3.165156602859497, + "learning_rate": 5.173923601166844e-06, + "loss": 1.1778, + "step": 10694 + }, + { + "epoch": 0.6705749576775973, + "grad_norm": 3.65224289894104, + "learning_rate": 5.172145082699076e-06, + "loss": 1.0884, + "step": 10695 + 
}, + { + "epoch": 0.6706376575333877, + "grad_norm": 3.608851194381714, + "learning_rate": 5.1703667633322575e-06, + "loss": 1.0912, + "step": 10696 + }, + { + "epoch": 0.670700357389178, + "grad_norm": 3.2371737957000732, + "learning_rate": 5.168588643139724e-06, + "loss": 1.0413, + "step": 10697 + }, + { + "epoch": 0.6707630572449683, + "grad_norm": 3.04880428314209, + "learning_rate": 5.166810722194804e-06, + "loss": 0.9874, + "step": 10698 + }, + { + "epoch": 0.6708257571007586, + "grad_norm": 3.0480542182922363, + "learning_rate": 5.165033000570825e-06, + "loss": 1.1361, + "step": 10699 + }, + { + "epoch": 0.670888456956549, + "grad_norm": 3.3231608867645264, + "learning_rate": 5.163255478341094e-06, + "loss": 1.1181, + "step": 10700 + }, + { + "epoch": 0.6709511568123393, + "grad_norm": 3.2833333015441895, + "learning_rate": 5.161478155578921e-06, + "loss": 1.0296, + "step": 10701 + }, + { + "epoch": 0.6710138566681296, + "grad_norm": 3.272735357284546, + "learning_rate": 5.159701032357594e-06, + "loss": 1.2474, + "step": 10702 + }, + { + "epoch": 0.67107655652392, + "grad_norm": 3.3508098125457764, + "learning_rate": 5.157924108750412e-06, + "loss": 1.0497, + "step": 10703 + }, + { + "epoch": 0.6711392563797103, + "grad_norm": 3.442284345626831, + "learning_rate": 5.156147384830652e-06, + "loss": 1.0336, + "step": 10704 + }, + { + "epoch": 0.6712019562355006, + "grad_norm": 3.3399147987365723, + "learning_rate": 5.154370860671584e-06, + "loss": 1.2198, + "step": 10705 + }, + { + "epoch": 0.671264656091291, + "grad_norm": 3.318002939224243, + "learning_rate": 5.152594536346471e-06, + "loss": 0.9683, + "step": 10706 + }, + { + "epoch": 0.6713273559470814, + "grad_norm": 3.199887990951538, + "learning_rate": 5.150818411928574e-06, + "loss": 1.1936, + "step": 10707 + }, + { + "epoch": 0.6713900558028717, + "grad_norm": 3.634536027908325, + "learning_rate": 5.149042487491138e-06, + "loss": 1.1737, + "step": 10708 + }, + { + "epoch": 0.671452755658662, + "grad_norm": 3.3724935054779053, + "learning_rate": 5.147266763107397e-06, + "loss": 1.1317, + "step": 10709 + }, + { + "epoch": 0.6715154555144524, + "grad_norm": 3.2017903327941895, + "learning_rate": 5.145491238850592e-06, + "loss": 1.0426, + "step": 10710 + }, + { + "epoch": 0.6715781553702427, + "grad_norm": 3.1748616695404053, + "learning_rate": 5.143715914793939e-06, + "loss": 1.1041, + "step": 10711 + }, + { + "epoch": 0.671640855226033, + "grad_norm": 3.019047260284424, + "learning_rate": 5.141940791010654e-06, + "loss": 1.0119, + "step": 10712 + }, + { + "epoch": 0.6717035550818233, + "grad_norm": 3.0312697887420654, + "learning_rate": 5.14016586757394e-06, + "loss": 1.3215, + "step": 10713 + }, + { + "epoch": 0.6717662549376137, + "grad_norm": 3.2874786853790283, + "learning_rate": 5.138391144557e-06, + "loss": 1.0005, + "step": 10714 + }, + { + "epoch": 0.671828954793404, + "grad_norm": 3.0078885555267334, + "learning_rate": 5.136616622033023e-06, + "loss": 1.1789, + "step": 10715 + }, + { + "epoch": 0.6718916546491943, + "grad_norm": 3.0103211402893066, + "learning_rate": 5.134842300075188e-06, + "loss": 1.1494, + "step": 10716 + }, + { + "epoch": 0.6719543545049846, + "grad_norm": 3.039278507232666, + "learning_rate": 5.133068178756666e-06, + "loss": 1.1278, + "step": 10717 + }, + { + "epoch": 0.672017054360775, + "grad_norm": 3.5210423469543457, + "learning_rate": 5.1312942581506275e-06, + "loss": 1.0754, + "step": 10718 + }, + { + "epoch": 0.6720797542165653, + "grad_norm": 3.410390615463257, + "learning_rate": 
5.129520538330227e-06, + "loss": 1.0538, + "step": 10719 + }, + { + "epoch": 0.6721424540723556, + "grad_norm": 3.432077646255493, + "learning_rate": 5.127747019368609e-06, + "loss": 1.0389, + "step": 10720 + }, + { + "epoch": 0.672205153928146, + "grad_norm": 3.257520914077759, + "learning_rate": 5.12597370133892e-06, + "loss": 1.1137, + "step": 10721 + }, + { + "epoch": 0.6722678537839363, + "grad_norm": 3.824169158935547, + "learning_rate": 5.124200584314287e-06, + "loss": 1.0755, + "step": 10722 + }, + { + "epoch": 0.6723305536397266, + "grad_norm": 3.064331531524658, + "learning_rate": 5.122427668367838e-06, + "loss": 0.9814, + "step": 10723 + }, + { + "epoch": 0.6723932534955169, + "grad_norm": 3.1828346252441406, + "learning_rate": 5.120654953572678e-06, + "loss": 1.1523, + "step": 10724 + }, + { + "epoch": 0.6724559533513073, + "grad_norm": 3.1499650478363037, + "learning_rate": 5.118882440001926e-06, + "loss": 1.0652, + "step": 10725 + }, + { + "epoch": 0.6725186532070976, + "grad_norm": 3.405853748321533, + "learning_rate": 5.117110127728675e-06, + "loss": 1.0144, + "step": 10726 + }, + { + "epoch": 0.6725813530628879, + "grad_norm": 3.0993597507476807, + "learning_rate": 5.115338016826017e-06, + "loss": 1.0714, + "step": 10727 + }, + { + "epoch": 0.6726440529186782, + "grad_norm": 3.6425940990448, + "learning_rate": 5.1135661073670275e-06, + "loss": 0.9928, + "step": 10728 + }, + { + "epoch": 0.6727067527744686, + "grad_norm": 3.0566868782043457, + "learning_rate": 5.1117943994247875e-06, + "loss": 1.127, + "step": 10729 + }, + { + "epoch": 0.672769452630259, + "grad_norm": 3.3737680912017822, + "learning_rate": 5.110022893072361e-06, + "loss": 1.1542, + "step": 10730 + }, + { + "epoch": 0.6728321524860493, + "grad_norm": 3.4370529651641846, + "learning_rate": 5.108251588382802e-06, + "loss": 1.1215, + "step": 10731 + }, + { + "epoch": 0.6728948523418397, + "grad_norm": 3.715378522872925, + "learning_rate": 5.106480485429162e-06, + "loss": 1.1977, + "step": 10732 + }, + { + "epoch": 0.67295755219763, + "grad_norm": 3.1434361934661865, + "learning_rate": 5.104709584284483e-06, + "loss": 1.1713, + "step": 10733 + }, + { + "epoch": 0.6730202520534203, + "grad_norm": 3.212432861328125, + "learning_rate": 5.1029388850217935e-06, + "loss": 1.1156, + "step": 10734 + }, + { + "epoch": 0.6730829519092106, + "grad_norm": 3.920046806335449, + "learning_rate": 5.101168387714114e-06, + "loss": 1.0945, + "step": 10735 + }, + { + "epoch": 0.673145651765001, + "grad_norm": 3.6109962463378906, + "learning_rate": 5.099398092434469e-06, + "loss": 0.9255, + "step": 10736 + }, + { + "epoch": 0.6732083516207913, + "grad_norm": 3.392313003540039, + "learning_rate": 5.097627999255859e-06, + "loss": 1.106, + "step": 10737 + }, + { + "epoch": 0.6732710514765816, + "grad_norm": 3.2157204151153564, + "learning_rate": 5.095858108251286e-06, + "loss": 1.1472, + "step": 10738 + }, + { + "epoch": 0.6733337513323719, + "grad_norm": 3.1528377532958984, + "learning_rate": 5.094088419493734e-06, + "loss": 1.1199, + "step": 10739 + }, + { + "epoch": 0.6733964511881623, + "grad_norm": 3.313138008117676, + "learning_rate": 5.092318933056194e-06, + "loss": 0.9886, + "step": 10740 + }, + { + "epoch": 0.6734591510439526, + "grad_norm": 3.1269521713256836, + "learning_rate": 5.0905496490116355e-06, + "loss": 1.1018, + "step": 10741 + }, + { + "epoch": 0.6735218508997429, + "grad_norm": 3.7676734924316406, + "learning_rate": 5.08878056743302e-06, + "loss": 0.9412, + "step": 10742 + }, + { + "epoch": 0.6735845507555333, + 
"grad_norm": 3.2394397258758545, + "learning_rate": 5.087011688393313e-06, + "loss": 1.0646, + "step": 10743 + }, + { + "epoch": 0.6736472506113236, + "grad_norm": 3.1371686458587646, + "learning_rate": 5.085243011965457e-06, + "loss": 1.0756, + "step": 10744 + }, + { + "epoch": 0.6737099504671139, + "grad_norm": 3.5586984157562256, + "learning_rate": 5.083474538222395e-06, + "loss": 1.0504, + "step": 10745 + }, + { + "epoch": 0.6737726503229042, + "grad_norm": 3.2882823944091797, + "learning_rate": 5.081706267237053e-06, + "loss": 1.0661, + "step": 10746 + }, + { + "epoch": 0.6738353501786946, + "grad_norm": 3.411317825317383, + "learning_rate": 5.079938199082363e-06, + "loss": 0.9999, + "step": 10747 + }, + { + "epoch": 0.6738980500344849, + "grad_norm": 3.226238250732422, + "learning_rate": 5.078170333831238e-06, + "loss": 1.1204, + "step": 10748 + }, + { + "epoch": 0.6739607498902752, + "grad_norm": 3.3115999698638916, + "learning_rate": 5.076402671556578e-06, + "loss": 1.1807, + "step": 10749 + }, + { + "epoch": 0.6740234497460655, + "grad_norm": 3.4010069370269775, + "learning_rate": 5.0746352123312914e-06, + "loss": 1.0554, + "step": 10750 + }, + { + "epoch": 0.6740861496018559, + "grad_norm": 3.0611138343811035, + "learning_rate": 5.072867956228265e-06, + "loss": 1.1093, + "step": 10751 + }, + { + "epoch": 0.6741488494576462, + "grad_norm": 3.2691328525543213, + "learning_rate": 5.0711009033203775e-06, + "loss": 1.1147, + "step": 10752 + }, + { + "epoch": 0.6742115493134366, + "grad_norm": 3.3138740062713623, + "learning_rate": 5.0693340536805e-06, + "loss": 1.0279, + "step": 10753 + }, + { + "epoch": 0.674274249169227, + "grad_norm": 3.2982192039489746, + "learning_rate": 5.067567407381507e-06, + "loss": 1.1457, + "step": 10754 + }, + { + "epoch": 0.6743369490250173, + "grad_norm": 3.1262121200561523, + "learning_rate": 5.065800964496248e-06, + "loss": 1.0508, + "step": 10755 + }, + { + "epoch": 0.6743996488808076, + "grad_norm": 3.6611900329589844, + "learning_rate": 5.064034725097573e-06, + "loss": 1.0393, + "step": 10756 + }, + { + "epoch": 0.6744623487365979, + "grad_norm": 3.5697882175445557, + "learning_rate": 5.062268689258317e-06, + "loss": 1.1576, + "step": 10757 + }, + { + "epoch": 0.6745250485923883, + "grad_norm": 3.2572598457336426, + "learning_rate": 5.0605028570513215e-06, + "loss": 1.1612, + "step": 10758 + }, + { + "epoch": 0.6745877484481786, + "grad_norm": 3.0745527744293213, + "learning_rate": 5.058737228549402e-06, + "loss": 1.1537, + "step": 10759 + }, + { + "epoch": 0.6746504483039689, + "grad_norm": 3.3951494693756104, + "learning_rate": 5.056971803825372e-06, + "loss": 0.9512, + "step": 10760 + }, + { + "epoch": 0.6747131481597592, + "grad_norm": 3.293943405151367, + "learning_rate": 5.0552065829520445e-06, + "loss": 1.1454, + "step": 10761 + }, + { + "epoch": 0.6747758480155496, + "grad_norm": 3.2229061126708984, + "learning_rate": 5.053441566002214e-06, + "loss": 1.1065, + "step": 10762 + }, + { + "epoch": 0.6748385478713399, + "grad_norm": 3.2503855228424072, + "learning_rate": 5.051676753048669e-06, + "loss": 1.0345, + "step": 10763 + }, + { + "epoch": 0.6749012477271302, + "grad_norm": 2.9576423168182373, + "learning_rate": 5.049912144164186e-06, + "loss": 1.0017, + "step": 10764 + }, + { + "epoch": 0.6749639475829206, + "grad_norm": 3.2853074073791504, + "learning_rate": 5.048147739421548e-06, + "loss": 1.1879, + "step": 10765 + }, + { + "epoch": 0.6750266474387109, + "grad_norm": 2.9674360752105713, + "learning_rate": 5.046383538893512e-06, + 
"loss": 0.9777, + "step": 10766 + }, + { + "epoch": 0.6750893472945012, + "grad_norm": 3.2857069969177246, + "learning_rate": 5.044619542652835e-06, + "loss": 1.2604, + "step": 10767 + }, + { + "epoch": 0.6751520471502915, + "grad_norm": 3.6066818237304688, + "learning_rate": 5.042855750772262e-06, + "loss": 0.9299, + "step": 10768 + }, + { + "epoch": 0.6752147470060819, + "grad_norm": 3.3697338104248047, + "learning_rate": 5.041092163324537e-06, + "loss": 1.0082, + "step": 10769 + }, + { + "epoch": 0.6752774468618722, + "grad_norm": 3.151648998260498, + "learning_rate": 5.039328780382389e-06, + "loss": 1.0682, + "step": 10770 + }, + { + "epoch": 0.6753401467176625, + "grad_norm": 3.095632314682007, + "learning_rate": 5.037565602018534e-06, + "loss": 0.9954, + "step": 10771 + }, + { + "epoch": 0.6754028465734528, + "grad_norm": 3.2478315830230713, + "learning_rate": 5.035802628305694e-06, + "loss": 1.0188, + "step": 10772 + }, + { + "epoch": 0.6754655464292432, + "grad_norm": 4.000274181365967, + "learning_rate": 5.03403985931657e-06, + "loss": 1.2419, + "step": 10773 + }, + { + "epoch": 0.6755282462850335, + "grad_norm": 3.4591221809387207, + "learning_rate": 5.0322772951238595e-06, + "loss": 1.0714, + "step": 10774 + }, + { + "epoch": 0.6755909461408238, + "grad_norm": 3.396099328994751, + "learning_rate": 5.030514935800247e-06, + "loss": 0.9886, + "step": 10775 + }, + { + "epoch": 0.6756536459966143, + "grad_norm": 3.5087578296661377, + "learning_rate": 5.028752781418419e-06, + "loss": 0.8479, + "step": 10776 + }, + { + "epoch": 0.6757163458524046, + "grad_norm": 3.3424463272094727, + "learning_rate": 5.026990832051044e-06, + "loss": 1.0384, + "step": 10777 + }, + { + "epoch": 0.6757790457081949, + "grad_norm": 3.4173879623413086, + "learning_rate": 5.025229087770783e-06, + "loss": 0.9331, + "step": 10778 + }, + { + "epoch": 0.6758417455639852, + "grad_norm": 3.3341400623321533, + "learning_rate": 5.023467548650288e-06, + "loss": 1.1137, + "step": 10779 + }, + { + "epoch": 0.6759044454197756, + "grad_norm": 3.106017827987671, + "learning_rate": 5.021706214762214e-06, + "loss": 1.0934, + "step": 10780 + }, + { + "epoch": 0.6759671452755659, + "grad_norm": 3.372760057449341, + "learning_rate": 5.019945086179192e-06, + "loss": 1.2816, + "step": 10781 + }, + { + "epoch": 0.6760298451313562, + "grad_norm": 2.986182451248169, + "learning_rate": 5.018184162973847e-06, + "loss": 1.1483, + "step": 10782 + }, + { + "epoch": 0.6760925449871465, + "grad_norm": 3.289806842803955, + "learning_rate": 5.01642344521881e-06, + "loss": 1.1092, + "step": 10783 + }, + { + "epoch": 0.6761552448429369, + "grad_norm": 3.2590630054473877, + "learning_rate": 5.014662932986688e-06, + "loss": 1.1754, + "step": 10784 + }, + { + "epoch": 0.6762179446987272, + "grad_norm": 3.1313092708587646, + "learning_rate": 5.0129026263500845e-06, + "loss": 0.9566, + "step": 10785 + }, + { + "epoch": 0.6762806445545175, + "grad_norm": 3.335696220397949, + "learning_rate": 5.011142525381589e-06, + "loss": 0.8967, + "step": 10786 + }, + { + "epoch": 0.6763433444103079, + "grad_norm": 3.38550066947937, + "learning_rate": 5.009382630153798e-06, + "loss": 1.0686, + "step": 10787 + }, + { + "epoch": 0.6764060442660982, + "grad_norm": 3.792372465133667, + "learning_rate": 5.007622940739288e-06, + "loss": 0.9913, + "step": 10788 + }, + { + "epoch": 0.6764687441218885, + "grad_norm": 3.53946590423584, + "learning_rate": 5.005863457210622e-06, + "loss": 0.9868, + "step": 10789 + }, + { + "epoch": 0.6765314439776788, + "grad_norm": 
3.580089569091797, + "learning_rate": 5.004104179640363e-06, + "loss": 1.1775, + "step": 10790 + }, + { + "epoch": 0.6765941438334692, + "grad_norm": 3.268561363220215, + "learning_rate": 5.00234510810107e-06, + "loss": 1.1124, + "step": 10791 + }, + { + "epoch": 0.6766568436892595, + "grad_norm": 3.480064868927002, + "learning_rate": 5.000586242665283e-06, + "loss": 1.1112, + "step": 10792 + }, + { + "epoch": 0.6767195435450498, + "grad_norm": 3.5867748260498047, + "learning_rate": 4.998827583405533e-06, + "loss": 1.1304, + "step": 10793 + }, + { + "epoch": 0.6767822434008401, + "grad_norm": 3.4953153133392334, + "learning_rate": 4.997069130394356e-06, + "loss": 1.2095, + "step": 10794 + }, + { + "epoch": 0.6768449432566305, + "grad_norm": 3.4559998512268066, + "learning_rate": 4.995310883704267e-06, + "loss": 1.096, + "step": 10795 + }, + { + "epoch": 0.6769076431124208, + "grad_norm": 3.5235071182250977, + "learning_rate": 4.993552843407776e-06, + "loss": 1.1284, + "step": 10796 + }, + { + "epoch": 0.6769703429682111, + "grad_norm": 3.2207798957824707, + "learning_rate": 4.99179500957738e-06, + "loss": 1.1391, + "step": 10797 + }, + { + "epoch": 0.6770330428240015, + "grad_norm": 3.440598726272583, + "learning_rate": 4.9900373822855805e-06, + "loss": 1.0589, + "step": 10798 + }, + { + "epoch": 0.6770957426797919, + "grad_norm": 3.286820888519287, + "learning_rate": 4.988279961604858e-06, + "loss": 0.9811, + "step": 10799 + }, + { + "epoch": 0.6771584425355822, + "grad_norm": 3.3891522884368896, + "learning_rate": 4.986522747607685e-06, + "loss": 1.0261, + "step": 10800 + }, + { + "epoch": 0.6772211423913725, + "grad_norm": 3.356290817260742, + "learning_rate": 4.9847657403665365e-06, + "loss": 1.0342, + "step": 10801 + }, + { + "epoch": 0.6772838422471629, + "grad_norm": 2.961015224456787, + "learning_rate": 4.983008939953867e-06, + "loss": 1.0938, + "step": 10802 + }, + { + "epoch": 0.6773465421029532, + "grad_norm": 3.5555615425109863, + "learning_rate": 4.981252346442129e-06, + "loss": 1.0733, + "step": 10803 + }, + { + "epoch": 0.6774092419587435, + "grad_norm": 3.5227203369140625, + "learning_rate": 4.979495959903759e-06, + "loss": 0.9146, + "step": 10804 + }, + { + "epoch": 0.6774719418145339, + "grad_norm": 3.3607215881347656, + "learning_rate": 4.9777397804112e-06, + "loss": 1.1687, + "step": 10805 + }, + { + "epoch": 0.6775346416703242, + "grad_norm": 3.143085241317749, + "learning_rate": 4.9759838080368705e-06, + "loss": 0.9901, + "step": 10806 + }, + { + "epoch": 0.6775973415261145, + "grad_norm": 3.1744132041931152, + "learning_rate": 4.974228042853189e-06, + "loss": 1.1162, + "step": 10807 + }, + { + "epoch": 0.6776600413819048, + "grad_norm": 3.4621243476867676, + "learning_rate": 4.972472484932558e-06, + "loss": 0.9041, + "step": 10808 + }, + { + "epoch": 0.6777227412376952, + "grad_norm": 3.3658628463745117, + "learning_rate": 4.9707171343473845e-06, + "loss": 1.0608, + "step": 10809 + }, + { + "epoch": 0.6777854410934855, + "grad_norm": 3.2843430042266846, + "learning_rate": 4.968961991170058e-06, + "loss": 1.2091, + "step": 10810 + }, + { + "epoch": 0.6778481409492758, + "grad_norm": 3.168783187866211, + "learning_rate": 4.967207055472953e-06, + "loss": 1.0572, + "step": 10811 + }, + { + "epoch": 0.6779108408050661, + "grad_norm": 3.5790395736694336, + "learning_rate": 4.965452327328454e-06, + "loss": 1.0141, + "step": 10812 + }, + { + "epoch": 0.6779735406608565, + "grad_norm": 3.293534278869629, + "learning_rate": 4.96369780680892e-06, + "loss": 1.1215, + "step": 
10813 + }, + { + "epoch": 0.6780362405166468, + "grad_norm": 2.962618827819824, + "learning_rate": 4.961943493986709e-06, + "loss": 1.3082, + "step": 10814 + }, + { + "epoch": 0.6780989403724371, + "grad_norm": 3.5806586742401123, + "learning_rate": 4.960189388934163e-06, + "loss": 0.9811, + "step": 10815 + }, + { + "epoch": 0.6781616402282274, + "grad_norm": 3.8460400104522705, + "learning_rate": 4.958435491723632e-06, + "loss": 1.0212, + "step": 10816 + }, + { + "epoch": 0.6782243400840178, + "grad_norm": 3.431983470916748, + "learning_rate": 4.956681802427441e-06, + "loss": 1.2001, + "step": 10817 + }, + { + "epoch": 0.6782870399398081, + "grad_norm": 3.373911142349243, + "learning_rate": 4.954928321117913e-06, + "loss": 1.0813, + "step": 10818 + }, + { + "epoch": 0.6783497397955984, + "grad_norm": 3.3263583183288574, + "learning_rate": 4.953175047867357e-06, + "loss": 1.0105, + "step": 10819 + }, + { + "epoch": 0.6784124396513888, + "grad_norm": 3.4118728637695312, + "learning_rate": 4.951421982748087e-06, + "loss": 1.1371, + "step": 10820 + }, + { + "epoch": 0.6784751395071791, + "grad_norm": 3.2620508670806885, + "learning_rate": 4.949669125832394e-06, + "loss": 1.0934, + "step": 10821 + }, + { + "epoch": 0.6785378393629695, + "grad_norm": 3.6274936199188232, + "learning_rate": 4.947916477192564e-06, + "loss": 1.0699, + "step": 10822 + }, + { + "epoch": 0.6786005392187598, + "grad_norm": 3.193506956100464, + "learning_rate": 4.946164036900882e-06, + "loss": 1.0668, + "step": 10823 + }, + { + "epoch": 0.6786632390745502, + "grad_norm": 3.2918765544891357, + "learning_rate": 4.9444118050296165e-06, + "loss": 1.1032, + "step": 10824 + }, + { + "epoch": 0.6787259389303405, + "grad_norm": 3.105626106262207, + "learning_rate": 4.942659781651028e-06, + "loss": 1.1145, + "step": 10825 + }, + { + "epoch": 0.6787886387861308, + "grad_norm": 3.148793935775757, + "learning_rate": 4.940907966837368e-06, + "loss": 0.9327, + "step": 10826 + }, + { + "epoch": 0.6788513386419212, + "grad_norm": 3.183741569519043, + "learning_rate": 4.939156360660888e-06, + "loss": 1.1487, + "step": 10827 + }, + { + "epoch": 0.6789140384977115, + "grad_norm": 3.235567569732666, + "learning_rate": 4.9374049631938205e-06, + "loss": 1.1386, + "step": 10828 + }, + { + "epoch": 0.6789767383535018, + "grad_norm": 3.330669641494751, + "learning_rate": 4.935653774508393e-06, + "loss": 1.2598, + "step": 10829 + }, + { + "epoch": 0.6790394382092921, + "grad_norm": 3.3048319816589355, + "learning_rate": 4.933902794676823e-06, + "loss": 1.0239, + "step": 10830 + }, + { + "epoch": 0.6791021380650825, + "grad_norm": 3.701045513153076, + "learning_rate": 4.932152023771325e-06, + "loss": 0.9763, + "step": 10831 + }, + { + "epoch": 0.6791648379208728, + "grad_norm": 3.691720485687256, + "learning_rate": 4.930401461864099e-06, + "loss": 1.0774, + "step": 10832 + }, + { + "epoch": 0.6792275377766631, + "grad_norm": 3.3990187644958496, + "learning_rate": 4.928651109027335e-06, + "loss": 1.0855, + "step": 10833 + }, + { + "epoch": 0.6792902376324534, + "grad_norm": 3.3936331272125244, + "learning_rate": 4.926900965333225e-06, + "loss": 1.0337, + "step": 10834 + }, + { + "epoch": 0.6793529374882438, + "grad_norm": 3.3029801845550537, + "learning_rate": 4.92515103085394e-06, + "loss": 1.1515, + "step": 10835 + }, + { + "epoch": 0.6794156373440341, + "grad_norm": 3.1268110275268555, + "learning_rate": 4.923401305661647e-06, + "loss": 1.1489, + "step": 10836 + }, + { + "epoch": 0.6794783371998244, + "grad_norm": 3.1714260578155518, + 
"learning_rate": 4.921651789828503e-06, + "loss": 1.1201, + "step": 10837 + }, + { + "epoch": 0.6795410370556147, + "grad_norm": 3.0183451175689697, + "learning_rate": 4.919902483426665e-06, + "loss": 1.1961, + "step": 10838 + }, + { + "epoch": 0.6796037369114051, + "grad_norm": 3.244901657104492, + "learning_rate": 4.918153386528271e-06, + "loss": 1.0085, + "step": 10839 + }, + { + "epoch": 0.6796664367671954, + "grad_norm": 3.4960544109344482, + "learning_rate": 4.916404499205452e-06, + "loss": 1.1519, + "step": 10840 + }, + { + "epoch": 0.6797291366229857, + "grad_norm": 3.0775539875030518, + "learning_rate": 4.914655821530331e-06, + "loss": 1.2008, + "step": 10841 + }, + { + "epoch": 0.6797918364787761, + "grad_norm": 3.383758068084717, + "learning_rate": 4.9129073535750285e-06, + "loss": 0.9498, + "step": 10842 + }, + { + "epoch": 0.6798545363345664, + "grad_norm": 3.1271543502807617, + "learning_rate": 4.91115909541165e-06, + "loss": 1.0949, + "step": 10843 + }, + { + "epoch": 0.6799172361903567, + "grad_norm": 2.960472345352173, + "learning_rate": 4.909411047112288e-06, + "loss": 1.0787, + "step": 10844 + }, + { + "epoch": 0.6799799360461471, + "grad_norm": 3.533719778060913, + "learning_rate": 4.9076632087490405e-06, + "loss": 0.9787, + "step": 10845 + }, + { + "epoch": 0.6800426359019375, + "grad_norm": 3.840238571166992, + "learning_rate": 4.905915580393985e-06, + "loss": 1.1335, + "step": 10846 + }, + { + "epoch": 0.6801053357577278, + "grad_norm": 3.1318650245666504, + "learning_rate": 4.904168162119193e-06, + "loss": 1.0789, + "step": 10847 + }, + { + "epoch": 0.6801680356135181, + "grad_norm": 3.01953125, + "learning_rate": 4.902420953996725e-06, + "loss": 1.0107, + "step": 10848 + }, + { + "epoch": 0.6802307354693085, + "grad_norm": 3.2054779529571533, + "learning_rate": 4.900673956098644e-06, + "loss": 1.1343, + "step": 10849 + }, + { + "epoch": 0.6802934353250988, + "grad_norm": 3.23744535446167, + "learning_rate": 4.898927168496991e-06, + "loss": 1.2119, + "step": 10850 + }, + { + "epoch": 0.6803561351808891, + "grad_norm": 3.6558268070220947, + "learning_rate": 4.897180591263801e-06, + "loss": 1.0698, + "step": 10851 + }, + { + "epoch": 0.6804188350366794, + "grad_norm": 3.3929288387298584, + "learning_rate": 4.895434224471108e-06, + "loss": 1.1667, + "step": 10852 + }, + { + "epoch": 0.6804815348924698, + "grad_norm": 3.5561561584472656, + "learning_rate": 4.893688068190933e-06, + "loss": 1.1419, + "step": 10853 + }, + { + "epoch": 0.6805442347482601, + "grad_norm": 3.359703779220581, + "learning_rate": 4.8919421224952845e-06, + "loss": 1.162, + "step": 10854 + }, + { + "epoch": 0.6806069346040504, + "grad_norm": 3.1945641040802, + "learning_rate": 4.890196387456162e-06, + "loss": 1.1932, + "step": 10855 + }, + { + "epoch": 0.6806696344598407, + "grad_norm": 3.1366453170776367, + "learning_rate": 4.888450863145568e-06, + "loss": 1.1775, + "step": 10856 + }, + { + "epoch": 0.6807323343156311, + "grad_norm": 3.446922779083252, + "learning_rate": 4.886705549635482e-06, + "loss": 1.0252, + "step": 10857 + }, + { + "epoch": 0.6807950341714214, + "grad_norm": 3.3171026706695557, + "learning_rate": 4.8849604469978824e-06, + "loss": 1.1524, + "step": 10858 + }, + { + "epoch": 0.6808577340272117, + "grad_norm": 3.3332905769348145, + "learning_rate": 4.883215555304735e-06, + "loss": 1.1273, + "step": 10859 + }, + { + "epoch": 0.680920433883002, + "grad_norm": 3.2164745330810547, + "learning_rate": 4.881470874628005e-06, + "loss": 1.0129, + "step": 10860 + }, + { + "epoch": 
0.6809831337387924, + "grad_norm": 3.4107553958892822, + "learning_rate": 4.8797264050396395e-06, + "loss": 1.1205, + "step": 10861 + }, + { + "epoch": 0.6810458335945827, + "grad_norm": 3.588473320007324, + "learning_rate": 4.877982146611577e-06, + "loss": 1.2288, + "step": 10862 + }, + { + "epoch": 0.681108533450373, + "grad_norm": 3.368147611618042, + "learning_rate": 4.876238099415758e-06, + "loss": 0.9437, + "step": 10863 + }, + { + "epoch": 0.6811712333061634, + "grad_norm": 3.292442798614502, + "learning_rate": 4.874494263524103e-06, + "loss": 1.2582, + "step": 10864 + }, + { + "epoch": 0.6812339331619537, + "grad_norm": 3.40106201171875, + "learning_rate": 4.87275063900853e-06, + "loss": 1.1045, + "step": 10865 + }, + { + "epoch": 0.681296633017744, + "grad_norm": 3.860872983932495, + "learning_rate": 4.87100722594094e-06, + "loss": 0.9958, + "step": 10866 + }, + { + "epoch": 0.6813593328735343, + "grad_norm": 3.3695945739746094, + "learning_rate": 4.8692640243932385e-06, + "loss": 1.1609, + "step": 10867 + }, + { + "epoch": 0.6814220327293247, + "grad_norm": 3.3033666610717773, + "learning_rate": 4.867521034437315e-06, + "loss": 0.9765, + "step": 10868 + }, + { + "epoch": 0.6814847325851151, + "grad_norm": 3.5917928218841553, + "learning_rate": 4.865778256145046e-06, + "loss": 1.0153, + "step": 10869 + }, + { + "epoch": 0.6815474324409054, + "grad_norm": 3.0942087173461914, + "learning_rate": 4.864035689588303e-06, + "loss": 1.2215, + "step": 10870 + }, + { + "epoch": 0.6816101322966958, + "grad_norm": 3.372734785079956, + "learning_rate": 4.862293334838957e-06, + "loss": 1.0722, + "step": 10871 + }, + { + "epoch": 0.6816728321524861, + "grad_norm": 3.168579578399658, + "learning_rate": 4.860551191968858e-06, + "loss": 1.1819, + "step": 10872 + }, + { + "epoch": 0.6817355320082764, + "grad_norm": 3.1596179008483887, + "learning_rate": 4.8588092610498475e-06, + "loss": 1.3514, + "step": 10873 + }, + { + "epoch": 0.6817982318640667, + "grad_norm": 2.799159288406372, + "learning_rate": 4.8570675421537685e-06, + "loss": 1.2361, + "step": 10874 + }, + { + "epoch": 0.6818609317198571, + "grad_norm": 2.9023656845092773, + "learning_rate": 4.8553260353524525e-06, + "loss": 1.0122, + "step": 10875 + }, + { + "epoch": 0.6819236315756474, + "grad_norm": 3.185220956802368, + "learning_rate": 4.853584740717714e-06, + "loss": 1.2255, + "step": 10876 + }, + { + "epoch": 0.6819863314314377, + "grad_norm": 3.3439130783081055, + "learning_rate": 4.8518436583213665e-06, + "loss": 1.0401, + "step": 10877 + }, + { + "epoch": 0.682049031287228, + "grad_norm": 3.0748612880706787, + "learning_rate": 4.850102788235208e-06, + "loss": 1.1798, + "step": 10878 + }, + { + "epoch": 0.6821117311430184, + "grad_norm": 3.2143874168395996, + "learning_rate": 4.848362130531039e-06, + "loss": 1.2729, + "step": 10879 + }, + { + "epoch": 0.6821744309988087, + "grad_norm": 3.0741803646087646, + "learning_rate": 4.84662168528064e-06, + "loss": 1.1688, + "step": 10880 + }, + { + "epoch": 0.682237130854599, + "grad_norm": 3.5900180339813232, + "learning_rate": 4.844881452555785e-06, + "loss": 0.9774, + "step": 10881 + }, + { + "epoch": 0.6822998307103894, + "grad_norm": 3.5382590293884277, + "learning_rate": 4.843141432428248e-06, + "loss": 1.1053, + "step": 10882 + }, + { + "epoch": 0.6823625305661797, + "grad_norm": 3.3784215450286865, + "learning_rate": 4.841401624969782e-06, + "loss": 1.0154, + "step": 10883 + }, + { + "epoch": 0.68242523042197, + "grad_norm": 3.3010025024414062, + "learning_rate": 
4.839662030252139e-06, + "loss": 0.9664, + "step": 10884 + }, + { + "epoch": 0.6824879302777603, + "grad_norm": 3.12508487701416, + "learning_rate": 4.837922648347056e-06, + "loss": 0.9182, + "step": 10885 + }, + { + "epoch": 0.6825506301335507, + "grad_norm": 3.196585178375244, + "learning_rate": 4.8361834793262714e-06, + "loss": 1.1753, + "step": 10886 + }, + { + "epoch": 0.682613329989341, + "grad_norm": 3.5104994773864746, + "learning_rate": 4.834444523261507e-06, + "loss": 1.1745, + "step": 10887 + }, + { + "epoch": 0.6826760298451313, + "grad_norm": 3.2605276107788086, + "learning_rate": 4.8327057802244744e-06, + "loss": 1.2869, + "step": 10888 + }, + { + "epoch": 0.6827387297009216, + "grad_norm": 3.2346835136413574, + "learning_rate": 4.830967250286878e-06, + "loss": 1.098, + "step": 10889 + }, + { + "epoch": 0.682801429556712, + "grad_norm": 3.126851797103882, + "learning_rate": 4.829228933520422e-06, + "loss": 1.1131, + "step": 10890 + }, + { + "epoch": 0.6828641294125023, + "grad_norm": 3.342305898666382, + "learning_rate": 4.8274908299967905e-06, + "loss": 1.091, + "step": 10891 + }, + { + "epoch": 0.6829268292682927, + "grad_norm": 3.2929205894470215, + "learning_rate": 4.825752939787659e-06, + "loss": 1.0289, + "step": 10892 + }, + { + "epoch": 0.6829895291240831, + "grad_norm": 3.271022319793701, + "learning_rate": 4.824015262964705e-06, + "loss": 1.1125, + "step": 10893 + }, + { + "epoch": 0.6830522289798734, + "grad_norm": 3.071239948272705, + "learning_rate": 4.822277799599589e-06, + "loss": 1.0786, + "step": 10894 + }, + { + "epoch": 0.6831149288356637, + "grad_norm": 3.077238082885742, + "learning_rate": 4.820540549763962e-06, + "loss": 1.0645, + "step": 10895 + }, + { + "epoch": 0.683177628691454, + "grad_norm": 3.083616018295288, + "learning_rate": 4.818803513529465e-06, + "loss": 1.2231, + "step": 10896 + }, + { + "epoch": 0.6832403285472444, + "grad_norm": 3.3386640548706055, + "learning_rate": 4.817066690967742e-06, + "loss": 0.9106, + "step": 10897 + }, + { + "epoch": 0.6833030284030347, + "grad_norm": 3.1285293102264404, + "learning_rate": 4.815330082150414e-06, + "loss": 0.9989, + "step": 10898 + }, + { + "epoch": 0.683365728258825, + "grad_norm": 3.251660108566284, + "learning_rate": 4.813593687149102e-06, + "loss": 1.0743, + "step": 10899 + }, + { + "epoch": 0.6834284281146153, + "grad_norm": 3.1659936904907227, + "learning_rate": 4.811857506035407e-06, + "loss": 1.1797, + "step": 10900 + }, + { + "epoch": 0.6834911279704057, + "grad_norm": 3.0628926753997803, + "learning_rate": 4.810121538880939e-06, + "loss": 1.2119, + "step": 10901 + }, + { + "epoch": 0.683553827826196, + "grad_norm": 3.497875452041626, + "learning_rate": 4.8083857857572856e-06, + "loss": 1.1809, + "step": 10902 + }, + { + "epoch": 0.6836165276819863, + "grad_norm": 3.0271575450897217, + "learning_rate": 4.806650246736025e-06, + "loss": 1.1942, + "step": 10903 + }, + { + "epoch": 0.6836792275377767, + "grad_norm": 3.918569803237915, + "learning_rate": 4.804914921888739e-06, + "loss": 0.9552, + "step": 10904 + }, + { + "epoch": 0.683741927393567, + "grad_norm": 3.264897346496582, + "learning_rate": 4.803179811286989e-06, + "loss": 1.0891, + "step": 10905 + }, + { + "epoch": 0.6838046272493573, + "grad_norm": 3.3614070415496826, + "learning_rate": 4.80144491500233e-06, + "loss": 1.0896, + "step": 10906 + }, + { + "epoch": 0.6838673271051476, + "grad_norm": 3.41487455368042, + "learning_rate": 4.799710233106306e-06, + "loss": 1.0797, + "step": 10907 + }, + { + "epoch": 0.683930026960938, + 
"grad_norm": 3.171536922454834, + "learning_rate": 4.797975765670463e-06, + "loss": 1.0786, + "step": 10908 + }, + { + "epoch": 0.6839927268167283, + "grad_norm": 3.789177417755127, + "learning_rate": 4.7962415127663265e-06, + "loss": 1.081, + "step": 10909 + }, + { + "epoch": 0.6840554266725186, + "grad_norm": 3.246398687362671, + "learning_rate": 4.794507474465418e-06, + "loss": 1.086, + "step": 10910 + }, + { + "epoch": 0.684118126528309, + "grad_norm": 3.3774566650390625, + "learning_rate": 4.7927736508392445e-06, + "loss": 1.0113, + "step": 10911 + }, + { + "epoch": 0.6841808263840993, + "grad_norm": 3.6329331398010254, + "learning_rate": 4.791040041959316e-06, + "loss": 1.1785, + "step": 10912 + }, + { + "epoch": 0.6842435262398896, + "grad_norm": 3.486414909362793, + "learning_rate": 4.789306647897124e-06, + "loss": 0.995, + "step": 10913 + }, + { + "epoch": 0.6843062260956799, + "grad_norm": 3.370966672897339, + "learning_rate": 4.787573468724149e-06, + "loss": 1.0694, + "step": 10914 + }, + { + "epoch": 0.6843689259514704, + "grad_norm": 2.984605312347412, + "learning_rate": 4.785840504511877e-06, + "loss": 1.1374, + "step": 10915 + }, + { + "epoch": 0.6844316258072607, + "grad_norm": 3.201775074005127, + "learning_rate": 4.784107755331769e-06, + "loss": 1.2751, + "step": 10916 + }, + { + "epoch": 0.684494325663051, + "grad_norm": 3.0883851051330566, + "learning_rate": 4.7823752212552855e-06, + "loss": 1.0098, + "step": 10917 + }, + { + "epoch": 0.6845570255188413, + "grad_norm": 2.9471445083618164, + "learning_rate": 4.780642902353872e-06, + "loss": 1.0472, + "step": 10918 + }, + { + "epoch": 0.6846197253746317, + "grad_norm": 3.45666766166687, + "learning_rate": 4.778910798698977e-06, + "loss": 1.1113, + "step": 10919 + }, + { + "epoch": 0.684682425230422, + "grad_norm": 3.2243072986602783, + "learning_rate": 4.777178910362028e-06, + "loss": 1.0836, + "step": 10920 + }, + { + "epoch": 0.6847451250862123, + "grad_norm": 3.2398765087127686, + "learning_rate": 4.77544723741445e-06, + "loss": 1.0908, + "step": 10921 + }, + { + "epoch": 0.6848078249420027, + "grad_norm": 3.460723638534546, + "learning_rate": 4.773715779927651e-06, + "loss": 1.2958, + "step": 10922 + }, + { + "epoch": 0.684870524797793, + "grad_norm": 3.536470413208008, + "learning_rate": 4.771984537973046e-06, + "loss": 1.0545, + "step": 10923 + }, + { + "epoch": 0.6849332246535833, + "grad_norm": 3.489135265350342, + "learning_rate": 4.7702535116220274e-06, + "loss": 1.0818, + "step": 10924 + }, + { + "epoch": 0.6849959245093736, + "grad_norm": 3.7540252208709717, + "learning_rate": 4.768522700945978e-06, + "loss": 1.0032, + "step": 10925 + }, + { + "epoch": 0.685058624365164, + "grad_norm": 3.446387767791748, + "learning_rate": 4.766792106016285e-06, + "loss": 1.1832, + "step": 10926 + }, + { + "epoch": 0.6851213242209543, + "grad_norm": 2.893603563308716, + "learning_rate": 4.765061726904313e-06, + "loss": 1.2693, + "step": 10927 + }, + { + "epoch": 0.6851840240767446, + "grad_norm": 3.1506423950195312, + "learning_rate": 4.7633315636814245e-06, + "loss": 1.0393, + "step": 10928 + }, + { + "epoch": 0.6852467239325349, + "grad_norm": 3.174945116043091, + "learning_rate": 4.761601616418968e-06, + "loss": 1.0986, + "step": 10929 + }, + { + "epoch": 0.6853094237883253, + "grad_norm": 3.7191901206970215, + "learning_rate": 4.759871885188293e-06, + "loss": 1.1678, + "step": 10930 + }, + { + "epoch": 0.6853721236441156, + "grad_norm": 3.5602853298187256, + "learning_rate": 4.75814237006073e-06, + "loss": 1.2264, + 
"step": 10931 + }, + { + "epoch": 0.6854348234999059, + "grad_norm": 3.4375267028808594, + "learning_rate": 4.7564130711076015e-06, + "loss": 1.0892, + "step": 10932 + }, + { + "epoch": 0.6854975233556962, + "grad_norm": 2.9889283180236816, + "learning_rate": 4.75468398840023e-06, + "loss": 1.2327, + "step": 10933 + }, + { + "epoch": 0.6855602232114866, + "grad_norm": 3.267592191696167, + "learning_rate": 4.75295512200992e-06, + "loss": 1.1319, + "step": 10934 + }, + { + "epoch": 0.6856229230672769, + "grad_norm": 3.4432191848754883, + "learning_rate": 4.75122647200797e-06, + "loss": 0.938, + "step": 10935 + }, + { + "epoch": 0.6856856229230672, + "grad_norm": 2.889784574508667, + "learning_rate": 4.7494980384656656e-06, + "loss": 1.1216, + "step": 10936 + }, + { + "epoch": 0.6857483227788576, + "grad_norm": 3.5799031257629395, + "learning_rate": 4.747769821454295e-06, + "loss": 1.0532, + "step": 10937 + }, + { + "epoch": 0.685811022634648, + "grad_norm": 3.6989614963531494, + "learning_rate": 4.7460418210451265e-06, + "loss": 1.096, + "step": 10938 + }, + { + "epoch": 0.6858737224904383, + "grad_norm": 3.5535242557525635, + "learning_rate": 4.744314037309423e-06, + "loss": 1.123, + "step": 10939 + }, + { + "epoch": 0.6859364223462286, + "grad_norm": 3.629136562347412, + "learning_rate": 4.742586470318434e-06, + "loss": 1.2448, + "step": 10940 + }, + { + "epoch": 0.685999122202019, + "grad_norm": 3.665311813354492, + "learning_rate": 4.7408591201434126e-06, + "loss": 1.0544, + "step": 10941 + }, + { + "epoch": 0.6860618220578093, + "grad_norm": 3.448338031768799, + "learning_rate": 4.73913198685559e-06, + "loss": 1.1117, + "step": 10942 + }, + { + "epoch": 0.6861245219135996, + "grad_norm": 3.5982556343078613, + "learning_rate": 4.737405070526192e-06, + "loss": 1.1706, + "step": 10943 + }, + { + "epoch": 0.68618722176939, + "grad_norm": 3.3026020526885986, + "learning_rate": 4.7356783712264405e-06, + "loss": 1.0491, + "step": 10944 + }, + { + "epoch": 0.6862499216251803, + "grad_norm": 3.1805293560028076, + "learning_rate": 4.733951889027545e-06, + "loss": 0.9264, + "step": 10945 + }, + { + "epoch": 0.6863126214809706, + "grad_norm": 3.2808146476745605, + "learning_rate": 4.732225624000701e-06, + "loss": 0.9737, + "step": 10946 + }, + { + "epoch": 0.6863753213367609, + "grad_norm": 3.728804588317871, + "learning_rate": 4.7304995762171005e-06, + "loss": 1.0884, + "step": 10947 + }, + { + "epoch": 0.6864380211925513, + "grad_norm": 3.2633609771728516, + "learning_rate": 4.728773745747931e-06, + "loss": 1.1542, + "step": 10948 + }, + { + "epoch": 0.6865007210483416, + "grad_norm": 3.573291540145874, + "learning_rate": 4.727048132664362e-06, + "loss": 0.92, + "step": 10949 + }, + { + "epoch": 0.6865634209041319, + "grad_norm": 3.2229466438293457, + "learning_rate": 4.725322737037558e-06, + "loss": 1.1518, + "step": 10950 + }, + { + "epoch": 0.6866261207599222, + "grad_norm": 3.3265812397003174, + "learning_rate": 4.7235975589386715e-06, + "loss": 1.0708, + "step": 10951 + }, + { + "epoch": 0.6866888206157126, + "grad_norm": 3.1256752014160156, + "learning_rate": 4.721872598438856e-06, + "loss": 1.0943, + "step": 10952 + }, + { + "epoch": 0.6867515204715029, + "grad_norm": 3.0519213676452637, + "learning_rate": 4.720147855609245e-06, + "loss": 1.0511, + "step": 10953 + }, + { + "epoch": 0.6868142203272932, + "grad_norm": 3.2017877101898193, + "learning_rate": 4.718423330520963e-06, + "loss": 1.2233, + "step": 10954 + }, + { + "epoch": 0.6868769201830836, + "grad_norm": 3.8068723678588867, + 
"learning_rate": 4.716699023245137e-06, + "loss": 1.0436, + "step": 10955 + }, + { + "epoch": 0.6869396200388739, + "grad_norm": 3.117074489593506, + "learning_rate": 4.714974933852874e-06, + "loss": 1.0614, + "step": 10956 + }, + { + "epoch": 0.6870023198946642, + "grad_norm": 3.3525216579437256, + "learning_rate": 4.713251062415276e-06, + "loss": 1.0419, + "step": 10957 + }, + { + "epoch": 0.6870650197504545, + "grad_norm": 2.9285101890563965, + "learning_rate": 4.71152740900343e-06, + "loss": 1.095, + "step": 10958 + }, + { + "epoch": 0.6871277196062449, + "grad_norm": 3.4965381622314453, + "learning_rate": 4.70980397368843e-06, + "loss": 1.0271, + "step": 10959 + }, + { + "epoch": 0.6871904194620352, + "grad_norm": 3.307586431503296, + "learning_rate": 4.708080756541345e-06, + "loss": 1.0262, + "step": 10960 + }, + { + "epoch": 0.6872531193178256, + "grad_norm": 2.7802863121032715, + "learning_rate": 4.70635775763324e-06, + "loss": 1.0727, + "step": 10961 + }, + { + "epoch": 0.687315819173616, + "grad_norm": 3.2446393966674805, + "learning_rate": 4.704634977035169e-06, + "loss": 0.9811, + "step": 10962 + }, + { + "epoch": 0.6873785190294063, + "grad_norm": 3.503032922744751, + "learning_rate": 4.702912414818186e-06, + "loss": 1.0871, + "step": 10963 + }, + { + "epoch": 0.6874412188851966, + "grad_norm": 3.553558826446533, + "learning_rate": 4.701190071053327e-06, + "loss": 0.9884, + "step": 10964 + }, + { + "epoch": 0.6875039187409869, + "grad_norm": 3.3166284561157227, + "learning_rate": 4.6994679458116165e-06, + "loss": 0.9735, + "step": 10965 + }, + { + "epoch": 0.6875666185967773, + "grad_norm": 3.23773455619812, + "learning_rate": 4.697746039164083e-06, + "loss": 1.0946, + "step": 10966 + }, + { + "epoch": 0.6876293184525676, + "grad_norm": 3.2743592262268066, + "learning_rate": 4.696024351181735e-06, + "loss": 1.1339, + "step": 10967 + }, + { + "epoch": 0.6876920183083579, + "grad_norm": 3.21391224861145, + "learning_rate": 4.694302881935574e-06, + "loss": 1.1136, + "step": 10968 + }, + { + "epoch": 0.6877547181641482, + "grad_norm": 3.100649833679199, + "learning_rate": 4.69258163149659e-06, + "loss": 1.0408, + "step": 10969 + }, + { + "epoch": 0.6878174180199386, + "grad_norm": 3.239107608795166, + "learning_rate": 4.690860599935776e-06, + "loss": 0.9465, + "step": 10970 + }, + { + "epoch": 0.6878801178757289, + "grad_norm": 3.0642714500427246, + "learning_rate": 4.689139787324102e-06, + "loss": 0.9465, + "step": 10971 + }, + { + "epoch": 0.6879428177315192, + "grad_norm": 3.3755929470062256, + "learning_rate": 4.687419193732534e-06, + "loss": 1.0851, + "step": 10972 + }, + { + "epoch": 0.6880055175873095, + "grad_norm": 4.068801403045654, + "learning_rate": 4.6856988192320285e-06, + "loss": 1.0661, + "step": 10973 + }, + { + "epoch": 0.6880682174430999, + "grad_norm": 3.3816280364990234, + "learning_rate": 4.683978663893539e-06, + "loss": 1.2092, + "step": 10974 + }, + { + "epoch": 0.6881309172988902, + "grad_norm": 3.444105863571167, + "learning_rate": 4.6822587277880015e-06, + "loss": 1.14, + "step": 10975 + }, + { + "epoch": 0.6881936171546805, + "grad_norm": 3.563495635986328, + "learning_rate": 4.680539010986342e-06, + "loss": 1.1072, + "step": 10976 + }, + { + "epoch": 0.6882563170104709, + "grad_norm": 3.517007827758789, + "learning_rate": 4.678819513559491e-06, + "loss": 1.1867, + "step": 10977 + }, + { + "epoch": 0.6883190168662612, + "grad_norm": 3.4981188774108887, + "learning_rate": 4.677100235578355e-06, + "loss": 1.083, + "step": 10978 + }, + { + "epoch": 
0.6883817167220515, + "grad_norm": 3.124108076095581, + "learning_rate": 4.675381177113837e-06, + "loss": 1.0872, + "step": 10979 + }, + { + "epoch": 0.6884444165778418, + "grad_norm": 3.0101702213287354, + "learning_rate": 4.673662338236828e-06, + "loss": 1.2136, + "step": 10980 + }, + { + "epoch": 0.6885071164336322, + "grad_norm": 3.1993677616119385, + "learning_rate": 4.671943719018221e-06, + "loss": 1.0775, + "step": 10981 + }, + { + "epoch": 0.6885698162894225, + "grad_norm": 3.062798261642456, + "learning_rate": 4.670225319528888e-06, + "loss": 1.0014, + "step": 10982 + }, + { + "epoch": 0.6886325161452128, + "grad_norm": 3.5680582523345947, + "learning_rate": 4.668507139839692e-06, + "loss": 1.069, + "step": 10983 + }, + { + "epoch": 0.6886952160010033, + "grad_norm": 3.4987833499908447, + "learning_rate": 4.666789180021497e-06, + "loss": 1.1534, + "step": 10984 + }, + { + "epoch": 0.6887579158567936, + "grad_norm": 3.440042734146118, + "learning_rate": 4.66507144014515e-06, + "loss": 0.9763, + "step": 10985 + }, + { + "epoch": 0.6888206157125839, + "grad_norm": 3.200596809387207, + "learning_rate": 4.66335392028149e-06, + "loss": 1.042, + "step": 10986 + }, + { + "epoch": 0.6888833155683742, + "grad_norm": 3.0986361503601074, + "learning_rate": 4.661636620501343e-06, + "loss": 1.2213, + "step": 10987 + }, + { + "epoch": 0.6889460154241646, + "grad_norm": 3.264667272567749, + "learning_rate": 4.659919540875539e-06, + "loss": 1.1448, + "step": 10988 + }, + { + "epoch": 0.6890087152799549, + "grad_norm": 3.2774124145507812, + "learning_rate": 4.658202681474886e-06, + "loss": 1.046, + "step": 10989 + }, + { + "epoch": 0.6890714151357452, + "grad_norm": 3.2499873638153076, + "learning_rate": 4.656486042370188e-06, + "loss": 1.1479, + "step": 10990 + }, + { + "epoch": 0.6891341149915355, + "grad_norm": 3.1451258659362793, + "learning_rate": 4.654769623632236e-06, + "loss": 1.1015, + "step": 10991 + }, + { + "epoch": 0.6891968148473259, + "grad_norm": 3.623178720474243, + "learning_rate": 4.65305342533182e-06, + "loss": 1.0927, + "step": 10992 + }, + { + "epoch": 0.6892595147031162, + "grad_norm": 3.6494245529174805, + "learning_rate": 4.651337447539716e-06, + "loss": 1.1773, + "step": 10993 + }, + { + "epoch": 0.6893222145589065, + "grad_norm": 3.3666086196899414, + "learning_rate": 4.6496216903266846e-06, + "loss": 1.1029, + "step": 10994 + }, + { + "epoch": 0.6893849144146968, + "grad_norm": 3.0200448036193848, + "learning_rate": 4.647906153763493e-06, + "loss": 1.0856, + "step": 10995 + }, + { + "epoch": 0.6894476142704872, + "grad_norm": 3.400649309158325, + "learning_rate": 4.646190837920884e-06, + "loss": 0.9768, + "step": 10996 + }, + { + "epoch": 0.6895103141262775, + "grad_norm": 2.954608917236328, + "learning_rate": 4.6444757428696e-06, + "loss": 1.1199, + "step": 10997 + }, + { + "epoch": 0.6895730139820678, + "grad_norm": 3.353335380554199, + "learning_rate": 4.6427608686803646e-06, + "loss": 0.9276, + "step": 10998 + }, + { + "epoch": 0.6896357138378582, + "grad_norm": 3.028113842010498, + "learning_rate": 4.64104621542391e-06, + "loss": 1.3011, + "step": 10999 + }, + { + "epoch": 0.6896984136936485, + "grad_norm": 3.347550392150879, + "learning_rate": 4.6393317831709435e-06, + "loss": 1.0467, + "step": 11000 + }, + { + "epoch": 0.6896984136936485, + "eval_loss": 1.1049566268920898, + "eval_runtime": 144.1179, + "eval_samples_per_second": 4.371, + "eval_steps_per_second": 1.096, + "step": 11000 + }, + { + "epoch": 0.6897611135494388, + "grad_norm": 2.9524145126342773, + 
"learning_rate": 4.637617571992169e-06, + "loss": 1.1879, + "step": 11001 + }, + { + "epoch": 0.6898238134052291, + "grad_norm": 3.1465938091278076, + "learning_rate": 4.635903581958276e-06, + "loss": 1.1565, + "step": 11002 + }, + { + "epoch": 0.6898865132610195, + "grad_norm": 2.916475534439087, + "learning_rate": 4.634189813139955e-06, + "loss": 1.0505, + "step": 11003 + }, + { + "epoch": 0.6899492131168098, + "grad_norm": 3.66227126121521, + "learning_rate": 4.632476265607882e-06, + "loss": 1.3922, + "step": 11004 + }, + { + "epoch": 0.6900119129726001, + "grad_norm": 3.2390553951263428, + "learning_rate": 4.630762939432719e-06, + "loss": 1.0539, + "step": 11005 + }, + { + "epoch": 0.6900746128283904, + "grad_norm": 3.326521635055542, + "learning_rate": 4.629049834685131e-06, + "loss": 1.0471, + "step": 11006 + }, + { + "epoch": 0.6901373126841809, + "grad_norm": 3.400775671005249, + "learning_rate": 4.627336951435762e-06, + "loss": 0.8672, + "step": 11007 + }, + { + "epoch": 0.6902000125399712, + "grad_norm": 3.4218716621398926, + "learning_rate": 4.625624289755251e-06, + "loss": 0.9249, + "step": 11008 + }, + { + "epoch": 0.6902627123957615, + "grad_norm": 3.010505199432373, + "learning_rate": 4.623911849714226e-06, + "loss": 1.1013, + "step": 11009 + }, + { + "epoch": 0.6903254122515519, + "grad_norm": 3.959834575653076, + "learning_rate": 4.622199631383314e-06, + "loss": 1.0202, + "step": 11010 + }, + { + "epoch": 0.6903881121073422, + "grad_norm": 3.6369223594665527, + "learning_rate": 4.620487634833124e-06, + "loss": 0.9657, + "step": 11011 + }, + { + "epoch": 0.6904508119631325, + "grad_norm": 3.148219108581543, + "learning_rate": 4.618775860134259e-06, + "loss": 0.9588, + "step": 11012 + }, + { + "epoch": 0.6905135118189228, + "grad_norm": 3.371793031692505, + "learning_rate": 4.6170643073573094e-06, + "loss": 0.994, + "step": 11013 + }, + { + "epoch": 0.6905762116747132, + "grad_norm": 3.1681690216064453, + "learning_rate": 4.615352976572867e-06, + "loss": 1.0062, + "step": 11014 + }, + { + "epoch": 0.6906389115305035, + "grad_norm": 3.424860715866089, + "learning_rate": 4.613641867851502e-06, + "loss": 0.9892, + "step": 11015 + }, + { + "epoch": 0.6907016113862938, + "grad_norm": 3.2451705932617188, + "learning_rate": 4.611930981263778e-06, + "loss": 0.9905, + "step": 11016 + }, + { + "epoch": 0.6907643112420842, + "grad_norm": 3.2200520038604736, + "learning_rate": 4.6102203168802594e-06, + "loss": 1.1149, + "step": 11017 + }, + { + "epoch": 0.6908270110978745, + "grad_norm": 3.677713632583618, + "learning_rate": 4.60850987477149e-06, + "loss": 0.9469, + "step": 11018 + }, + { + "epoch": 0.6908897109536648, + "grad_norm": 3.6750338077545166, + "learning_rate": 4.606799655008009e-06, + "loss": 1.0794, + "step": 11019 + }, + { + "epoch": 0.6909524108094551, + "grad_norm": 2.923609495162964, + "learning_rate": 4.605089657660343e-06, + "loss": 1.032, + "step": 11020 + }, + { + "epoch": 0.6910151106652455, + "grad_norm": 3.2881219387054443, + "learning_rate": 4.603379882799018e-06, + "loss": 1.049, + "step": 11021 + }, + { + "epoch": 0.6910778105210358, + "grad_norm": 3.7647078037261963, + "learning_rate": 4.601670330494543e-06, + "loss": 1.0133, + "step": 11022 + }, + { + "epoch": 0.6911405103768261, + "grad_norm": 3.251293420791626, + "learning_rate": 4.599961000817419e-06, + "loss": 1.1573, + "step": 11023 + }, + { + "epoch": 0.6912032102326164, + "grad_norm": 3.0554120540618896, + "learning_rate": 4.598251893838135e-06, + "loss": 1.108, + "step": 11024 + }, + { + "epoch": 
0.6912659100884068, + "grad_norm": 3.3668580055236816, + "learning_rate": 4.596543009627183e-06, + "loss": 1.1743, + "step": 11025 + }, + { + "epoch": 0.6913286099441971, + "grad_norm": 3.305535316467285, + "learning_rate": 4.5948343482550316e-06, + "loss": 1.1878, + "step": 11026 + }, + { + "epoch": 0.6913913097999874, + "grad_norm": 3.6625051498413086, + "learning_rate": 4.593125909792145e-06, + "loss": 0.971, + "step": 11027 + }, + { + "epoch": 0.6914540096557777, + "grad_norm": 3.0569264888763428, + "learning_rate": 4.591417694308987e-06, + "loss": 1.0787, + "step": 11028 + }, + { + "epoch": 0.6915167095115681, + "grad_norm": 3.2147328853607178, + "learning_rate": 4.589709701875999e-06, + "loss": 1.0095, + "step": 11029 + }, + { + "epoch": 0.6915794093673584, + "grad_norm": 3.5475735664367676, + "learning_rate": 4.588001932563618e-06, + "loss": 1.0532, + "step": 11030 + }, + { + "epoch": 0.6916421092231488, + "grad_norm": 3.470532178878784, + "learning_rate": 4.586294386442271e-06, + "loss": 1.185, + "step": 11031 + }, + { + "epoch": 0.6917048090789392, + "grad_norm": 3.4322569370269775, + "learning_rate": 4.584587063582383e-06, + "loss": 0.9828, + "step": 11032 + }, + { + "epoch": 0.6917675089347295, + "grad_norm": 3.388345718383789, + "learning_rate": 4.582879964054361e-06, + "loss": 1.0727, + "step": 11033 + }, + { + "epoch": 0.6918302087905198, + "grad_norm": 2.796020746231079, + "learning_rate": 4.581173087928603e-06, + "loss": 1.0382, + "step": 11034 + }, + { + "epoch": 0.6918929086463101, + "grad_norm": 3.342163562774658, + "learning_rate": 4.579466435275506e-06, + "loss": 1.0841, + "step": 11035 + }, + { + "epoch": 0.6919556085021005, + "grad_norm": 3.425821304321289, + "learning_rate": 4.5777600061654505e-06, + "loss": 0.9595, + "step": 11036 + }, + { + "epoch": 0.6920183083578908, + "grad_norm": 3.5084683895111084, + "learning_rate": 4.576053800668809e-06, + "loss": 1.1945, + "step": 11037 + }, + { + "epoch": 0.6920810082136811, + "grad_norm": 3.218015670776367, + "learning_rate": 4.574347818855942e-06, + "loss": 1.2012, + "step": 11038 + }, + { + "epoch": 0.6921437080694715, + "grad_norm": 2.982084035873413, + "learning_rate": 4.572642060797212e-06, + "loss": 1.2626, + "step": 11039 + }, + { + "epoch": 0.6922064079252618, + "grad_norm": 3.3766865730285645, + "learning_rate": 4.57093652656296e-06, + "loss": 1.0634, + "step": 11040 + }, + { + "epoch": 0.6922691077810521, + "grad_norm": 3.5269694328308105, + "learning_rate": 4.569231216223523e-06, + "loss": 0.9394, + "step": 11041 + }, + { + "epoch": 0.6923318076368424, + "grad_norm": 3.0248568058013916, + "learning_rate": 4.5675261298492235e-06, + "loss": 1.3471, + "step": 11042 + }, + { + "epoch": 0.6923945074926328, + "grad_norm": 2.834207534790039, + "learning_rate": 4.565821267510389e-06, + "loss": 1.2564, + "step": 11043 + }, + { + "epoch": 0.6924572073484231, + "grad_norm": 3.3830161094665527, + "learning_rate": 4.564116629277321e-06, + "loss": 1.0859, + "step": 11044 + }, + { + "epoch": 0.6925199072042134, + "grad_norm": 3.202681303024292, + "learning_rate": 4.562412215220316e-06, + "loss": 0.9569, + "step": 11045 + }, + { + "epoch": 0.6925826070600037, + "grad_norm": 3.071838140487671, + "learning_rate": 4.560708025409675e-06, + "loss": 1.039, + "step": 11046 + }, + { + "epoch": 0.6926453069157941, + "grad_norm": 3.202669382095337, + "learning_rate": 4.559004059915672e-06, + "loss": 0.986, + "step": 11047 + }, + { + "epoch": 0.6927080067715844, + "grad_norm": 3.113264322280884, + "learning_rate": 4.557300318808578e-06, 
+ "loss": 1.1336, + "step": 11048 + }, + { + "epoch": 0.6927707066273747, + "grad_norm": 2.978537082672119, + "learning_rate": 4.555596802158653e-06, + "loss": 1.0252, + "step": 11049 + }, + { + "epoch": 0.692833406483165, + "grad_norm": 3.3404135704040527, + "learning_rate": 4.553893510036158e-06, + "loss": 1.0335, + "step": 11050 + }, + { + "epoch": 0.6928961063389554, + "grad_norm": 3.1167356967926025, + "learning_rate": 4.552190442511332e-06, + "loss": 1.0248, + "step": 11051 + }, + { + "epoch": 0.6929588061947457, + "grad_norm": 3.2110629081726074, + "learning_rate": 4.55048759965441e-06, + "loss": 1.1906, + "step": 11052 + }, + { + "epoch": 0.693021506050536, + "grad_norm": 2.975839853286743, + "learning_rate": 4.5487849815356145e-06, + "loss": 1.1193, + "step": 11053 + }, + { + "epoch": 0.6930842059063265, + "grad_norm": 3.819227933883667, + "learning_rate": 4.547082588225167e-06, + "loss": 1.1222, + "step": 11054 + }, + { + "epoch": 0.6931469057621168, + "grad_norm": 3.106616258621216, + "learning_rate": 4.5453804197932726e-06, + "loss": 1.0738, + "step": 11055 + }, + { + "epoch": 0.6932096056179071, + "grad_norm": 3.1850876808166504, + "learning_rate": 4.543678476310124e-06, + "loss": 0.9537, + "step": 11056 + }, + { + "epoch": 0.6932723054736974, + "grad_norm": 3.513012647628784, + "learning_rate": 4.541976757845916e-06, + "loss": 1.2648, + "step": 11057 + }, + { + "epoch": 0.6933350053294878, + "grad_norm": 3.0189099311828613, + "learning_rate": 4.540275264470826e-06, + "loss": 0.9478, + "step": 11058 + }, + { + "epoch": 0.6933977051852781, + "grad_norm": 3.407491445541382, + "learning_rate": 4.538573996255022e-06, + "loss": 1.0975, + "step": 11059 + }, + { + "epoch": 0.6934604050410684, + "grad_norm": 3.114531993865967, + "learning_rate": 4.536872953268662e-06, + "loss": 1.0992, + "step": 11060 + }, + { + "epoch": 0.6935231048968588, + "grad_norm": 3.5050785541534424, + "learning_rate": 4.535172135581904e-06, + "loss": 1.0798, + "step": 11061 + }, + { + "epoch": 0.6935858047526491, + "grad_norm": 3.2379322052001953, + "learning_rate": 4.533471543264884e-06, + "loss": 1.1708, + "step": 11062 + }, + { + "epoch": 0.6936485046084394, + "grad_norm": 3.372934341430664, + "learning_rate": 4.531771176387737e-06, + "loss": 1.1451, + "step": 11063 + }, + { + "epoch": 0.6937112044642297, + "grad_norm": 3.108868360519409, + "learning_rate": 4.530071035020582e-06, + "loss": 1.0078, + "step": 11064 + }, + { + "epoch": 0.6937739043200201, + "grad_norm": 3.275102376937866, + "learning_rate": 4.52837111923354e-06, + "loss": 1.0814, + "step": 11065 + }, + { + "epoch": 0.6938366041758104, + "grad_norm": 3.2633373737335205, + "learning_rate": 4.5266714290967115e-06, + "loss": 1.1506, + "step": 11066 + }, + { + "epoch": 0.6938993040316007, + "grad_norm": 3.463639974594116, + "learning_rate": 4.524971964680189e-06, + "loss": 1.1501, + "step": 11067 + }, + { + "epoch": 0.693962003887391, + "grad_norm": 3.16222882270813, + "learning_rate": 4.523272726054067e-06, + "loss": 1.0441, + "step": 11068 + }, + { + "epoch": 0.6940247037431814, + "grad_norm": 3.5700480937957764, + "learning_rate": 4.521573713288414e-06, + "loss": 1.1416, + "step": 11069 + }, + { + "epoch": 0.6940874035989717, + "grad_norm": 3.1876275539398193, + "learning_rate": 4.519874926453303e-06, + "loss": 1.0024, + "step": 11070 + }, + { + "epoch": 0.694150103454762, + "grad_norm": 3.6217856407165527, + "learning_rate": 4.518176365618784e-06, + "loss": 1.0208, + "step": 11071 + }, + { + "epoch": 0.6942128033105524, + "grad_norm": 
3.1510136127471924, + "learning_rate": 4.516478030854915e-06, + "loss": 1.2016, + "step": 11072 + }, + { + "epoch": 0.6942755031663427, + "grad_norm": 3.493025302886963, + "learning_rate": 4.514779922231732e-06, + "loss": 1.2261, + "step": 11073 + }, + { + "epoch": 0.694338203022133, + "grad_norm": 3.116908550262451, + "learning_rate": 4.5130820398192645e-06, + "loss": 0.9433, + "step": 11074 + }, + { + "epoch": 0.6944009028779233, + "grad_norm": 3.6409997940063477, + "learning_rate": 4.511384383687529e-06, + "loss": 1.2719, + "step": 11075 + }, + { + "epoch": 0.6944636027337137, + "grad_norm": 3.381683111190796, + "learning_rate": 4.509686953906546e-06, + "loss": 0.9921, + "step": 11076 + }, + { + "epoch": 0.6945263025895041, + "grad_norm": 2.9800972938537598, + "learning_rate": 4.507989750546311e-06, + "loss": 1.136, + "step": 11077 + }, + { + "epoch": 0.6945890024452944, + "grad_norm": 3.7059414386749268, + "learning_rate": 4.506292773676816e-06, + "loss": 1.1243, + "step": 11078 + }, + { + "epoch": 0.6946517023010848, + "grad_norm": 3.8757166862487793, + "learning_rate": 4.504596023368051e-06, + "loss": 1.2197, + "step": 11079 + }, + { + "epoch": 0.6947144021568751, + "grad_norm": 3.618549108505249, + "learning_rate": 4.502899499689986e-06, + "loss": 1.1611, + "step": 11080 + }, + { + "epoch": 0.6947771020126654, + "grad_norm": 3.1859896183013916, + "learning_rate": 4.501203202712584e-06, + "loss": 0.9711, + "step": 11081 + }, + { + "epoch": 0.6948398018684557, + "grad_norm": 3.3307607173919678, + "learning_rate": 4.4995071325058e-06, + "loss": 1.1805, + "step": 11082 + }, + { + "epoch": 0.6949025017242461, + "grad_norm": 3.1736488342285156, + "learning_rate": 4.497811289139585e-06, + "loss": 0.8585, + "step": 11083 + }, + { + "epoch": 0.6949652015800364, + "grad_norm": 3.035975694656372, + "learning_rate": 4.4961156726838725e-06, + "loss": 1.108, + "step": 11084 + }, + { + "epoch": 0.6950279014358267, + "grad_norm": 3.41348934173584, + "learning_rate": 4.494420283208592e-06, + "loss": 0.9681, + "step": 11085 + }, + { + "epoch": 0.695090601291617, + "grad_norm": 3.185452461242676, + "learning_rate": 4.492725120783653e-06, + "loss": 0.9882, + "step": 11086 + }, + { + "epoch": 0.6951533011474074, + "grad_norm": 3.601417064666748, + "learning_rate": 4.491030185478976e-06, + "loss": 1.1648, + "step": 11087 + }, + { + "epoch": 0.6952160010031977, + "grad_norm": 2.9173696041107178, + "learning_rate": 4.489335477364454e-06, + "loss": 0.947, + "step": 11088 + }, + { + "epoch": 0.695278700858988, + "grad_norm": 3.085341215133667, + "learning_rate": 4.487640996509974e-06, + "loss": 0.9602, + "step": 11089 + }, + { + "epoch": 0.6953414007147783, + "grad_norm": 3.2297708988189697, + "learning_rate": 4.485946742985425e-06, + "loss": 1.134, + "step": 11090 + }, + { + "epoch": 0.6954041005705687, + "grad_norm": 3.7234346866607666, + "learning_rate": 4.484252716860671e-06, + "loss": 1.0348, + "step": 11091 + }, + { + "epoch": 0.695466800426359, + "grad_norm": 3.5338997840881348, + "learning_rate": 4.482558918205576e-06, + "loss": 0.9411, + "step": 11092 + }, + { + "epoch": 0.6955295002821493, + "grad_norm": 3.562089681625366, + "learning_rate": 4.48086534708999e-06, + "loss": 1.2086, + "step": 11093 + }, + { + "epoch": 0.6955922001379397, + "grad_norm": 2.8083503246307373, + "learning_rate": 4.47917200358376e-06, + "loss": 1.1822, + "step": 11094 + }, + { + "epoch": 0.69565489999373, + "grad_norm": 3.470494508743286, + "learning_rate": 4.47747888775672e-06, + "loss": 1.0699, + "step": 11095 + }, + { + 
"epoch": 0.6957175998495203, + "grad_norm": 3.1596896648406982, + "learning_rate": 4.475785999678687e-06, + "loss": 1.1498, + "step": 11096 + }, + { + "epoch": 0.6957802997053106, + "grad_norm": 3.626613140106201, + "learning_rate": 4.474093339419484e-06, + "loss": 1.1887, + "step": 11097 + }, + { + "epoch": 0.695842999561101, + "grad_norm": 3.109142303466797, + "learning_rate": 4.4724009070489125e-06, + "loss": 1.0004, + "step": 11098 + }, + { + "epoch": 0.6959056994168913, + "grad_norm": 3.0717146396636963, + "learning_rate": 4.47070870263677e-06, + "loss": 1.0592, + "step": 11099 + }, + { + "epoch": 0.6959683992726817, + "grad_norm": 3.54931378364563, + "learning_rate": 4.469016726252838e-06, + "loss": 1.0417, + "step": 11100 + }, + { + "epoch": 0.696031099128472, + "grad_norm": 3.5705831050872803, + "learning_rate": 4.4673249779669015e-06, + "loss": 1.0544, + "step": 11101 + }, + { + "epoch": 0.6960937989842624, + "grad_norm": 3.654322385787964, + "learning_rate": 4.465633457848723e-06, + "loss": 0.938, + "step": 11102 + }, + { + "epoch": 0.6961564988400527, + "grad_norm": 3.2870402336120605, + "learning_rate": 4.463942165968065e-06, + "loss": 1.0524, + "step": 11103 + }, + { + "epoch": 0.696219198695843, + "grad_norm": 3.042788505554199, + "learning_rate": 4.462251102394669e-06, + "loss": 1.117, + "step": 11104 + }, + { + "epoch": 0.6962818985516334, + "grad_norm": 3.5571422576904297, + "learning_rate": 4.460560267198283e-06, + "loss": 1.0846, + "step": 11105 + }, + { + "epoch": 0.6963445984074237, + "grad_norm": 3.3431649208068848, + "learning_rate": 4.458869660448634e-06, + "loss": 1.0281, + "step": 11106 + }, + { + "epoch": 0.696407298263214, + "grad_norm": 3.0944714546203613, + "learning_rate": 4.457179282215438e-06, + "loss": 1.1957, + "step": 11107 + }, + { + "epoch": 0.6964699981190043, + "grad_norm": 3.144390821456909, + "learning_rate": 4.455489132568413e-06, + "loss": 1.0875, + "step": 11108 + }, + { + "epoch": 0.6965326979747947, + "grad_norm": 3.6092159748077393, + "learning_rate": 4.45379921157726e-06, + "loss": 1.0247, + "step": 11109 + }, + { + "epoch": 0.696595397830585, + "grad_norm": 3.3720366954803467, + "learning_rate": 4.45210951931167e-06, + "loss": 1.0385, + "step": 11110 + }, + { + "epoch": 0.6966580976863753, + "grad_norm": 3.5288405418395996, + "learning_rate": 4.450420055841322e-06, + "loss": 1.122, + "step": 11111 + }, + { + "epoch": 0.6967207975421656, + "grad_norm": 3.3993313312530518, + "learning_rate": 4.448730821235895e-06, + "loss": 1.0407, + "step": 11112 + }, + { + "epoch": 0.696783497397956, + "grad_norm": 3.047236919403076, + "learning_rate": 4.447041815565053e-06, + "loss": 1.0531, + "step": 11113 + }, + { + "epoch": 0.6968461972537463, + "grad_norm": 3.1173763275146484, + "learning_rate": 4.445353038898448e-06, + "loss": 0.997, + "step": 11114 + }, + { + "epoch": 0.6969088971095366, + "grad_norm": 3.383941888809204, + "learning_rate": 4.443664491305723e-06, + "loss": 1.0941, + "step": 11115 + }, + { + "epoch": 0.696971596965327, + "grad_norm": 3.2706539630889893, + "learning_rate": 4.441976172856522e-06, + "loss": 1.0132, + "step": 11116 + }, + { + "epoch": 0.6970342968211173, + "grad_norm": 3.016763925552368, + "learning_rate": 4.440288083620464e-06, + "loss": 1.0125, + "step": 11117 + }, + { + "epoch": 0.6970969966769076, + "grad_norm": 3.9813308715820312, + "learning_rate": 4.438600223667165e-06, + "loss": 1.0177, + "step": 11118 + }, + { + "epoch": 0.6971596965326979, + "grad_norm": 3.416445255279541, + "learning_rate": 
4.436912593066241e-06, + "loss": 0.9296, + "step": 11119 + }, + { + "epoch": 0.6972223963884883, + "grad_norm": 3.395991802215576, + "learning_rate": 4.435225191887284e-06, + "loss": 1.0016, + "step": 11120 + }, + { + "epoch": 0.6972850962442786, + "grad_norm": 3.370161294937134, + "learning_rate": 4.433538020199882e-06, + "loss": 1.2484, + "step": 11121 + }, + { + "epoch": 0.6973477961000689, + "grad_norm": 3.520833969116211, + "learning_rate": 4.431851078073613e-06, + "loss": 1.1006, + "step": 11122 + }, + { + "epoch": 0.6974104959558594, + "grad_norm": 3.2241837978363037, + "learning_rate": 4.4301643655780515e-06, + "loss": 1.2558, + "step": 11123 + }, + { + "epoch": 0.6974731958116497, + "grad_norm": 2.9510116577148438, + "learning_rate": 4.428477882782754e-06, + "loss": 1.145, + "step": 11124 + }, + { + "epoch": 0.69753589566744, + "grad_norm": 3.4944746494293213, + "learning_rate": 4.4267916297572734e-06, + "loss": 1.0127, + "step": 11125 + }, + { + "epoch": 0.6975985955232303, + "grad_norm": 3.469684362411499, + "learning_rate": 4.425105606571145e-06, + "loss": 0.9824, + "step": 11126 + }, + { + "epoch": 0.6976612953790207, + "grad_norm": 3.343759298324585, + "learning_rate": 4.423419813293909e-06, + "loss": 0.9434, + "step": 11127 + }, + { + "epoch": 0.697723995234811, + "grad_norm": 2.8846139907836914, + "learning_rate": 4.421734249995082e-06, + "loss": 1.1576, + "step": 11128 + }, + { + "epoch": 0.6977866950906013, + "grad_norm": 3.11810302734375, + "learning_rate": 4.420048916744176e-06, + "loss": 1.196, + "step": 11129 + }, + { + "epoch": 0.6978493949463916, + "grad_norm": 3.1250827312469482, + "learning_rate": 4.4183638136106996e-06, + "loss": 1.1904, + "step": 11130 + }, + { + "epoch": 0.697912094802182, + "grad_norm": 3.7623164653778076, + "learning_rate": 4.416678940664143e-06, + "loss": 1.1494, + "step": 11131 + }, + { + "epoch": 0.6979747946579723, + "grad_norm": 3.660860538482666, + "learning_rate": 4.414994297973991e-06, + "loss": 0.9759, + "step": 11132 + }, + { + "epoch": 0.6980374945137626, + "grad_norm": 3.4494848251342773, + "learning_rate": 4.4133098856097146e-06, + "loss": 1.1436, + "step": 11133 + }, + { + "epoch": 0.698100194369553, + "grad_norm": 3.2119100093841553, + "learning_rate": 4.411625703640785e-06, + "loss": 1.086, + "step": 11134 + }, + { + "epoch": 0.6981628942253433, + "grad_norm": 3.0950067043304443, + "learning_rate": 4.409941752136656e-06, + "loss": 1.0758, + "step": 11135 + }, + { + "epoch": 0.6982255940811336, + "grad_norm": 3.4473133087158203, + "learning_rate": 4.4082580311667746e-06, + "loss": 1.1207, + "step": 11136 + }, + { + "epoch": 0.6982882939369239, + "grad_norm": 3.1501476764678955, + "learning_rate": 4.406574540800571e-06, + "loss": 1.1465, + "step": 11137 + }, + { + "epoch": 0.6983509937927143, + "grad_norm": 3.4128482341766357, + "learning_rate": 4.404891281107482e-06, + "loss": 1.0823, + "step": 11138 + }, + { + "epoch": 0.6984136936485046, + "grad_norm": 3.0848560333251953, + "learning_rate": 4.403208252156921e-06, + "loss": 1.2348, + "step": 11139 + }, + { + "epoch": 0.6984763935042949, + "grad_norm": 3.5783958435058594, + "learning_rate": 4.401525454018292e-06, + "loss": 1.151, + "step": 11140 + }, + { + "epoch": 0.6985390933600852, + "grad_norm": 3.3161869049072266, + "learning_rate": 4.399842886761001e-06, + "loss": 1.1012, + "step": 11141 + }, + { + "epoch": 0.6986017932158756, + "grad_norm": 3.2708022594451904, + "learning_rate": 4.398160550454435e-06, + "loss": 1.1406, + "step": 11142 + }, + { + "epoch": 
0.6986644930716659, + "grad_norm": 3.594372272491455, + "learning_rate": 4.396478445167971e-06, + "loss": 1.1143, + "step": 11143 + }, + { + "epoch": 0.6987271929274562, + "grad_norm": 3.2679872512817383, + "learning_rate": 4.394796570970978e-06, + "loss": 0.9143, + "step": 11144 + }, + { + "epoch": 0.6987898927832465, + "grad_norm": 3.0778136253356934, + "learning_rate": 4.393114927932822e-06, + "loss": 1.0228, + "step": 11145 + }, + { + "epoch": 0.698852592639037, + "grad_norm": 2.9674630165100098, + "learning_rate": 4.391433516122853e-06, + "loss": 1.0771, + "step": 11146 + }, + { + "epoch": 0.6989152924948273, + "grad_norm": 3.2966394424438477, + "learning_rate": 4.389752335610405e-06, + "loss": 0.9613, + "step": 11147 + }, + { + "epoch": 0.6989779923506176, + "grad_norm": 3.4752357006073, + "learning_rate": 4.388071386464822e-06, + "loss": 0.9573, + "step": 11148 + }, + { + "epoch": 0.699040692206408, + "grad_norm": 3.4606759548187256, + "learning_rate": 4.386390668755418e-06, + "loss": 1.0686, + "step": 11149 + }, + { + "epoch": 0.6991033920621983, + "grad_norm": 3.6836845874786377, + "learning_rate": 4.384710182551508e-06, + "loss": 1.0104, + "step": 11150 + }, + { + "epoch": 0.6991660919179886, + "grad_norm": 3.224224328994751, + "learning_rate": 4.383029927922392e-06, + "loss": 1.0457, + "step": 11151 + }, + { + "epoch": 0.699228791773779, + "grad_norm": 3.0980591773986816, + "learning_rate": 4.381349904937371e-06, + "loss": 1.0828, + "step": 11152 + }, + { + "epoch": 0.6992914916295693, + "grad_norm": 3.554476022720337, + "learning_rate": 4.379670113665725e-06, + "loss": 1.1756, + "step": 11153 + }, + { + "epoch": 0.6993541914853596, + "grad_norm": 3.493090867996216, + "learning_rate": 4.377990554176729e-06, + "loss": 1.0975, + "step": 11154 + }, + { + "epoch": 0.6994168913411499, + "grad_norm": 3.458008050918579, + "learning_rate": 4.3763112265396445e-06, + "loss": 1.1232, + "step": 11155 + }, + { + "epoch": 0.6994795911969403, + "grad_norm": 3.3916454315185547, + "learning_rate": 4.3746321308237335e-06, + "loss": 1.1581, + "step": 11156 + }, + { + "epoch": 0.6995422910527306, + "grad_norm": 3.3783631324768066, + "learning_rate": 4.37295326709824e-06, + "loss": 1.0163, + "step": 11157 + }, + { + "epoch": 0.6996049909085209, + "grad_norm": 3.431952714920044, + "learning_rate": 4.371274635432396e-06, + "loss": 1.1019, + "step": 11158 + }, + { + "epoch": 0.6996676907643112, + "grad_norm": 3.65128493309021, + "learning_rate": 4.369596235895435e-06, + "loss": 0.883, + "step": 11159 + }, + { + "epoch": 0.6997303906201016, + "grad_norm": 3.5960609912872314, + "learning_rate": 4.367918068556571e-06, + "loss": 1.0091, + "step": 11160 + }, + { + "epoch": 0.6997930904758919, + "grad_norm": 3.377363920211792, + "learning_rate": 4.366240133485012e-06, + "loss": 0.9172, + "step": 11161 + }, + { + "epoch": 0.6998557903316822, + "grad_norm": 3.3066673278808594, + "learning_rate": 4.364562430749952e-06, + "loss": 1.0407, + "step": 11162 + }, + { + "epoch": 0.6999184901874725, + "grad_norm": 3.2548487186431885, + "learning_rate": 4.362884960420588e-06, + "loss": 0.9763, + "step": 11163 + }, + { + "epoch": 0.6999811900432629, + "grad_norm": 3.2097504138946533, + "learning_rate": 4.361207722566094e-06, + "loss": 1.1563, + "step": 11164 + }, + { + "epoch": 0.7000438898990532, + "grad_norm": 3.1079843044281006, + "learning_rate": 4.359530717255639e-06, + "loss": 1.1576, + "step": 11165 + }, + { + "epoch": 0.7001065897548435, + "grad_norm": 3.211353063583374, + "learning_rate": 4.357853944558381e-06, 
+ "loss": 1.1512, + "step": 11166 + }, + { + "epoch": 0.7001692896106338, + "grad_norm": 3.210646152496338, + "learning_rate": 4.3561774045434755e-06, + "loss": 1.1845, + "step": 11167 + }, + { + "epoch": 0.7002319894664242, + "grad_norm": 3.391942262649536, + "learning_rate": 4.354501097280061e-06, + "loss": 1.2194, + "step": 11168 + }, + { + "epoch": 0.7002946893222146, + "grad_norm": 3.2991037368774414, + "learning_rate": 4.352825022837264e-06, + "loss": 1.1666, + "step": 11169 + }, + { + "epoch": 0.7003573891780049, + "grad_norm": 3.588144302368164, + "learning_rate": 4.351149181284214e-06, + "loss": 1.1503, + "step": 11170 + }, + { + "epoch": 0.7004200890337953, + "grad_norm": 3.6084535121917725, + "learning_rate": 4.349473572690016e-06, + "loss": 0.9645, + "step": 11171 + }, + { + "epoch": 0.7004827888895856, + "grad_norm": 3.7400827407836914, + "learning_rate": 4.347798197123777e-06, + "loss": 1.0413, + "step": 11172 + }, + { + "epoch": 0.7005454887453759, + "grad_norm": 3.599977731704712, + "learning_rate": 4.346123054654582e-06, + "loss": 1.0625, + "step": 11173 + }, + { + "epoch": 0.7006081886011662, + "grad_norm": 3.2042126655578613, + "learning_rate": 4.3444481453515245e-06, + "loss": 1.1284, + "step": 11174 + }, + { + "epoch": 0.7006708884569566, + "grad_norm": 3.296038866043091, + "learning_rate": 4.342773469283671e-06, + "loss": 1.1582, + "step": 11175 + }, + { + "epoch": 0.7007335883127469, + "grad_norm": 3.1122593879699707, + "learning_rate": 4.341099026520087e-06, + "loss": 0.956, + "step": 11176 + }, + { + "epoch": 0.7007962881685372, + "grad_norm": 3.1830344200134277, + "learning_rate": 4.339424817129822e-06, + "loss": 1.1081, + "step": 11177 + }, + { + "epoch": 0.7008589880243276, + "grad_norm": 3.3666601181030273, + "learning_rate": 4.33775084118193e-06, + "loss": 1.1731, + "step": 11178 + }, + { + "epoch": 0.7009216878801179, + "grad_norm": 3.5909276008605957, + "learning_rate": 4.336077098745439e-06, + "loss": 0.9634, + "step": 11179 + }, + { + "epoch": 0.7009843877359082, + "grad_norm": 3.0361523628234863, + "learning_rate": 4.334403589889373e-06, + "loss": 1.0536, + "step": 11180 + }, + { + "epoch": 0.7010470875916985, + "grad_norm": 3.5797791481018066, + "learning_rate": 4.332730314682754e-06, + "loss": 1.0097, + "step": 11181 + }, + { + "epoch": 0.7011097874474889, + "grad_norm": 3.0486679077148438, + "learning_rate": 4.331057273194584e-06, + "loss": 1.1618, + "step": 11182 + }, + { + "epoch": 0.7011724873032792, + "grad_norm": 3.537477970123291, + "learning_rate": 4.32938446549386e-06, + "loss": 0.9855, + "step": 11183 + }, + { + "epoch": 0.7012351871590695, + "grad_norm": 3.694657325744629, + "learning_rate": 4.327711891649564e-06, + "loss": 1.0618, + "step": 11184 + }, + { + "epoch": 0.7012978870148598, + "grad_norm": 3.427849292755127, + "learning_rate": 4.326039551730682e-06, + "loss": 0.9238, + "step": 11185 + }, + { + "epoch": 0.7013605868706502, + "grad_norm": 3.512465715408325, + "learning_rate": 4.3243674458061755e-06, + "loss": 1.1185, + "step": 11186 + }, + { + "epoch": 0.7014232867264405, + "grad_norm": 3.5662853717803955, + "learning_rate": 4.322695573945005e-06, + "loss": 0.9403, + "step": 11187 + }, + { + "epoch": 0.7014859865822308, + "grad_norm": 3.252289295196533, + "learning_rate": 4.321023936216112e-06, + "loss": 1.0637, + "step": 11188 + }, + { + "epoch": 0.7015486864380212, + "grad_norm": 3.129913330078125, + "learning_rate": 4.319352532688444e-06, + "loss": 1.0623, + "step": 11189 + }, + { + "epoch": 0.7016113862938115, + "grad_norm": 
2.9978315830230713, + "learning_rate": 4.317681363430924e-06, + "loss": 0.9979, + "step": 11190 + }, + { + "epoch": 0.7016740861496018, + "grad_norm": 3.3700478076934814, + "learning_rate": 4.316010428512472e-06, + "loss": 1.2191, + "step": 11191 + }, + { + "epoch": 0.7017367860053921, + "grad_norm": 3.1551170349121094, + "learning_rate": 4.3143397280019995e-06, + "loss": 1.0424, + "step": 11192 + }, + { + "epoch": 0.7017994858611826, + "grad_norm": 3.0717763900756836, + "learning_rate": 4.312669261968406e-06, + "loss": 1.0329, + "step": 11193 + }, + { + "epoch": 0.7018621857169729, + "grad_norm": 3.3580737113952637, + "learning_rate": 4.31099903048058e-06, + "loss": 1.1803, + "step": 11194 + }, + { + "epoch": 0.7019248855727632, + "grad_norm": 3.153548002243042, + "learning_rate": 4.309329033607399e-06, + "loss": 1.0726, + "step": 11195 + }, + { + "epoch": 0.7019875854285536, + "grad_norm": 3.6728570461273193, + "learning_rate": 4.307659271417741e-06, + "loss": 1.0007, + "step": 11196 + }, + { + "epoch": 0.7020502852843439, + "grad_norm": 3.5738742351531982, + "learning_rate": 4.305989743980463e-06, + "loss": 1.044, + "step": 11197 + }, + { + "epoch": 0.7021129851401342, + "grad_norm": 2.801403522491455, + "learning_rate": 4.304320451364413e-06, + "loss": 1.1202, + "step": 11198 + }, + { + "epoch": 0.7021756849959245, + "grad_norm": 3.470717430114746, + "learning_rate": 4.30265139363844e-06, + "loss": 1.1187, + "step": 11199 + }, + { + "epoch": 0.7022383848517149, + "grad_norm": 3.3167214393615723, + "learning_rate": 4.3009825708713725e-06, + "loss": 1.1581, + "step": 11200 + }, + { + "epoch": 0.7023010847075052, + "grad_norm": 3.1101503372192383, + "learning_rate": 4.299313983132032e-06, + "loss": 1.0565, + "step": 11201 + }, + { + "epoch": 0.7023637845632955, + "grad_norm": 3.030729293823242, + "learning_rate": 4.297645630489229e-06, + "loss": 1.0899, + "step": 11202 + }, + { + "epoch": 0.7024264844190858, + "grad_norm": 3.2455925941467285, + "learning_rate": 4.295977513011772e-06, + "loss": 0.8951, + "step": 11203 + }, + { + "epoch": 0.7024891842748762, + "grad_norm": 3.5164949893951416, + "learning_rate": 4.294309630768452e-06, + "loss": 1.0613, + "step": 11204 + }, + { + "epoch": 0.7025518841306665, + "grad_norm": 3.299463987350464, + "learning_rate": 4.292641983828052e-06, + "loss": 1.0883, + "step": 11205 + }, + { + "epoch": 0.7026145839864568, + "grad_norm": 3.353776454925537, + "learning_rate": 4.290974572259342e-06, + "loss": 1.1257, + "step": 11206 + }, + { + "epoch": 0.7026772838422471, + "grad_norm": 3.6071646213531494, + "learning_rate": 4.2893073961310935e-06, + "loss": 1.1284, + "step": 11207 + }, + { + "epoch": 0.7027399836980375, + "grad_norm": 4.147754192352295, + "learning_rate": 4.287640455512058e-06, + "loss": 1.0138, + "step": 11208 + }, + { + "epoch": 0.7028026835538278, + "grad_norm": 3.099760055541992, + "learning_rate": 4.285973750470976e-06, + "loss": 1.1999, + "step": 11209 + }, + { + "epoch": 0.7028653834096181, + "grad_norm": 3.2528319358825684, + "learning_rate": 4.284307281076588e-06, + "loss": 1.0574, + "step": 11210 + }, + { + "epoch": 0.7029280832654085, + "grad_norm": 3.171602964401245, + "learning_rate": 4.282641047397619e-06, + "loss": 1.0276, + "step": 11211 + }, + { + "epoch": 0.7029907831211988, + "grad_norm": 3.438288927078247, + "learning_rate": 4.2809750495027824e-06, + "loss": 1.0339, + "step": 11212 + }, + { + "epoch": 0.7030534829769891, + "grad_norm": 3.3842337131500244, + "learning_rate": 4.279309287460781e-06, + "loss": 1.1646, + "step": 
11213 + }, + { + "epoch": 0.7031161828327794, + "grad_norm": 3.1689937114715576, + "learning_rate": 4.277643761340318e-06, + "loss": 1.186, + "step": 11214 + }, + { + "epoch": 0.7031788826885698, + "grad_norm": 2.8148670196533203, + "learning_rate": 4.275978471210076e-06, + "loss": 0.8992, + "step": 11215 + }, + { + "epoch": 0.7032415825443602, + "grad_norm": 3.4621078968048096, + "learning_rate": 4.274313417138731e-06, + "loss": 1.1834, + "step": 11216 + }, + { + "epoch": 0.7033042824001505, + "grad_norm": 3.3140807151794434, + "learning_rate": 4.272648599194948e-06, + "loss": 1.0319, + "step": 11217 + }, + { + "epoch": 0.7033669822559409, + "grad_norm": 3.489147186279297, + "learning_rate": 4.27098401744739e-06, + "loss": 1.1222, + "step": 11218 + }, + { + "epoch": 0.7034296821117312, + "grad_norm": 3.063859224319458, + "learning_rate": 4.269319671964703e-06, + "loss": 1.1745, + "step": 11219 + }, + { + "epoch": 0.7034923819675215, + "grad_norm": 3.1027138233184814, + "learning_rate": 4.2676555628155184e-06, + "loss": 1.006, + "step": 11220 + }, + { + "epoch": 0.7035550818233118, + "grad_norm": 3.19120717048645, + "learning_rate": 4.265991690068472e-06, + "loss": 0.8919, + "step": 11221 + }, + { + "epoch": 0.7036177816791022, + "grad_norm": 3.396721839904785, + "learning_rate": 4.264328053792178e-06, + "loss": 1.0447, + "step": 11222 + }, + { + "epoch": 0.7036804815348925, + "grad_norm": 3.3741893768310547, + "learning_rate": 4.262664654055247e-06, + "loss": 1.0298, + "step": 11223 + }, + { + "epoch": 0.7037431813906828, + "grad_norm": 3.328716993331909, + "learning_rate": 4.261001490926272e-06, + "loss": 0.9988, + "step": 11224 + }, + { + "epoch": 0.7038058812464731, + "grad_norm": 3.007751226425171, + "learning_rate": 4.2593385644738494e-06, + "loss": 1.1324, + "step": 11225 + }, + { + "epoch": 0.7038685811022635, + "grad_norm": 3.310458183288574, + "learning_rate": 4.257675874766556e-06, + "loss": 1.0275, + "step": 11226 + }, + { + "epoch": 0.7039312809580538, + "grad_norm": 3.6160244941711426, + "learning_rate": 4.256013421872959e-06, + "loss": 0.9455, + "step": 11227 + }, + { + "epoch": 0.7039939808138441, + "grad_norm": 3.143969774246216, + "learning_rate": 4.2543512058616165e-06, + "loss": 1.123, + "step": 11228 + }, + { + "epoch": 0.7040566806696344, + "grad_norm": 2.8618149757385254, + "learning_rate": 4.2526892268010844e-06, + "loss": 1.1105, + "step": 11229 + }, + { + "epoch": 0.7041193805254248, + "grad_norm": 3.2860107421875, + "learning_rate": 4.2510274847599e-06, + "loss": 1.0829, + "step": 11230 + }, + { + "epoch": 0.7041820803812151, + "grad_norm": 3.021265983581543, + "learning_rate": 4.24936597980659e-06, + "loss": 1.0489, + "step": 11231 + }, + { + "epoch": 0.7042447802370054, + "grad_norm": 3.3950893878936768, + "learning_rate": 4.247704712009682e-06, + "loss": 0.9644, + "step": 11232 + }, + { + "epoch": 0.7043074800927958, + "grad_norm": 3.258284568786621, + "learning_rate": 4.2460436814376814e-06, + "loss": 1.2099, + "step": 11233 + }, + { + "epoch": 0.7043701799485861, + "grad_norm": 3.323580265045166, + "learning_rate": 4.244382888159091e-06, + "loss": 1.1493, + "step": 11234 + }, + { + "epoch": 0.7044328798043764, + "grad_norm": 3.1897130012512207, + "learning_rate": 4.242722332242398e-06, + "loss": 1.1252, + "step": 11235 + }, + { + "epoch": 0.7044955796601667, + "grad_norm": 3.4000535011291504, + "learning_rate": 4.241062013756092e-06, + "loss": 1.2397, + "step": 11236 + }, + { + "epoch": 0.7045582795159571, + "grad_norm": 3.290212631225586, + "learning_rate": 
4.239401932768639e-06, + "loss": 1.3818, + "step": 11237 + }, + { + "epoch": 0.7046209793717474, + "grad_norm": 3.7964377403259277, + "learning_rate": 4.237742089348502e-06, + "loss": 1.1344, + "step": 11238 + }, + { + "epoch": 0.7046836792275378, + "grad_norm": 3.37423038482666, + "learning_rate": 4.23608248356413e-06, + "loss": 0.9601, + "step": 11239 + }, + { + "epoch": 0.7047463790833282, + "grad_norm": 3.1051506996154785, + "learning_rate": 4.234423115483971e-06, + "loss": 0.9417, + "step": 11240 + }, + { + "epoch": 0.7048090789391185, + "grad_norm": 3.4573488235473633, + "learning_rate": 4.232763985176454e-06, + "loss": 1.0669, + "step": 11241 + }, + { + "epoch": 0.7048717787949088, + "grad_norm": 3.2695486545562744, + "learning_rate": 4.23110509271e-06, + "loss": 1.1492, + "step": 11242 + }, + { + "epoch": 0.7049344786506991, + "grad_norm": 3.45985746383667, + "learning_rate": 4.229446438153027e-06, + "loss": 0.9752, + "step": 11243 + }, + { + "epoch": 0.7049971785064895, + "grad_norm": 3.3217523097991943, + "learning_rate": 4.227788021573935e-06, + "loss": 1.1205, + "step": 11244 + }, + { + "epoch": 0.7050598783622798, + "grad_norm": 3.3373730182647705, + "learning_rate": 4.226129843041117e-06, + "loss": 1.1858, + "step": 11245 + }, + { + "epoch": 0.7051225782180701, + "grad_norm": 2.92866849899292, + "learning_rate": 4.224471902622952e-06, + "loss": 1.2136, + "step": 11246 + }, + { + "epoch": 0.7051852780738604, + "grad_norm": 3.288428783416748, + "learning_rate": 4.222814200387823e-06, + "loss": 0.9877, + "step": 11247 + }, + { + "epoch": 0.7052479779296508, + "grad_norm": 3.697258710861206, + "learning_rate": 4.221156736404087e-06, + "loss": 0.8552, + "step": 11248 + }, + { + "epoch": 0.7053106777854411, + "grad_norm": 3.3589272499084473, + "learning_rate": 4.2194995107401e-06, + "loss": 1.0098, + "step": 11249 + }, + { + "epoch": 0.7053733776412314, + "grad_norm": 3.7330000400543213, + "learning_rate": 4.217842523464203e-06, + "loss": 1.0682, + "step": 11250 + }, + { + "epoch": 0.7054360774970218, + "grad_norm": 2.9620718955993652, + "learning_rate": 4.216185774644735e-06, + "loss": 1.2672, + "step": 11251 + }, + { + "epoch": 0.7054987773528121, + "grad_norm": 3.4994635581970215, + "learning_rate": 4.214529264350019e-06, + "loss": 1.1681, + "step": 11252 + }, + { + "epoch": 0.7055614772086024, + "grad_norm": 3.1889286041259766, + "learning_rate": 4.212872992648366e-06, + "loss": 1.0266, + "step": 11253 + }, + { + "epoch": 0.7056241770643927, + "grad_norm": 3.119257926940918, + "learning_rate": 4.2112169596080865e-06, + "loss": 1.2799, + "step": 11254 + }, + { + "epoch": 0.7056868769201831, + "grad_norm": 3.171002149581909, + "learning_rate": 4.209561165297472e-06, + "loss": 1.0082, + "step": 11255 + }, + { + "epoch": 0.7057495767759734, + "grad_norm": 3.4380574226379395, + "learning_rate": 4.2079056097848085e-06, + "loss": 0.8907, + "step": 11256 + }, + { + "epoch": 0.7058122766317637, + "grad_norm": 3.435685873031616, + "learning_rate": 4.206250293138366e-06, + "loss": 0.9939, + "step": 11257 + }, + { + "epoch": 0.705874976487554, + "grad_norm": 3.1873531341552734, + "learning_rate": 4.204595215426418e-06, + "loss": 1.1717, + "step": 11258 + }, + { + "epoch": 0.7059376763433444, + "grad_norm": 3.458869218826294, + "learning_rate": 4.2029403767172175e-06, + "loss": 1.1413, + "step": 11259 + }, + { + "epoch": 0.7060003761991347, + "grad_norm": 3.1323423385620117, + "learning_rate": 4.201285777079005e-06, + "loss": 1.1335, + "step": 11260 + }, + { + "epoch": 0.706063076054925, + 
"grad_norm": 3.3946921825408936, + "learning_rate": 4.199631416580024e-06, + "loss": 1.2691, + "step": 11261 + }, + { + "epoch": 0.7061257759107155, + "grad_norm": 3.0142388343811035, + "learning_rate": 4.197977295288497e-06, + "loss": 1.1147, + "step": 11262 + }, + { + "epoch": 0.7061884757665058, + "grad_norm": 3.074579954147339, + "learning_rate": 4.196323413272638e-06, + "loss": 1.1595, + "step": 11263 + }, + { + "epoch": 0.7062511756222961, + "grad_norm": 3.466066360473633, + "learning_rate": 4.194669770600653e-06, + "loss": 1.0368, + "step": 11264 + }, + { + "epoch": 0.7063138754780864, + "grad_norm": 3.0636227130889893, + "learning_rate": 4.193016367340743e-06, + "loss": 1.1177, + "step": 11265 + }, + { + "epoch": 0.7063765753338768, + "grad_norm": 3.4584903717041016, + "learning_rate": 4.191363203561092e-06, + "loss": 1.2031, + "step": 11266 + }, + { + "epoch": 0.7064392751896671, + "grad_norm": 3.404184579849243, + "learning_rate": 4.189710279329876e-06, + "loss": 1.1048, + "step": 11267 + }, + { + "epoch": 0.7065019750454574, + "grad_norm": 3.445523738861084, + "learning_rate": 4.188057594715259e-06, + "loss": 1.1949, + "step": 11268 + }, + { + "epoch": 0.7065646749012477, + "grad_norm": 3.6616628170013428, + "learning_rate": 4.186405149785403e-06, + "loss": 0.9763, + "step": 11269 + }, + { + "epoch": 0.7066273747570381, + "grad_norm": 3.1351137161254883, + "learning_rate": 4.184752944608453e-06, + "loss": 1.009, + "step": 11270 + }, + { + "epoch": 0.7066900746128284, + "grad_norm": 3.4469804763793945, + "learning_rate": 4.183100979252542e-06, + "loss": 1.1036, + "step": 11271 + }, + { + "epoch": 0.7067527744686187, + "grad_norm": 3.4813952445983887, + "learning_rate": 4.1814492537858045e-06, + "loss": 1.0163, + "step": 11272 + }, + { + "epoch": 0.706815474324409, + "grad_norm": 3.413031816482544, + "learning_rate": 4.1797977682763535e-06, + "loss": 1.1295, + "step": 11273 + }, + { + "epoch": 0.7068781741801994, + "grad_norm": 3.1632919311523438, + "learning_rate": 4.178146522792296e-06, + "loss": 1.2243, + "step": 11274 + }, + { + "epoch": 0.7069408740359897, + "grad_norm": 3.440699815750122, + "learning_rate": 4.176495517401728e-06, + "loss": 1.0613, + "step": 11275 + }, + { + "epoch": 0.70700357389178, + "grad_norm": 3.113851308822632, + "learning_rate": 4.174844752172742e-06, + "loss": 1.1965, + "step": 11276 + }, + { + "epoch": 0.7070662737475704, + "grad_norm": 3.2789931297302246, + "learning_rate": 4.173194227173413e-06, + "loss": 1.0039, + "step": 11277 + }, + { + "epoch": 0.7071289736033607, + "grad_norm": 3.837747812271118, + "learning_rate": 4.171543942471808e-06, + "loss": 1.1616, + "step": 11278 + }, + { + "epoch": 0.707191673459151, + "grad_norm": 3.544442653656006, + "learning_rate": 4.169893898135981e-06, + "loss": 0.9707, + "step": 11279 + }, + { + "epoch": 0.7072543733149413, + "grad_norm": 3.4355483055114746, + "learning_rate": 4.16824409423399e-06, + "loss": 0.9684, + "step": 11280 + }, + { + "epoch": 0.7073170731707317, + "grad_norm": 3.1918888092041016, + "learning_rate": 4.166594530833866e-06, + "loss": 1.0844, + "step": 11281 + }, + { + "epoch": 0.707379773026522, + "grad_norm": 3.3137660026550293, + "learning_rate": 4.164945208003635e-06, + "loss": 1.075, + "step": 11282 + }, + { + "epoch": 0.7074424728823123, + "grad_norm": 3.210886001586914, + "learning_rate": 4.163296125811321e-06, + "loss": 1.1947, + "step": 11283 + }, + { + "epoch": 0.7075051727381027, + "grad_norm": 3.050325393676758, + "learning_rate": 4.161647284324931e-06, + "loss": 1.156, + 
"step": 11284 + }, + { + "epoch": 0.7075678725938931, + "grad_norm": 2.9582908153533936, + "learning_rate": 4.159998683612462e-06, + "loss": 1.0248, + "step": 11285 + }, + { + "epoch": 0.7076305724496834, + "grad_norm": 3.662104606628418, + "learning_rate": 4.158350323741898e-06, + "loss": 1.1726, + "step": 11286 + }, + { + "epoch": 0.7076932723054737, + "grad_norm": 3.1437599658966064, + "learning_rate": 4.156702204781226e-06, + "loss": 1.0571, + "step": 11287 + }, + { + "epoch": 0.7077559721612641, + "grad_norm": 2.9849634170532227, + "learning_rate": 4.155054326798409e-06, + "loss": 0.9878, + "step": 11288 + }, + { + "epoch": 0.7078186720170544, + "grad_norm": 3.103132724761963, + "learning_rate": 4.153406689861408e-06, + "loss": 1.0685, + "step": 11289 + }, + { + "epoch": 0.7078813718728447, + "grad_norm": 3.244018316268921, + "learning_rate": 4.151759294038167e-06, + "loss": 1.0044, + "step": 11290 + }, + { + "epoch": 0.707944071728635, + "grad_norm": 3.678013801574707, + "learning_rate": 4.15011213939663e-06, + "loss": 1.0553, + "step": 11291 + }, + { + "epoch": 0.7080067715844254, + "grad_norm": 3.393313407897949, + "learning_rate": 4.148465226004726e-06, + "loss": 1.0433, + "step": 11292 + }, + { + "epoch": 0.7080694714402157, + "grad_norm": 3.52522349357605, + "learning_rate": 4.146818553930367e-06, + "loss": 1.0943, + "step": 11293 + }, + { + "epoch": 0.708132171296006, + "grad_norm": 3.3897171020507812, + "learning_rate": 4.14517212324147e-06, + "loss": 1.1123, + "step": 11294 + }, + { + "epoch": 0.7081948711517964, + "grad_norm": 3.373561143875122, + "learning_rate": 4.1435259340059306e-06, + "loss": 1.138, + "step": 11295 + }, + { + "epoch": 0.7082575710075867, + "grad_norm": 3.168503999710083, + "learning_rate": 4.141879986291637e-06, + "loss": 1.0722, + "step": 11296 + }, + { + "epoch": 0.708320270863377, + "grad_norm": 3.278198003768921, + "learning_rate": 4.140234280166465e-06, + "loss": 1.1804, + "step": 11297 + }, + { + "epoch": 0.7083829707191673, + "grad_norm": 3.278090238571167, + "learning_rate": 4.138588815698291e-06, + "loss": 1.0491, + "step": 11298 + }, + { + "epoch": 0.7084456705749577, + "grad_norm": 3.615476369857788, + "learning_rate": 4.13694359295497e-06, + "loss": 1.1159, + "step": 11299 + }, + { + "epoch": 0.708508370430748, + "grad_norm": 3.6694445610046387, + "learning_rate": 4.135298612004352e-06, + "loss": 1.1005, + "step": 11300 + }, + { + "epoch": 0.7085710702865383, + "grad_norm": 3.6110572814941406, + "learning_rate": 4.1336538729142716e-06, + "loss": 1.0507, + "step": 11301 + }, + { + "epoch": 0.7086337701423286, + "grad_norm": 3.279634714126587, + "learning_rate": 4.1320093757525644e-06, + "loss": 1.1339, + "step": 11302 + }, + { + "epoch": 0.708696469998119, + "grad_norm": 3.2849514484405518, + "learning_rate": 4.130365120587048e-06, + "loss": 1.0982, + "step": 11303 + }, + { + "epoch": 0.7087591698539093, + "grad_norm": 3.290870428085327, + "learning_rate": 4.128721107485526e-06, + "loss": 1.1914, + "step": 11304 + }, + { + "epoch": 0.7088218697096996, + "grad_norm": 3.51076602935791, + "learning_rate": 4.127077336515805e-06, + "loss": 1.0494, + "step": 11305 + }, + { + "epoch": 0.70888456956549, + "grad_norm": 3.347820281982422, + "learning_rate": 4.125433807745671e-06, + "loss": 1.1257, + "step": 11306 + }, + { + "epoch": 0.7089472694212803, + "grad_norm": 3.273036241531372, + "learning_rate": 4.123790521242905e-06, + "loss": 1.1579, + "step": 11307 + }, + { + "epoch": 0.7090099692770707, + "grad_norm": 3.2700157165527344, + "learning_rate": 
4.12214747707527e-06, + "loss": 1.1173, + "step": 11308 + }, + { + "epoch": 0.709072669132861, + "grad_norm": 3.015000581741333, + "learning_rate": 4.120504675310532e-06, + "loss": 1.0001, + "step": 11309 + }, + { + "epoch": 0.7091353689886514, + "grad_norm": 3.0410914421081543, + "learning_rate": 4.11886211601644e-06, + "loss": 1.1598, + "step": 11310 + }, + { + "epoch": 0.7091980688444417, + "grad_norm": 3.1790730953216553, + "learning_rate": 4.1172197992607266e-06, + "loss": 1.0457, + "step": 11311 + }, + { + "epoch": 0.709260768700232, + "grad_norm": 3.5500259399414062, + "learning_rate": 4.1155777251111295e-06, + "loss": 1.1907, + "step": 11312 + }, + { + "epoch": 0.7093234685560224, + "grad_norm": 3.5937957763671875, + "learning_rate": 4.113935893635365e-06, + "loss": 1.0533, + "step": 11313 + }, + { + "epoch": 0.7093861684118127, + "grad_norm": 3.2624435424804688, + "learning_rate": 4.112294304901139e-06, + "loss": 1.0652, + "step": 11314 + }, + { + "epoch": 0.709448868267603, + "grad_norm": 3.06182861328125, + "learning_rate": 4.110652958976151e-06, + "loss": 1.1884, + "step": 11315 + }, + { + "epoch": 0.7095115681233933, + "grad_norm": 3.438210964202881, + "learning_rate": 4.109011855928096e-06, + "loss": 1.0684, + "step": 11316 + }, + { + "epoch": 0.7095742679791837, + "grad_norm": 3.504343032836914, + "learning_rate": 4.107370995824647e-06, + "loss": 0.9619, + "step": 11317 + }, + { + "epoch": 0.709636967834974, + "grad_norm": 3.1309471130371094, + "learning_rate": 4.105730378733478e-06, + "loss": 1.0166, + "step": 11318 + }, + { + "epoch": 0.7096996676907643, + "grad_norm": 3.164971113204956, + "learning_rate": 4.104090004722241e-06, + "loss": 1.1021, + "step": 11319 + }, + { + "epoch": 0.7097623675465546, + "grad_norm": 3.471193313598633, + "learning_rate": 4.102449873858593e-06, + "loss": 1.1169, + "step": 11320 + }, + { + "epoch": 0.709825067402345, + "grad_norm": 3.5202431678771973, + "learning_rate": 4.10080998621017e-06, + "loss": 1.1695, + "step": 11321 + }, + { + "epoch": 0.7098877672581353, + "grad_norm": 3.4126393795013428, + "learning_rate": 4.099170341844597e-06, + "loss": 1.0492, + "step": 11322 + }, + { + "epoch": 0.7099504671139256, + "grad_norm": 3.5445425510406494, + "learning_rate": 4.097530940829502e-06, + "loss": 0.9046, + "step": 11323 + }, + { + "epoch": 0.710013166969716, + "grad_norm": 2.9876065254211426, + "learning_rate": 4.095891783232486e-06, + "loss": 0.9947, + "step": 11324 + }, + { + "epoch": 0.7100758668255063, + "grad_norm": 3.318916082382202, + "learning_rate": 4.094252869121153e-06, + "loss": 1.0794, + "step": 11325 + }, + { + "epoch": 0.7101385666812966, + "grad_norm": 3.292724609375, + "learning_rate": 4.092614198563085e-06, + "loss": 1.2569, + "step": 11326 + }, + { + "epoch": 0.7102012665370869, + "grad_norm": 3.334094285964966, + "learning_rate": 4.090975771625869e-06, + "loss": 1.0369, + "step": 11327 + }, + { + "epoch": 0.7102639663928773, + "grad_norm": 3.090355396270752, + "learning_rate": 4.089337588377071e-06, + "loss": 1.1012, + "step": 11328 + }, + { + "epoch": 0.7103266662486676, + "grad_norm": 3.2988998889923096, + "learning_rate": 4.087699648884248e-06, + "loss": 1.107, + "step": 11329 + }, + { + "epoch": 0.7103893661044579, + "grad_norm": 3.5220067501068115, + "learning_rate": 4.086061953214946e-06, + "loss": 1.1125, + "step": 11330 + }, + { + "epoch": 0.7104520659602483, + "grad_norm": 3.0435047149658203, + "learning_rate": 4.084424501436712e-06, + "loss": 1.2849, + "step": 11331 + }, + { + "epoch": 0.7105147658160387, + 
"grad_norm": 3.3753368854522705, + "learning_rate": 4.082787293617069e-06, + "loss": 1.1528, + "step": 11332 + }, + { + "epoch": 0.710577465671829, + "grad_norm": 3.240920066833496, + "learning_rate": 4.0811503298235326e-06, + "loss": 1.1769, + "step": 11333 + }, + { + "epoch": 0.7106401655276193, + "grad_norm": 3.320067882537842, + "learning_rate": 4.079513610123619e-06, + "loss": 0.9912, + "step": 11334 + }, + { + "epoch": 0.7107028653834097, + "grad_norm": 3.1350767612457275, + "learning_rate": 4.0778771345848235e-06, + "loss": 0.9262, + "step": 11335 + }, + { + "epoch": 0.7107655652392, + "grad_norm": 3.536055088043213, + "learning_rate": 4.076240903274632e-06, + "loss": 1.1316, + "step": 11336 + }, + { + "epoch": 0.7108282650949903, + "grad_norm": 3.458685874938965, + "learning_rate": 4.0746049162605215e-06, + "loss": 1.0298, + "step": 11337 + }, + { + "epoch": 0.7108909649507806, + "grad_norm": 3.058851957321167, + "learning_rate": 4.072969173609968e-06, + "loss": 0.9568, + "step": 11338 + }, + { + "epoch": 0.710953664806571, + "grad_norm": 3.0455479621887207, + "learning_rate": 4.071333675390422e-06, + "loss": 1.1195, + "step": 11339 + }, + { + "epoch": 0.7110163646623613, + "grad_norm": 3.134794235229492, + "learning_rate": 4.069698421669336e-06, + "loss": 1.1907, + "step": 11340 + }, + { + "epoch": 0.7110790645181516, + "grad_norm": 3.2779974937438965, + "learning_rate": 4.068063412514141e-06, + "loss": 0.8565, + "step": 11341 + }, + { + "epoch": 0.7111417643739419, + "grad_norm": 3.2471961975097656, + "learning_rate": 4.066428647992275e-06, + "loss": 1.03, + "step": 11342 + }, + { + "epoch": 0.7112044642297323, + "grad_norm": 3.1605517864227295, + "learning_rate": 4.06479412817115e-06, + "loss": 1.0123, + "step": 11343 + }, + { + "epoch": 0.7112671640855226, + "grad_norm": 3.3169617652893066, + "learning_rate": 4.0631598531181715e-06, + "loss": 1.0179, + "step": 11344 + }, + { + "epoch": 0.7113298639413129, + "grad_norm": 3.238511562347412, + "learning_rate": 4.061525822900742e-06, + "loss": 1.0855, + "step": 11345 + }, + { + "epoch": 0.7113925637971033, + "grad_norm": 3.2811105251312256, + "learning_rate": 4.059892037586248e-06, + "loss": 1.0459, + "step": 11346 + }, + { + "epoch": 0.7114552636528936, + "grad_norm": 3.419800281524658, + "learning_rate": 4.058258497242065e-06, + "loss": 1.0588, + "step": 11347 + }, + { + "epoch": 0.7115179635086839, + "grad_norm": 3.3602328300476074, + "learning_rate": 4.0566252019355586e-06, + "loss": 1.1192, + "step": 11348 + }, + { + "epoch": 0.7115806633644742, + "grad_norm": 3.3667943477630615, + "learning_rate": 4.054992151734091e-06, + "loss": 1.0962, + "step": 11349 + }, + { + "epoch": 0.7116433632202646, + "grad_norm": 3.3409392833709717, + "learning_rate": 4.053359346705008e-06, + "loss": 0.9652, + "step": 11350 + }, + { + "epoch": 0.7117060630760549, + "grad_norm": 3.48215651512146, + "learning_rate": 4.051726786915644e-06, + "loss": 1.0903, + "step": 11351 + }, + { + "epoch": 0.7117687629318452, + "grad_norm": 3.388017177581787, + "learning_rate": 4.050094472433323e-06, + "loss": 1.0927, + "step": 11352 + }, + { + "epoch": 0.7118314627876355, + "grad_norm": 3.345550298690796, + "learning_rate": 4.048462403325372e-06, + "loss": 1.1393, + "step": 11353 + }, + { + "epoch": 0.7118941626434259, + "grad_norm": 3.2602784633636475, + "learning_rate": 4.046830579659089e-06, + "loss": 1.1341, + "step": 11354 + }, + { + "epoch": 0.7119568624992163, + "grad_norm": 3.8936986923217773, + "learning_rate": 4.0451990015017695e-06, + "loss": 0.9904, + 
"step": 11355 + }, + { + "epoch": 0.7120195623550066, + "grad_norm": 3.3111636638641357, + "learning_rate": 4.043567668920707e-06, + "loss": 1.1628, + "step": 11356 + }, + { + "epoch": 0.712082262210797, + "grad_norm": 3.604357957839966, + "learning_rate": 4.041936581983171e-06, + "loss": 1.0561, + "step": 11357 + }, + { + "epoch": 0.7121449620665873, + "grad_norm": 3.321909189224243, + "learning_rate": 4.040305740756434e-06, + "loss": 1.1658, + "step": 11358 + }, + { + "epoch": 0.7122076619223776, + "grad_norm": 3.491442918777466, + "learning_rate": 4.038675145307747e-06, + "loss": 1.2151, + "step": 11359 + }, + { + "epoch": 0.7122703617781679, + "grad_norm": 3.212836503982544, + "learning_rate": 4.037044795704356e-06, + "loss": 1.0786, + "step": 11360 + }, + { + "epoch": 0.7123330616339583, + "grad_norm": 3.2742199897766113, + "learning_rate": 4.035414692013501e-06, + "loss": 1.2077, + "step": 11361 + }, + { + "epoch": 0.7123957614897486, + "grad_norm": 3.44267201423645, + "learning_rate": 4.0337848343024035e-06, + "loss": 1.1602, + "step": 11362 + }, + { + "epoch": 0.7124584613455389, + "grad_norm": 3.4336447715759277, + "learning_rate": 4.032155222638276e-06, + "loss": 1.0627, + "step": 11363 + }, + { + "epoch": 0.7125211612013292, + "grad_norm": 3.5771560668945312, + "learning_rate": 4.0305258570883336e-06, + "loss": 1.0965, + "step": 11364 + }, + { + "epoch": 0.7125838610571196, + "grad_norm": 3.3352303504943848, + "learning_rate": 4.028896737719764e-06, + "loss": 1.0582, + "step": 11365 + }, + { + "epoch": 0.7126465609129099, + "grad_norm": 3.3789467811584473, + "learning_rate": 4.027267864599754e-06, + "loss": 1.0138, + "step": 11366 + }, + { + "epoch": 0.7127092607687002, + "grad_norm": 3.2444121837615967, + "learning_rate": 4.025639237795475e-06, + "loss": 0.9736, + "step": 11367 + }, + { + "epoch": 0.7127719606244906, + "grad_norm": 3.429859161376953, + "learning_rate": 4.024010857374098e-06, + "loss": 1.1508, + "step": 11368 + }, + { + "epoch": 0.7128346604802809, + "grad_norm": 2.912170886993408, + "learning_rate": 4.022382723402773e-06, + "loss": 1.0669, + "step": 11369 + }, + { + "epoch": 0.7128973603360712, + "grad_norm": 3.398771047592163, + "learning_rate": 4.020754835948647e-06, + "loss": 1.1154, + "step": 11370 + }, + { + "epoch": 0.7129600601918615, + "grad_norm": 3.3761849403381348, + "learning_rate": 4.019127195078848e-06, + "loss": 1.0986, + "step": 11371 + }, + { + "epoch": 0.7130227600476519, + "grad_norm": 3.8168327808380127, + "learning_rate": 4.0174998008605094e-06, + "loss": 0.9902, + "step": 11372 + }, + { + "epoch": 0.7130854599034422, + "grad_norm": 3.196810007095337, + "learning_rate": 4.015872653360739e-06, + "loss": 1.266, + "step": 11373 + }, + { + "epoch": 0.7131481597592325, + "grad_norm": 3.2957820892333984, + "learning_rate": 4.01424575264664e-06, + "loss": 1.1252, + "step": 11374 + }, + { + "epoch": 0.7132108596150228, + "grad_norm": 3.146878957748413, + "learning_rate": 4.01261909878531e-06, + "loss": 0.9495, + "step": 11375 + }, + { + "epoch": 0.7132735594708132, + "grad_norm": 3.3303589820861816, + "learning_rate": 4.010992691843829e-06, + "loss": 0.9314, + "step": 11376 + }, + { + "epoch": 0.7133362593266035, + "grad_norm": 3.582963705062866, + "learning_rate": 4.009366531889273e-06, + "loss": 1.0632, + "step": 11377 + }, + { + "epoch": 0.7133989591823939, + "grad_norm": 3.281006336212158, + "learning_rate": 4.007740618988697e-06, + "loss": 1.2543, + "step": 11378 + }, + { + "epoch": 0.7134616590381843, + "grad_norm": 3.489889144897461, + 
"learning_rate": 4.006114953209165e-06, + "loss": 1.2966, + "step": 11379 + }, + { + "epoch": 0.7135243588939746, + "grad_norm": 4.0092010498046875, + "learning_rate": 4.004489534617712e-06, + "loss": 1.0834, + "step": 11380 + }, + { + "epoch": 0.7135870587497649, + "grad_norm": 3.2580771446228027, + "learning_rate": 4.0028643632813715e-06, + "loss": 1.0554, + "step": 11381 + }, + { + "epoch": 0.7136497586055552, + "grad_norm": 3.545743703842163, + "learning_rate": 4.001239439267168e-06, + "loss": 1.2064, + "step": 11382 + }, + { + "epoch": 0.7137124584613456, + "grad_norm": 3.690387010574341, + "learning_rate": 3.999614762642113e-06, + "loss": 0.9061, + "step": 11383 + }, + { + "epoch": 0.7137751583171359, + "grad_norm": 3.2530317306518555, + "learning_rate": 3.997990333473207e-06, + "loss": 1.0879, + "step": 11384 + }, + { + "epoch": 0.7138378581729262, + "grad_norm": 3.1560394763946533, + "learning_rate": 3.996366151827438e-06, + "loss": 1.0278, + "step": 11385 + }, + { + "epoch": 0.7139005580287165, + "grad_norm": 3.6962733268737793, + "learning_rate": 3.994742217771795e-06, + "loss": 1.0368, + "step": 11386 + }, + { + "epoch": 0.7139632578845069, + "grad_norm": 2.949867010116577, + "learning_rate": 3.9931185313732446e-06, + "loss": 1.1187, + "step": 11387 + }, + { + "epoch": 0.7140259577402972, + "grad_norm": 3.3112359046936035, + "learning_rate": 3.991495092698748e-06, + "loss": 1.169, + "step": 11388 + }, + { + "epoch": 0.7140886575960875, + "grad_norm": 3.3019800186157227, + "learning_rate": 3.989871901815254e-06, + "loss": 1.1493, + "step": 11389 + }, + { + "epoch": 0.7141513574518779, + "grad_norm": 3.4230899810791016, + "learning_rate": 3.988248958789708e-06, + "loss": 1.0238, + "step": 11390 + }, + { + "epoch": 0.7142140573076682, + "grad_norm": 2.974745512008667, + "learning_rate": 3.986626263689038e-06, + "loss": 1.134, + "step": 11391 + }, + { + "epoch": 0.7142767571634585, + "grad_norm": 3.2395777702331543, + "learning_rate": 3.985003816580159e-06, + "loss": 1.1102, + "step": 11392 + }, + { + "epoch": 0.7143394570192488, + "grad_norm": 3.3101046085357666, + "learning_rate": 3.98338161752999e-06, + "loss": 1.0898, + "step": 11393 + }, + { + "epoch": 0.7144021568750392, + "grad_norm": 3.1442182064056396, + "learning_rate": 3.981759666605425e-06, + "loss": 1.0279, + "step": 11394 + }, + { + "epoch": 0.7144648567308295, + "grad_norm": 3.3645694255828857, + "learning_rate": 3.980137963873354e-06, + "loss": 0.8776, + "step": 11395 + }, + { + "epoch": 0.7145275565866198, + "grad_norm": 3.0026397705078125, + "learning_rate": 3.978516509400653e-06, + "loss": 1.1256, + "step": 11396 + }, + { + "epoch": 0.7145902564424101, + "grad_norm": 3.0921332836151123, + "learning_rate": 3.9768953032541975e-06, + "loss": 1.1643, + "step": 11397 + }, + { + "epoch": 0.7146529562982005, + "grad_norm": 3.1945688724517822, + "learning_rate": 3.975274345500843e-06, + "loss": 1.1065, + "step": 11398 + }, + { + "epoch": 0.7147156561539908, + "grad_norm": 3.2918765544891357, + "learning_rate": 3.973653636207437e-06, + "loss": 1.2144, + "step": 11399 + }, + { + "epoch": 0.7147783560097811, + "grad_norm": 3.162997245788574, + "learning_rate": 3.972033175440816e-06, + "loss": 0.952, + "step": 11400 + }, + { + "epoch": 0.7148410558655716, + "grad_norm": 2.779254674911499, + "learning_rate": 3.970412963267812e-06, + "loss": 1.2342, + "step": 11401 + }, + { + "epoch": 0.7149037557213619, + "grad_norm": 3.676471471786499, + "learning_rate": 3.968792999755242e-06, + "loss": 1.0117, + "step": 11402 + }, + { + 
"epoch": 0.7149664555771522, + "grad_norm": 3.1965765953063965, + "learning_rate": 3.967173284969909e-06, + "loss": 1.1714, + "step": 11403 + }, + { + "epoch": 0.7150291554329425, + "grad_norm": 3.6174168586730957, + "learning_rate": 3.965553818978615e-06, + "loss": 1.0083, + "step": 11404 + }, + { + "epoch": 0.7150918552887329, + "grad_norm": 3.6702017784118652, + "learning_rate": 3.963934601848145e-06, + "loss": 1.0572, + "step": 11405 + }, + { + "epoch": 0.7151545551445232, + "grad_norm": 2.943572759628296, + "learning_rate": 3.962315633645277e-06, + "loss": 1.0646, + "step": 11406 + }, + { + "epoch": 0.7152172550003135, + "grad_norm": 3.662273645401001, + "learning_rate": 3.960696914436771e-06, + "loss": 1.2285, + "step": 11407 + }, + { + "epoch": 0.7152799548561039, + "grad_norm": 3.1405467987060547, + "learning_rate": 3.959078444289392e-06, + "loss": 0.9929, + "step": 11408 + }, + { + "epoch": 0.7153426547118942, + "grad_norm": 3.2555510997772217, + "learning_rate": 3.957460223269883e-06, + "loss": 1.0926, + "step": 11409 + }, + { + "epoch": 0.7154053545676845, + "grad_norm": 3.3993430137634277, + "learning_rate": 3.955842251444978e-06, + "loss": 1.0579, + "step": 11410 + }, + { + "epoch": 0.7154680544234748, + "grad_norm": 3.171781063079834, + "learning_rate": 3.954224528881397e-06, + "loss": 1.1498, + "step": 11411 + }, + { + "epoch": 0.7155307542792652, + "grad_norm": 3.1221799850463867, + "learning_rate": 3.952607055645866e-06, + "loss": 1.1027, + "step": 11412 + }, + { + "epoch": 0.7155934541350555, + "grad_norm": 3.142354965209961, + "learning_rate": 3.950989831805083e-06, + "loss": 1.0507, + "step": 11413 + }, + { + "epoch": 0.7156561539908458, + "grad_norm": 3.6234304904937744, + "learning_rate": 3.949372857425741e-06, + "loss": 0.9359, + "step": 11414 + }, + { + "epoch": 0.7157188538466361, + "grad_norm": 3.038036823272705, + "learning_rate": 3.947756132574529e-06, + "loss": 1.1178, + "step": 11415 + }, + { + "epoch": 0.7157815537024265, + "grad_norm": 3.6426918506622314, + "learning_rate": 3.94613965731812e-06, + "loss": 1.1246, + "step": 11416 + }, + { + "epoch": 0.7158442535582168, + "grad_norm": 3.1648316383361816, + "learning_rate": 3.944523431723174e-06, + "loss": 1.1355, + "step": 11417 + }, + { + "epoch": 0.7159069534140071, + "grad_norm": 3.5827412605285645, + "learning_rate": 3.942907455856344e-06, + "loss": 0.9193, + "step": 11418 + }, + { + "epoch": 0.7159696532697974, + "grad_norm": 3.480319023132324, + "learning_rate": 3.941291729784278e-06, + "loss": 1.1221, + "step": 11419 + }, + { + "epoch": 0.7160323531255878, + "grad_norm": 3.2961196899414062, + "learning_rate": 3.9396762535736055e-06, + "loss": 1.1061, + "step": 11420 + }, + { + "epoch": 0.7160950529813781, + "grad_norm": 3.4143636226654053, + "learning_rate": 3.938061027290949e-06, + "loss": 0.9892, + "step": 11421 + }, + { + "epoch": 0.7161577528371684, + "grad_norm": 3.2647109031677246, + "learning_rate": 3.936446051002917e-06, + "loss": 1.0886, + "step": 11422 + }, + { + "epoch": 0.7162204526929588, + "grad_norm": 3.4541242122650146, + "learning_rate": 3.934831324776118e-06, + "loss": 1.1804, + "step": 11423 + }, + { + "epoch": 0.7162831525487492, + "grad_norm": 3.435065269470215, + "learning_rate": 3.93321684867714e-06, + "loss": 1.1509, + "step": 11424 + }, + { + "epoch": 0.7163458524045395, + "grad_norm": 3.479624032974243, + "learning_rate": 3.931602622772561e-06, + "loss": 1.1609, + "step": 11425 + }, + { + "epoch": 0.7164085522603298, + "grad_norm": 3.1512999534606934, + "learning_rate": 
3.929988647128959e-06, + "loss": 1.1206, + "step": 11426 + }, + { + "epoch": 0.7164712521161202, + "grad_norm": 3.358109712600708, + "learning_rate": 3.9283749218128885e-06, + "loss": 1.1606, + "step": 11427 + }, + { + "epoch": 0.7165339519719105, + "grad_norm": 3.485276937484741, + "learning_rate": 3.926761446890902e-06, + "loss": 1.0162, + "step": 11428 + }, + { + "epoch": 0.7165966518277008, + "grad_norm": 3.5213623046875, + "learning_rate": 3.925148222429536e-06, + "loss": 1.0955, + "step": 11429 + }, + { + "epoch": 0.7166593516834912, + "grad_norm": 3.4800784587860107, + "learning_rate": 3.923535248495325e-06, + "loss": 1.0322, + "step": 11430 + }, + { + "epoch": 0.7167220515392815, + "grad_norm": 3.0597949028015137, + "learning_rate": 3.921922525154787e-06, + "loss": 1.2501, + "step": 11431 + }, + { + "epoch": 0.7167847513950718, + "grad_norm": 3.3730432987213135, + "learning_rate": 3.920310052474428e-06, + "loss": 1.0276, + "step": 11432 + }, + { + "epoch": 0.7168474512508621, + "grad_norm": 3.158473014831543, + "learning_rate": 3.918697830520746e-06, + "loss": 0.9782, + "step": 11433 + }, + { + "epoch": 0.7169101511066525, + "grad_norm": 3.5004005432128906, + "learning_rate": 3.917085859360234e-06, + "loss": 0.9557, + "step": 11434 + }, + { + "epoch": 0.7169728509624428, + "grad_norm": 3.479423999786377, + "learning_rate": 3.9154741390593655e-06, + "loss": 1.2156, + "step": 11435 + }, + { + "epoch": 0.7170355508182331, + "grad_norm": 3.317549228668213, + "learning_rate": 3.9138626696846075e-06, + "loss": 1.1217, + "step": 11436 + }, + { + "epoch": 0.7170982506740234, + "grad_norm": 3.7440333366394043, + "learning_rate": 3.912251451302421e-06, + "loss": 0.9144, + "step": 11437 + }, + { + "epoch": 0.7171609505298138, + "grad_norm": 3.296462297439575, + "learning_rate": 3.910640483979251e-06, + "loss": 1.2039, + "step": 11438 + }, + { + "epoch": 0.7172236503856041, + "grad_norm": 2.90974760055542, + "learning_rate": 3.909029767781534e-06, + "loss": 1.0171, + "step": 11439 + }, + { + "epoch": 0.7172863502413944, + "grad_norm": 3.520224094390869, + "learning_rate": 3.90741930277569e-06, + "loss": 1.0889, + "step": 11440 + }, + { + "epoch": 0.7173490500971847, + "grad_norm": 3.7413623332977295, + "learning_rate": 3.905809089028145e-06, + "loss": 1.0603, + "step": 11441 + }, + { + "epoch": 0.7174117499529751, + "grad_norm": 3.348210573196411, + "learning_rate": 3.904199126605299e-06, + "loss": 1.1737, + "step": 11442 + }, + { + "epoch": 0.7174744498087654, + "grad_norm": 3.680166006088257, + "learning_rate": 3.902589415573545e-06, + "loss": 1.1685, + "step": 11443 + }, + { + "epoch": 0.7175371496645557, + "grad_norm": 3.3359923362731934, + "learning_rate": 3.900979955999271e-06, + "loss": 1.1091, + "step": 11444 + }, + { + "epoch": 0.7175998495203461, + "grad_norm": 2.9783852100372314, + "learning_rate": 3.8993707479488516e-06, + "loss": 1.0926, + "step": 11445 + }, + { + "epoch": 0.7176625493761364, + "grad_norm": 3.7383549213409424, + "learning_rate": 3.8977617914886476e-06, + "loss": 0.9307, + "step": 11446 + }, + { + "epoch": 0.7177252492319268, + "grad_norm": 2.906526803970337, + "learning_rate": 3.896153086685012e-06, + "loss": 1.0074, + "step": 11447 + }, + { + "epoch": 0.7177879490877171, + "grad_norm": 3.833416223526001, + "learning_rate": 3.894544633604291e-06, + "loss": 1.2864, + "step": 11448 + }, + { + "epoch": 0.7178506489435075, + "grad_norm": 3.159217596054077, + "learning_rate": 3.892936432312818e-06, + "loss": 0.903, + "step": 11449 + }, + { + "epoch": 
0.7179133487992978, + "grad_norm": 3.5108768939971924, + "learning_rate": 3.891328482876913e-06, + "loss": 1.2, + "step": 11450 + }, + { + "epoch": 0.7179760486550881, + "grad_norm": 3.0117151737213135, + "learning_rate": 3.889720785362884e-06, + "loss": 1.0982, + "step": 11451 + }, + { + "epoch": 0.7180387485108785, + "grad_norm": 3.005559206008911, + "learning_rate": 3.88811333983704e-06, + "loss": 1.1261, + "step": 11452 + }, + { + "epoch": 0.7181014483666688, + "grad_norm": 3.3972370624542236, + "learning_rate": 3.886506146365668e-06, + "loss": 1.0789, + "step": 11453 + }, + { + "epoch": 0.7181641482224591, + "grad_norm": 3.1713767051696777, + "learning_rate": 3.884899205015048e-06, + "loss": 1.1757, + "step": 11454 + }, + { + "epoch": 0.7182268480782494, + "grad_norm": 3.559299945831299, + "learning_rate": 3.883292515851454e-06, + "loss": 0.9313, + "step": 11455 + }, + { + "epoch": 0.7182895479340398, + "grad_norm": 3.288698434829712, + "learning_rate": 3.8816860789411435e-06, + "loss": 1.0684, + "step": 11456 + }, + { + "epoch": 0.7183522477898301, + "grad_norm": 3.497938871383667, + "learning_rate": 3.880079894350367e-06, + "loss": 1.0041, + "step": 11457 + }, + { + "epoch": 0.7184149476456204, + "grad_norm": 3.3071646690368652, + "learning_rate": 3.878473962145359e-06, + "loss": 1.0886, + "step": 11458 + }, + { + "epoch": 0.7184776475014107, + "grad_norm": 3.3837451934814453, + "learning_rate": 3.876868282392357e-06, + "loss": 1.0694, + "step": 11459 + }, + { + "epoch": 0.7185403473572011, + "grad_norm": 3.422330141067505, + "learning_rate": 3.875262855157573e-06, + "loss": 1.1761, + "step": 11460 + }, + { + "epoch": 0.7186030472129914, + "grad_norm": 3.1988205909729004, + "learning_rate": 3.8736576805072165e-06, + "loss": 1.0889, + "step": 11461 + }, + { + "epoch": 0.7186657470687817, + "grad_norm": 3.3893353939056396, + "learning_rate": 3.872052758507482e-06, + "loss": 1.0307, + "step": 11462 + }, + { + "epoch": 0.718728446924572, + "grad_norm": 3.24700927734375, + "learning_rate": 3.8704480892245625e-06, + "loss": 1.0086, + "step": 11463 + }, + { + "epoch": 0.7187911467803624, + "grad_norm": 3.469346523284912, + "learning_rate": 3.86884367272463e-06, + "loss": 0.9916, + "step": 11464 + }, + { + "epoch": 0.7188538466361527, + "grad_norm": 3.089742422103882, + "learning_rate": 3.867239509073851e-06, + "loss": 1.0479, + "step": 11465 + }, + { + "epoch": 0.718916546491943, + "grad_norm": 3.2197699546813965, + "learning_rate": 3.865635598338385e-06, + "loss": 1.0626, + "step": 11466 + }, + { + "epoch": 0.7189792463477334, + "grad_norm": 3.0866971015930176, + "learning_rate": 3.864031940584374e-06, + "loss": 1.0722, + "step": 11467 + }, + { + "epoch": 0.7190419462035237, + "grad_norm": 3.26312255859375, + "learning_rate": 3.862428535877954e-06, + "loss": 1.0973, + "step": 11468 + }, + { + "epoch": 0.719104646059314, + "grad_norm": 3.357131242752075, + "learning_rate": 3.860825384285247e-06, + "loss": 0.9451, + "step": 11469 + }, + { + "epoch": 0.7191673459151045, + "grad_norm": 3.1844661235809326, + "learning_rate": 3.859222485872372e-06, + "loss": 1.085, + "step": 11470 + }, + { + "epoch": 0.7192300457708948, + "grad_norm": 3.2020652294158936, + "learning_rate": 3.85761984070543e-06, + "loss": 1.0716, + "step": 11471 + }, + { + "epoch": 0.7192927456266851, + "grad_norm": 3.5388810634613037, + "learning_rate": 3.856017448850514e-06, + "loss": 1.0874, + "step": 11472 + }, + { + "epoch": 0.7193554454824754, + "grad_norm": 3.287327289581299, + "learning_rate": 3.854415310373702e-06, + 
"loss": 1.0901, + "step": 11473 + }, + { + "epoch": 0.7194181453382658, + "grad_norm": 3.3328781127929688, + "learning_rate": 3.852813425341076e-06, + "loss": 1.1172, + "step": 11474 + }, + { + "epoch": 0.7194808451940561, + "grad_norm": 2.8877620697021484, + "learning_rate": 3.8512117938186925e-06, + "loss": 1.1848, + "step": 11475 + }, + { + "epoch": 0.7195435450498464, + "grad_norm": 3.2317399978637695, + "learning_rate": 3.8496104158726e-06, + "loss": 1.1685, + "step": 11476 + }, + { + "epoch": 0.7196062449056367, + "grad_norm": 3.722071647644043, + "learning_rate": 3.848009291568845e-06, + "loss": 1.0473, + "step": 11477 + }, + { + "epoch": 0.7196689447614271, + "grad_norm": 3.3608791828155518, + "learning_rate": 3.846408420973456e-06, + "loss": 1.144, + "step": 11478 + }, + { + "epoch": 0.7197316446172174, + "grad_norm": 3.338381767272949, + "learning_rate": 3.8448078041524515e-06, + "loss": 1.304, + "step": 11479 + }, + { + "epoch": 0.7197943444730077, + "grad_norm": 3.336751937866211, + "learning_rate": 3.84320744117184e-06, + "loss": 1.1406, + "step": 11480 + }, + { + "epoch": 0.719857044328798, + "grad_norm": 3.3101322650909424, + "learning_rate": 3.841607332097625e-06, + "loss": 1.2868, + "step": 11481 + }, + { + "epoch": 0.7199197441845884, + "grad_norm": 3.028918504714966, + "learning_rate": 3.840007476995793e-06, + "loss": 1.0299, + "step": 11482 + }, + { + "epoch": 0.7199824440403787, + "grad_norm": 3.1983864307403564, + "learning_rate": 3.838407875932322e-06, + "loss": 1.1515, + "step": 11483 + }, + { + "epoch": 0.720045143896169, + "grad_norm": 3.1277589797973633, + "learning_rate": 3.836808528973175e-06, + "loss": 1.1453, + "step": 11484 + }, + { + "epoch": 0.7201078437519594, + "grad_norm": 3.077873468399048, + "learning_rate": 3.835209436184317e-06, + "loss": 1.0083, + "step": 11485 + }, + { + "epoch": 0.7201705436077497, + "grad_norm": 3.2445313930511475, + "learning_rate": 3.833610597631692e-06, + "loss": 1.0008, + "step": 11486 + }, + { + "epoch": 0.72023324346354, + "grad_norm": 4.01209020614624, + "learning_rate": 3.832012013381231e-06, + "loss": 0.8218, + "step": 11487 + }, + { + "epoch": 0.7202959433193303, + "grad_norm": 3.297175884246826, + "learning_rate": 3.830413683498867e-06, + "loss": 1.1856, + "step": 11488 + }, + { + "epoch": 0.7203586431751207, + "grad_norm": 3.248575448989868, + "learning_rate": 3.828815608050512e-06, + "loss": 1.0605, + "step": 11489 + }, + { + "epoch": 0.720421343030911, + "grad_norm": 3.445122003555298, + "learning_rate": 3.827217787102072e-06, + "loss": 0.9846, + "step": 11490 + }, + { + "epoch": 0.7204840428867013, + "grad_norm": 3.3949618339538574, + "learning_rate": 3.825620220719436e-06, + "loss": 1.2372, + "step": 11491 + }, + { + "epoch": 0.7205467427424916, + "grad_norm": 3.934511661529541, + "learning_rate": 3.824022908968496e-06, + "loss": 1.1362, + "step": 11492 + }, + { + "epoch": 0.720609442598282, + "grad_norm": 3.639019250869751, + "learning_rate": 3.822425851915119e-06, + "loss": 1.0257, + "step": 11493 + }, + { + "epoch": 0.7206721424540724, + "grad_norm": 3.310154438018799, + "learning_rate": 3.820829049625168e-06, + "loss": 1.1526, + "step": 11494 + }, + { + "epoch": 0.7207348423098627, + "grad_norm": 3.4171698093414307, + "learning_rate": 3.819232502164499e-06, + "loss": 1.0256, + "step": 11495 + }, + { + "epoch": 0.7207975421656531, + "grad_norm": 3.2639365196228027, + "learning_rate": 3.817636209598953e-06, + "loss": 1.0479, + "step": 11496 + }, + { + "epoch": 0.7208602420214434, + "grad_norm": 
3.0828795433044434, + "learning_rate": 3.816040171994359e-06, + "loss": 1.0957, + "step": 11497 + }, + { + "epoch": 0.7209229418772337, + "grad_norm": 3.5830631256103516, + "learning_rate": 3.8144443894165338e-06, + "loss": 1.0153, + "step": 11498 + }, + { + "epoch": 0.720985641733024, + "grad_norm": 3.093336820602417, + "learning_rate": 3.8128488619312965e-06, + "loss": 1.1847, + "step": 11499 + }, + { + "epoch": 0.7210483415888144, + "grad_norm": 3.5494322776794434, + "learning_rate": 3.8112535896044422e-06, + "loss": 1.1337, + "step": 11500 + }, + { + "epoch": 0.7211110414446047, + "grad_norm": 3.244904041290283, + "learning_rate": 3.809658572501761e-06, + "loss": 1.0774, + "step": 11501 + }, + { + "epoch": 0.721173741300395, + "grad_norm": 3.4325711727142334, + "learning_rate": 3.8080638106890255e-06, + "loss": 1.0816, + "step": 11502 + }, + { + "epoch": 0.7212364411561853, + "grad_norm": 3.38761830329895, + "learning_rate": 3.8064693042320133e-06, + "loss": 1.0421, + "step": 11503 + }, + { + "epoch": 0.7212991410119757, + "grad_norm": 2.983729124069214, + "learning_rate": 3.804875053196477e-06, + "loss": 1.0771, + "step": 11504 + }, + { + "epoch": 0.721361840867766, + "grad_norm": 3.104203939437866, + "learning_rate": 3.8032810576481618e-06, + "loss": 1.1979, + "step": 11505 + }, + { + "epoch": 0.7214245407235563, + "grad_norm": 3.391045570373535, + "learning_rate": 3.801687317652809e-06, + "loss": 1.0403, + "step": 11506 + }, + { + "epoch": 0.7214872405793467, + "grad_norm": 3.1510226726531982, + "learning_rate": 3.800093833276142e-06, + "loss": 1.0773, + "step": 11507 + }, + { + "epoch": 0.721549940435137, + "grad_norm": 3.454298257827759, + "learning_rate": 3.7985006045838757e-06, + "loss": 1.1174, + "step": 11508 + }, + { + "epoch": 0.7216126402909273, + "grad_norm": 3.037601947784424, + "learning_rate": 3.796907631641712e-06, + "loss": 1.058, + "step": 11509 + }, + { + "epoch": 0.7216753401467176, + "grad_norm": 3.3605570793151855, + "learning_rate": 3.795314914515352e-06, + "loss": 0.9264, + "step": 11510 + }, + { + "epoch": 0.721738040002508, + "grad_norm": 3.4878053665161133, + "learning_rate": 3.7937224532704752e-06, + "loss": 1.1193, + "step": 11511 + }, + { + "epoch": 0.7218007398582983, + "grad_norm": 3.0400726795196533, + "learning_rate": 3.792130247972756e-06, + "loss": 1.1687, + "step": 11512 + }, + { + "epoch": 0.7218634397140886, + "grad_norm": 3.3937649726867676, + "learning_rate": 3.7905382986878536e-06, + "loss": 1.1238, + "step": 11513 + }, + { + "epoch": 0.7219261395698789, + "grad_norm": 3.1487627029418945, + "learning_rate": 3.7889466054814262e-06, + "loss": 1.0371, + "step": 11514 + }, + { + "epoch": 0.7219888394256693, + "grad_norm": 3.5638091564178467, + "learning_rate": 3.7873551684191114e-06, + "loss": 0.9864, + "step": 11515 + }, + { + "epoch": 0.7220515392814596, + "grad_norm": 3.1275761127471924, + "learning_rate": 3.7857639875665377e-06, + "loss": 1.2094, + "step": 11516 + }, + { + "epoch": 0.72211423913725, + "grad_norm": 3.6374149322509766, + "learning_rate": 3.7841730629893324e-06, + "loss": 1.1729, + "step": 11517 + }, + { + "epoch": 0.7221769389930404, + "grad_norm": 3.582750082015991, + "learning_rate": 3.7825823947531003e-06, + "loss": 1.1741, + "step": 11518 + }, + { + "epoch": 0.7222396388488307, + "grad_norm": 3.512932777404785, + "learning_rate": 3.780991982923443e-06, + "loss": 1.0134, + "step": 11519 + }, + { + "epoch": 0.722302338704621, + "grad_norm": 3.46111798286438, + "learning_rate": 3.779401827565944e-06, + "loss": 1.0023, + "step": 
11520 + }, + { + "epoch": 0.7223650385604113, + "grad_norm": 3.697392702102661, + "learning_rate": 3.777811928746189e-06, + "loss": 1.0784, + "step": 11521 + }, + { + "epoch": 0.7224277384162017, + "grad_norm": 3.290395498275757, + "learning_rate": 3.7762222865297417e-06, + "loss": 1.1485, + "step": 11522 + }, + { + "epoch": 0.722490438271992, + "grad_norm": 3.0878570079803467, + "learning_rate": 3.774632900982158e-06, + "loss": 1.2635, + "step": 11523 + }, + { + "epoch": 0.7225531381277823, + "grad_norm": 3.09983491897583, + "learning_rate": 3.7730437721689827e-06, + "loss": 1.1798, + "step": 11524 + }, + { + "epoch": 0.7226158379835727, + "grad_norm": 3.2804949283599854, + "learning_rate": 3.7714549001557576e-06, + "loss": 1.0412, + "step": 11525 + }, + { + "epoch": 0.722678537839363, + "grad_norm": 3.1430511474609375, + "learning_rate": 3.769866285008005e-06, + "loss": 1.0906, + "step": 11526 + }, + { + "epoch": 0.7227412376951533, + "grad_norm": 3.624037742614746, + "learning_rate": 3.768277926791234e-06, + "loss": 1.0501, + "step": 11527 + }, + { + "epoch": 0.7228039375509436, + "grad_norm": 3.5486085414886475, + "learning_rate": 3.7666898255709583e-06, + "loss": 1.0591, + "step": 11528 + }, + { + "epoch": 0.722866637406734, + "grad_norm": 3.2371864318847656, + "learning_rate": 3.7651019814126656e-06, + "loss": 1.0671, + "step": 11529 + }, + { + "epoch": 0.7229293372625243, + "grad_norm": 3.0206267833709717, + "learning_rate": 3.76351439438184e-06, + "loss": 0.9982, + "step": 11530 + }, + { + "epoch": 0.7229920371183146, + "grad_norm": 3.391732931137085, + "learning_rate": 3.7619270645439487e-06, + "loss": 1.284, + "step": 11531 + }, + { + "epoch": 0.7230547369741049, + "grad_norm": 3.4300577640533447, + "learning_rate": 3.7603399919644623e-06, + "loss": 1.227, + "step": 11532 + }, + { + "epoch": 0.7231174368298953, + "grad_norm": 3.128645181655884, + "learning_rate": 3.758753176708827e-06, + "loss": 0.965, + "step": 11533 + }, + { + "epoch": 0.7231801366856856, + "grad_norm": 3.7219908237457275, + "learning_rate": 3.7571666188424827e-06, + "loss": 1.0798, + "step": 11534 + }, + { + "epoch": 0.7232428365414759, + "grad_norm": 3.1064887046813965, + "learning_rate": 3.755580318430856e-06, + "loss": 1.0958, + "step": 11535 + }, + { + "epoch": 0.7233055363972662, + "grad_norm": 3.033088445663452, + "learning_rate": 3.7539942755393733e-06, + "loss": 1.1224, + "step": 11536 + }, + { + "epoch": 0.7233682362530566, + "grad_norm": 2.9718704223632812, + "learning_rate": 3.75240849023344e-06, + "loss": 0.9774, + "step": 11537 + }, + { + "epoch": 0.7234309361088469, + "grad_norm": 2.9405293464660645, + "learning_rate": 3.7508229625784496e-06, + "loss": 1.1125, + "step": 11538 + }, + { + "epoch": 0.7234936359646372, + "grad_norm": 3.278895854949951, + "learning_rate": 3.7492376926397966e-06, + "loss": 1.0414, + "step": 11539 + }, + { + "epoch": 0.7235563358204277, + "grad_norm": 3.154824733734131, + "learning_rate": 3.747652680482854e-06, + "loss": 1.302, + "step": 11540 + }, + { + "epoch": 0.723619035676218, + "grad_norm": 3.0690033435821533, + "learning_rate": 3.746067926172988e-06, + "loss": 1.1229, + "step": 11541 + }, + { + "epoch": 0.7236817355320083, + "grad_norm": 3.475709915161133, + "learning_rate": 3.7444834297755504e-06, + "loss": 1.2803, + "step": 11542 + }, + { + "epoch": 0.7237444353877986, + "grad_norm": 3.313608169555664, + "learning_rate": 3.742899191355893e-06, + "loss": 0.9667, + "step": 11543 + }, + { + "epoch": 0.723807135243589, + "grad_norm": 3.068572998046875, + 
"learning_rate": 3.741315210979346e-06, + "loss": 1.0873, + "step": 11544 + }, + { + "epoch": 0.7238698350993793, + "grad_norm": 3.0972015857696533, + "learning_rate": 3.739731488711229e-06, + "loss": 1.0376, + "step": 11545 + }, + { + "epoch": 0.7239325349551696, + "grad_norm": 3.092597007751465, + "learning_rate": 3.738148024616863e-06, + "loss": 1.185, + "step": 11546 + }, + { + "epoch": 0.72399523481096, + "grad_norm": 3.3566062450408936, + "learning_rate": 3.7365648187615457e-06, + "loss": 1.1937, + "step": 11547 + }, + { + "epoch": 0.7240579346667503, + "grad_norm": 3.397242307662964, + "learning_rate": 3.734981871210569e-06, + "loss": 1.0431, + "step": 11548 + }, + { + "epoch": 0.7241206345225406, + "grad_norm": 3.4995288848876953, + "learning_rate": 3.7333991820292094e-06, + "loss": 1.2058, + "step": 11549 + }, + { + "epoch": 0.7241833343783309, + "grad_norm": 3.728233814239502, + "learning_rate": 3.7318167512827454e-06, + "loss": 1.1968, + "step": 11550 + }, + { + "epoch": 0.7242460342341213, + "grad_norm": 3.478830099105835, + "learning_rate": 3.7302345790364326e-06, + "loss": 0.9652, + "step": 11551 + }, + { + "epoch": 0.7243087340899116, + "grad_norm": 2.9775941371917725, + "learning_rate": 3.728652665355519e-06, + "loss": 1.1031, + "step": 11552 + }, + { + "epoch": 0.7243714339457019, + "grad_norm": 3.560997247695923, + "learning_rate": 3.727071010305239e-06, + "loss": 1.1439, + "step": 11553 + }, + { + "epoch": 0.7244341338014922, + "grad_norm": 3.4589669704437256, + "learning_rate": 3.725489613950829e-06, + "loss": 1.0476, + "step": 11554 + }, + { + "epoch": 0.7244968336572826, + "grad_norm": 3.1901357173919678, + "learning_rate": 3.7239084763575016e-06, + "loss": 1.123, + "step": 11555 + }, + { + "epoch": 0.7245595335130729, + "grad_norm": 3.424791097640991, + "learning_rate": 3.722327597590458e-06, + "loss": 0.9114, + "step": 11556 + }, + { + "epoch": 0.7246222333688632, + "grad_norm": 3.133539915084839, + "learning_rate": 3.720746977714903e-06, + "loss": 1.0684, + "step": 11557 + }, + { + "epoch": 0.7246849332246535, + "grad_norm": 3.3145530223846436, + "learning_rate": 3.7191666167960173e-06, + "loss": 1.005, + "step": 11558 + }, + { + "epoch": 0.7247476330804439, + "grad_norm": 2.9725534915924072, + "learning_rate": 3.717586514898973e-06, + "loss": 0.9355, + "step": 11559 + }, + { + "epoch": 0.7248103329362342, + "grad_norm": 3.5283353328704834, + "learning_rate": 3.716006672088933e-06, + "loss": 1.1455, + "step": 11560 + }, + { + "epoch": 0.7248730327920245, + "grad_norm": 3.728111982345581, + "learning_rate": 3.714427088431055e-06, + "loss": 1.1755, + "step": 11561 + }, + { + "epoch": 0.7249357326478149, + "grad_norm": 3.6564130783081055, + "learning_rate": 3.7128477639904792e-06, + "loss": 1.0523, + "step": 11562 + }, + { + "epoch": 0.7249984325036053, + "grad_norm": 3.139901876449585, + "learning_rate": 3.7112686988323353e-06, + "loss": 1.0201, + "step": 11563 + }, + { + "epoch": 0.7250611323593956, + "grad_norm": 3.0078766345977783, + "learning_rate": 3.709689893021742e-06, + "loss": 1.162, + "step": 11564 + }, + { + "epoch": 0.725123832215186, + "grad_norm": 3.7337069511413574, + "learning_rate": 3.708111346623815e-06, + "loss": 1.1083, + "step": 11565 + }, + { + "epoch": 0.7251865320709763, + "grad_norm": 3.5813710689544678, + "learning_rate": 3.7065330597036508e-06, + "loss": 0.92, + "step": 11566 + }, + { + "epoch": 0.7252492319267666, + "grad_norm": 3.3535115718841553, + "learning_rate": 3.704955032326335e-06, + "loss": 1.2411, + "step": 11567 + }, + { + 
"epoch": 0.7253119317825569, + "grad_norm": 3.4240503311157227, + "learning_rate": 3.703377264556952e-06, + "loss": 1.0177, + "step": 11568 + }, + { + "epoch": 0.7253746316383473, + "grad_norm": 3.5361969470977783, + "learning_rate": 3.7017997564605655e-06, + "loss": 0.949, + "step": 11569 + }, + { + "epoch": 0.7254373314941376, + "grad_norm": 3.1435210704803467, + "learning_rate": 3.7002225081022323e-06, + "loss": 1.1634, + "step": 11570 + }, + { + "epoch": 0.7255000313499279, + "grad_norm": 3.442941665649414, + "learning_rate": 3.698645519546995e-06, + "loss": 1.19, + "step": 11571 + }, + { + "epoch": 0.7255627312057182, + "grad_norm": 2.9787139892578125, + "learning_rate": 3.6970687908598945e-06, + "loss": 1.0613, + "step": 11572 + }, + { + "epoch": 0.7256254310615086, + "grad_norm": 3.2342965602874756, + "learning_rate": 3.6954923221059523e-06, + "loss": 1.0555, + "step": 11573 + }, + { + "epoch": 0.7256881309172989, + "grad_norm": 3.827371120452881, + "learning_rate": 3.6939161133501823e-06, + "loss": 1.1269, + "step": 11574 + }, + { + "epoch": 0.7257508307730892, + "grad_norm": 3.396946907043457, + "learning_rate": 3.692340164657584e-06, + "loss": 1.0968, + "step": 11575 + }, + { + "epoch": 0.7258135306288795, + "grad_norm": 3.479729652404785, + "learning_rate": 3.690764476093156e-06, + "loss": 1.1341, + "step": 11576 + }, + { + "epoch": 0.7258762304846699, + "grad_norm": 2.9725821018218994, + "learning_rate": 3.689189047721876e-06, + "loss": 1.1845, + "step": 11577 + }, + { + "epoch": 0.7259389303404602, + "grad_norm": 3.5419063568115234, + "learning_rate": 3.6876138796087123e-06, + "loss": 1.0805, + "step": 11578 + }, + { + "epoch": 0.7260016301962505, + "grad_norm": 2.944732189178467, + "learning_rate": 3.6860389718186306e-06, + "loss": 1.1925, + "step": 11579 + }, + { + "epoch": 0.7260643300520409, + "grad_norm": 3.411102533340454, + "learning_rate": 3.684464324416578e-06, + "loss": 1.0361, + "step": 11580 + }, + { + "epoch": 0.7261270299078312, + "grad_norm": 3.2874882221221924, + "learning_rate": 3.6828899374674933e-06, + "loss": 1.1558, + "step": 11581 + }, + { + "epoch": 0.7261897297636215, + "grad_norm": 3.263732671737671, + "learning_rate": 3.681315811036299e-06, + "loss": 1.0604, + "step": 11582 + }, + { + "epoch": 0.7262524296194118, + "grad_norm": 3.4727394580841064, + "learning_rate": 3.6797419451879203e-06, + "loss": 1.0866, + "step": 11583 + }, + { + "epoch": 0.7263151294752022, + "grad_norm": 3.263425350189209, + "learning_rate": 3.678168339987259e-06, + "loss": 1.1589, + "step": 11584 + }, + { + "epoch": 0.7263778293309925, + "grad_norm": 3.1004743576049805, + "learning_rate": 3.676594995499212e-06, + "loss": 1.1013, + "step": 11585 + }, + { + "epoch": 0.7264405291867829, + "grad_norm": 3.045983076095581, + "learning_rate": 3.67502191178866e-06, + "loss": 1.1087, + "step": 11586 + }, + { + "epoch": 0.7265032290425733, + "grad_norm": 2.959566593170166, + "learning_rate": 3.6734490889204843e-06, + "loss": 1.1652, + "step": 11587 + }, + { + "epoch": 0.7265659288983636, + "grad_norm": 3.2873036861419678, + "learning_rate": 3.6718765269595445e-06, + "loss": 1.0862, + "step": 11588 + }, + { + "epoch": 0.7266286287541539, + "grad_norm": 3.512211561203003, + "learning_rate": 3.67030422597069e-06, + "loss": 1.1348, + "step": 11589 + }, + { + "epoch": 0.7266913286099442, + "grad_norm": 3.5734097957611084, + "learning_rate": 3.6687321860187688e-06, + "loss": 0.981, + "step": 11590 + }, + { + "epoch": 0.7267540284657346, + "grad_norm": 3.338268995285034, + "learning_rate": 
3.6671604071686073e-06, + "loss": 1.0027, + "step": 11591 + }, + { + "epoch": 0.7268167283215249, + "grad_norm": 3.300306797027588, + "learning_rate": 3.6655888894850288e-06, + "loss": 0.9217, + "step": 11592 + }, + { + "epoch": 0.7268794281773152, + "grad_norm": 3.3905298709869385, + "learning_rate": 3.664017633032837e-06, + "loss": 1.0586, + "step": 11593 + }, + { + "epoch": 0.7269421280331055, + "grad_norm": 3.5032968521118164, + "learning_rate": 3.6624466378768387e-06, + "loss": 1.1751, + "step": 11594 + }, + { + "epoch": 0.7270048278888959, + "grad_norm": 3.6362802982330322, + "learning_rate": 3.6608759040818177e-06, + "loss": 1.035, + "step": 11595 + }, + { + "epoch": 0.7270675277446862, + "grad_norm": 3.2746317386627197, + "learning_rate": 3.659305431712551e-06, + "loss": 1.0908, + "step": 11596 + }, + { + "epoch": 0.7271302276004765, + "grad_norm": 3.1524462699890137, + "learning_rate": 3.6577352208338015e-06, + "loss": 1.2613, + "step": 11597 + }, + { + "epoch": 0.7271929274562668, + "grad_norm": 3.0681393146514893, + "learning_rate": 3.6561652715103324e-06, + "loss": 1.0247, + "step": 11598 + }, + { + "epoch": 0.7272556273120572, + "grad_norm": 3.1403110027313232, + "learning_rate": 3.6545955838068847e-06, + "loss": 1.047, + "step": 11599 + }, + { + "epoch": 0.7273183271678475, + "grad_norm": 3.287689447402954, + "learning_rate": 3.6530261577881886e-06, + "loss": 1.2712, + "step": 11600 + }, + { + "epoch": 0.7273810270236378, + "grad_norm": 3.4856154918670654, + "learning_rate": 3.6514569935189735e-06, + "loss": 1.0973, + "step": 11601 + }, + { + "epoch": 0.7274437268794282, + "grad_norm": 3.6830127239227295, + "learning_rate": 3.6498880910639502e-06, + "loss": 1.185, + "step": 11602 + }, + { + "epoch": 0.7275064267352185, + "grad_norm": 3.325284004211426, + "learning_rate": 3.6483194504878193e-06, + "loss": 0.977, + "step": 11603 + }, + { + "epoch": 0.7275691265910088, + "grad_norm": 3.884938955307007, + "learning_rate": 3.646751071855268e-06, + "loss": 0.9318, + "step": 11604 + }, + { + "epoch": 0.7276318264467991, + "grad_norm": 3.00994873046875, + "learning_rate": 3.6451829552309837e-06, + "loss": 1.0949, + "step": 11605 + }, + { + "epoch": 0.7276945263025895, + "grad_norm": 3.1171584129333496, + "learning_rate": 3.6436151006796307e-06, + "loss": 1.2165, + "step": 11606 + }, + { + "epoch": 0.7277572261583798, + "grad_norm": 3.6937947273254395, + "learning_rate": 3.642047508265866e-06, + "loss": 0.9431, + "step": 11607 + }, + { + "epoch": 0.7278199260141701, + "grad_norm": 3.1003100872039795, + "learning_rate": 3.640480178054343e-06, + "loss": 1.1184, + "step": 11608 + }, + { + "epoch": 0.7278826258699606, + "grad_norm": 3.4453749656677246, + "learning_rate": 3.6389131101096953e-06, + "loss": 1.1904, + "step": 11609 + }, + { + "epoch": 0.7279453257257509, + "grad_norm": 3.394791603088379, + "learning_rate": 3.6373463044965486e-06, + "loss": 1.0089, + "step": 11610 + }, + { + "epoch": 0.7280080255815412, + "grad_norm": 3.2205779552459717, + "learning_rate": 3.6357797612795155e-06, + "loss": 1.2079, + "step": 11611 + }, + { + "epoch": 0.7280707254373315, + "grad_norm": 2.9822473526000977, + "learning_rate": 3.6342134805232055e-06, + "loss": 1.1932, + "step": 11612 + }, + { + "epoch": 0.7281334252931219, + "grad_norm": 3.0534069538116455, + "learning_rate": 3.632647462292209e-06, + "loss": 1.1043, + "step": 11613 + }, + { + "epoch": 0.7281961251489122, + "grad_norm": 3.3351032733917236, + "learning_rate": 3.6310817066511106e-06, + "loss": 1.1533, + "step": 11614 + }, + { + 
"epoch": 0.7282588250047025, + "grad_norm": 3.1082606315612793, + "learning_rate": 3.629516213664477e-06, + "loss": 1.1846, + "step": 11615 + }, + { + "epoch": 0.7283215248604928, + "grad_norm": 3.4065887928009033, + "learning_rate": 3.627950983396875e-06, + "loss": 1.128, + "step": 11616 + }, + { + "epoch": 0.7283842247162832, + "grad_norm": 3.4408061504364014, + "learning_rate": 3.626386015912854e-06, + "loss": 1.1041, + "step": 11617 + }, + { + "epoch": 0.7284469245720735, + "grad_norm": 3.0635476112365723, + "learning_rate": 3.6248213112769483e-06, + "loss": 1.1578, + "step": 11618 + }, + { + "epoch": 0.7285096244278638, + "grad_norm": 3.2954628467559814, + "learning_rate": 3.6232568695536942e-06, + "loss": 1.0453, + "step": 11619 + }, + { + "epoch": 0.7285723242836541, + "grad_norm": 3.371856927871704, + "learning_rate": 3.6216926908076043e-06, + "loss": 1.0086, + "step": 11620 + }, + { + "epoch": 0.7286350241394445, + "grad_norm": 3.0271542072296143, + "learning_rate": 3.6201287751031866e-06, + "loss": 1.0328, + "step": 11621 + }, + { + "epoch": 0.7286977239952348, + "grad_norm": 3.2799577713012695, + "learning_rate": 3.6185651225049336e-06, + "loss": 0.7851, + "step": 11622 + }, + { + "epoch": 0.7287604238510251, + "grad_norm": 3.273390531539917, + "learning_rate": 3.6170017330773376e-06, + "loss": 1.086, + "step": 11623 + }, + { + "epoch": 0.7288231237068155, + "grad_norm": 3.3557240962982178, + "learning_rate": 3.615438606884868e-06, + "loss": 1.2276, + "step": 11624 + }, + { + "epoch": 0.7288858235626058, + "grad_norm": 3.3595824241638184, + "learning_rate": 3.613875743991989e-06, + "loss": 1.1516, + "step": 11625 + }, + { + "epoch": 0.7289485234183961, + "grad_norm": 3.46205735206604, + "learning_rate": 3.6123131444631508e-06, + "loss": 0.9935, + "step": 11626 + }, + { + "epoch": 0.7290112232741864, + "grad_norm": 3.461571216583252, + "learning_rate": 3.6107508083627995e-06, + "loss": 1.0419, + "step": 11627 + }, + { + "epoch": 0.7290739231299768, + "grad_norm": 3.405768394470215, + "learning_rate": 3.6091887357553658e-06, + "loss": 1.0373, + "step": 11628 + }, + { + "epoch": 0.7291366229857671, + "grad_norm": 3.267979145050049, + "learning_rate": 3.607626926705262e-06, + "loss": 0.931, + "step": 11629 + }, + { + "epoch": 0.7291993228415574, + "grad_norm": 3.763782262802124, + "learning_rate": 3.6060653812769075e-06, + "loss": 1.1169, + "step": 11630 + }, + { + "epoch": 0.7292620226973477, + "grad_norm": 3.35025691986084, + "learning_rate": 3.604504099534696e-06, + "loss": 1.073, + "step": 11631 + }, + { + "epoch": 0.7293247225531382, + "grad_norm": 3.2119743824005127, + "learning_rate": 3.6029430815430143e-06, + "loss": 1.1126, + "step": 11632 + }, + { + "epoch": 0.7293874224089285, + "grad_norm": 3.0997660160064697, + "learning_rate": 3.601382327366236e-06, + "loss": 1.2404, + "step": 11633 + }, + { + "epoch": 0.7294501222647188, + "grad_norm": 3.5176033973693848, + "learning_rate": 3.5998218370687343e-06, + "loss": 0.9494, + "step": 11634 + }, + { + "epoch": 0.7295128221205092, + "grad_norm": 3.420280694961548, + "learning_rate": 3.5982616107148583e-06, + "loss": 1.0051, + "step": 11635 + }, + { + "epoch": 0.7295755219762995, + "grad_norm": 3.5110690593719482, + "learning_rate": 3.5967016483689543e-06, + "loss": 1.156, + "step": 11636 + }, + { + "epoch": 0.7296382218320898, + "grad_norm": 3.2021167278289795, + "learning_rate": 3.595141950095349e-06, + "loss": 1.1876, + "step": 11637 + }, + { + "epoch": 0.7297009216878801, + "grad_norm": 3.326864004135132, + "learning_rate": 
3.5935825159583737e-06, + "loss": 1.0967, + "step": 11638 + }, + { + "epoch": 0.7297636215436705, + "grad_norm": 3.2915005683898926, + "learning_rate": 3.5920233460223353e-06, + "loss": 1.1403, + "step": 11639 + }, + { + "epoch": 0.7298263213994608, + "grad_norm": 3.1947665214538574, + "learning_rate": 3.59046444035153e-06, + "loss": 1.1682, + "step": 11640 + }, + { + "epoch": 0.7298890212552511, + "grad_norm": 3.5786969661712646, + "learning_rate": 3.588905799010255e-06, + "loss": 1.0972, + "step": 11641 + }, + { + "epoch": 0.7299517211110415, + "grad_norm": 3.4307429790496826, + "learning_rate": 3.5873474220627844e-06, + "loss": 1.0371, + "step": 11642 + }, + { + "epoch": 0.7300144209668318, + "grad_norm": 3.197173833847046, + "learning_rate": 3.5857893095733864e-06, + "loss": 1.085, + "step": 11643 + }, + { + "epoch": 0.7300771208226221, + "grad_norm": 3.151176691055298, + "learning_rate": 3.5842314616063134e-06, + "loss": 0.9748, + "step": 11644 + }, + { + "epoch": 0.7301398206784124, + "grad_norm": 3.2145612239837646, + "learning_rate": 3.5826738782258197e-06, + "loss": 1.2163, + "step": 11645 + }, + { + "epoch": 0.7302025205342028, + "grad_norm": 3.282313823699951, + "learning_rate": 3.581116559496134e-06, + "loss": 1.2601, + "step": 11646 + }, + { + "epoch": 0.7302652203899931, + "grad_norm": 3.426302671432495, + "learning_rate": 3.5795595054814823e-06, + "loss": 1.1022, + "step": 11647 + }, + { + "epoch": 0.7303279202457834, + "grad_norm": 3.486957311630249, + "learning_rate": 3.578002716246074e-06, + "loss": 1.1918, + "step": 11648 + }, + { + "epoch": 0.7303906201015737, + "grad_norm": 3.46732234954834, + "learning_rate": 3.576446191854118e-06, + "loss": 1.0692, + "step": 11649 + }, + { + "epoch": 0.7304533199573641, + "grad_norm": 3.391576051712036, + "learning_rate": 3.5748899323698004e-06, + "loss": 1.0649, + "step": 11650 + }, + { + "epoch": 0.7305160198131544, + "grad_norm": 3.131805181503296, + "learning_rate": 3.5733339378572998e-06, + "loss": 1.126, + "step": 11651 + }, + { + "epoch": 0.7305787196689447, + "grad_norm": 3.8333168029785156, + "learning_rate": 3.5717782083807917e-06, + "loss": 1.0844, + "step": 11652 + }, + { + "epoch": 0.730641419524735, + "grad_norm": 3.2208473682403564, + "learning_rate": 3.5702227440044313e-06, + "loss": 1.1224, + "step": 11653 + }, + { + "epoch": 0.7307041193805254, + "grad_norm": 3.126032590866089, + "learning_rate": 3.568667544792365e-06, + "loss": 0.8363, + "step": 11654 + }, + { + "epoch": 0.7307668192363157, + "grad_norm": 3.2356762886047363, + "learning_rate": 3.5671126108087262e-06, + "loss": 1.1685, + "step": 11655 + }, + { + "epoch": 0.7308295190921061, + "grad_norm": 3.409518003463745, + "learning_rate": 3.565557942117649e-06, + "loss": 1.0574, + "step": 11656 + }, + { + "epoch": 0.7308922189478965, + "grad_norm": 3.21549391746521, + "learning_rate": 3.5640035387832416e-06, + "loss": 0.9797, + "step": 11657 + }, + { + "epoch": 0.7309549188036868, + "grad_norm": 3.433622121810913, + "learning_rate": 3.5624494008696077e-06, + "loss": 1.1298, + "step": 11658 + }, + { + "epoch": 0.7310176186594771, + "grad_norm": 3.435167074203491, + "learning_rate": 3.560895528440844e-06, + "loss": 1.007, + "step": 11659 + }, + { + "epoch": 0.7310803185152674, + "grad_norm": 3.633868932723999, + "learning_rate": 3.5593419215610305e-06, + "loss": 1.1974, + "step": 11660 + }, + { + "epoch": 0.7311430183710578, + "grad_norm": 3.495575189590454, + "learning_rate": 3.557788580294237e-06, + "loss": 0.998, + "step": 11661 + }, + { + "epoch": 
0.7312057182268481, + "grad_norm": 3.241682529449463, + "learning_rate": 3.5562355047045205e-06, + "loss": 1.0701, + "step": 11662 + }, + { + "epoch": 0.7312684180826384, + "grad_norm": 3.4120118618011475, + "learning_rate": 3.5546826948559367e-06, + "loss": 1.0117, + "step": 11663 + }, + { + "epoch": 0.7313311179384288, + "grad_norm": 3.7410566806793213, + "learning_rate": 3.553130150812519e-06, + "loss": 1.0947, + "step": 11664 + }, + { + "epoch": 0.7313938177942191, + "grad_norm": 3.2501256465911865, + "learning_rate": 3.5515778726382967e-06, + "loss": 1.1076, + "step": 11665 + }, + { + "epoch": 0.7314565176500094, + "grad_norm": 3.5807411670684814, + "learning_rate": 3.5500258603972806e-06, + "loss": 1.1143, + "step": 11666 + }, + { + "epoch": 0.7315192175057997, + "grad_norm": 3.394132614135742, + "learning_rate": 3.5484741141534827e-06, + "loss": 1.1074, + "step": 11667 + }, + { + "epoch": 0.7315819173615901, + "grad_norm": 3.7612521648406982, + "learning_rate": 3.546922633970895e-06, + "loss": 0.9695, + "step": 11668 + }, + { + "epoch": 0.7316446172173804, + "grad_norm": 3.1784777641296387, + "learning_rate": 3.545371419913496e-06, + "loss": 1.1186, + "step": 11669 + }, + { + "epoch": 0.7317073170731707, + "grad_norm": 3.2503082752227783, + "learning_rate": 3.543820472045265e-06, + "loss": 0.9983, + "step": 11670 + }, + { + "epoch": 0.731770016928961, + "grad_norm": 3.4499764442443848, + "learning_rate": 3.5422697904301596e-06, + "loss": 1.1307, + "step": 11671 + }, + { + "epoch": 0.7318327167847514, + "grad_norm": 3.2092907428741455, + "learning_rate": 3.540719375132129e-06, + "loss": 0.9225, + "step": 11672 + }, + { + "epoch": 0.7318954166405417, + "grad_norm": 3.201636552810669, + "learning_rate": 3.539169226215111e-06, + "loss": 1.1511, + "step": 11673 + }, + { + "epoch": 0.731958116496332, + "grad_norm": 3.249345064163208, + "learning_rate": 3.5376193437430395e-06, + "loss": 0.9762, + "step": 11674 + }, + { + "epoch": 0.7320208163521223, + "grad_norm": 3.5727806091308594, + "learning_rate": 3.5360697277798297e-06, + "loss": 1.08, + "step": 11675 + }, + { + "epoch": 0.7320835162079127, + "grad_norm": 3.6993229389190674, + "learning_rate": 3.5345203783893842e-06, + "loss": 1.1221, + "step": 11676 + }, + { + "epoch": 0.732146216063703, + "grad_norm": 3.428680658340454, + "learning_rate": 3.5329712956355987e-06, + "loss": 1.1223, + "step": 11677 + }, + { + "epoch": 0.7322089159194933, + "grad_norm": 2.8554227352142334, + "learning_rate": 3.5314224795823628e-06, + "loss": 1.0968, + "step": 11678 + }, + { + "epoch": 0.7322716157752838, + "grad_norm": 3.371964931488037, + "learning_rate": 3.529873930293546e-06, + "loss": 1.2559, + "step": 11679 + }, + { + "epoch": 0.7323343156310741, + "grad_norm": 3.2467215061187744, + "learning_rate": 3.5283256478330075e-06, + "loss": 1.1519, + "step": 11680 + }, + { + "epoch": 0.7323970154868644, + "grad_norm": 3.077044725418091, + "learning_rate": 3.5267776322646063e-06, + "loss": 1.0783, + "step": 11681 + }, + { + "epoch": 0.7324597153426547, + "grad_norm": 3.269224166870117, + "learning_rate": 3.525229883652177e-06, + "loss": 1.0359, + "step": 11682 + }, + { + "epoch": 0.7325224151984451, + "grad_norm": 3.0156784057617188, + "learning_rate": 3.523682402059551e-06, + "loss": 1.1072, + "step": 11683 + }, + { + "epoch": 0.7325851150542354, + "grad_norm": 3.2801268100738525, + "learning_rate": 3.5221351875505405e-06, + "loss": 1.1973, + "step": 11684 + }, + { + "epoch": 0.7326478149100257, + "grad_norm": 3.27840256690979, + "learning_rate": 
3.5205882401889624e-06, + "loss": 1.1478, + "step": 11685 + }, + { + "epoch": 0.7327105147658161, + "grad_norm": 3.4229893684387207, + "learning_rate": 3.5190415600386087e-06, + "loss": 1.0752, + "step": 11686 + }, + { + "epoch": 0.7327732146216064, + "grad_norm": 3.3749101161956787, + "learning_rate": 3.5174951471632623e-06, + "loss": 1.0786, + "step": 11687 + }, + { + "epoch": 0.7328359144773967, + "grad_norm": 3.2477452754974365, + "learning_rate": 3.515949001626697e-06, + "loss": 0.9015, + "step": 11688 + }, + { + "epoch": 0.732898614333187, + "grad_norm": 3.714179754257202, + "learning_rate": 3.514403123492681e-06, + "loss": 1.1278, + "step": 11689 + }, + { + "epoch": 0.7329613141889774, + "grad_norm": 3.7944061756134033, + "learning_rate": 3.5128575128249642e-06, + "loss": 0.8534, + "step": 11690 + }, + { + "epoch": 0.7330240140447677, + "grad_norm": 3.2488350868225098, + "learning_rate": 3.511312169687283e-06, + "loss": 1.1359, + "step": 11691 + }, + { + "epoch": 0.733086713900558, + "grad_norm": 3.4129772186279297, + "learning_rate": 3.509767094143375e-06, + "loss": 1.0798, + "step": 11692 + }, + { + "epoch": 0.7331494137563483, + "grad_norm": 3.3665950298309326, + "learning_rate": 3.508222286256955e-06, + "loss": 1.071, + "step": 11693 + }, + { + "epoch": 0.7332121136121387, + "grad_norm": 3.4772493839263916, + "learning_rate": 3.5066777460917313e-06, + "loss": 1.1205, + "step": 11694 + }, + { + "epoch": 0.733274813467929, + "grad_norm": 3.158102035522461, + "learning_rate": 3.5051334737113974e-06, + "loss": 1.0392, + "step": 11695 + }, + { + "epoch": 0.7333375133237193, + "grad_norm": 3.1669843196868896, + "learning_rate": 3.503589469179647e-06, + "loss": 1.0269, + "step": 11696 + }, + { + "epoch": 0.7334002131795097, + "grad_norm": 3.481637716293335, + "learning_rate": 3.5020457325601496e-06, + "loss": 1.0691, + "step": 11697 + }, + { + "epoch": 0.7334629130353, + "grad_norm": 3.6530919075012207, + "learning_rate": 3.500502263916571e-06, + "loss": 0.8753, + "step": 11698 + }, + { + "epoch": 0.7335256128910903, + "grad_norm": 3.634634017944336, + "learning_rate": 3.4989590633125583e-06, + "loss": 1.2216, + "step": 11699 + }, + { + "epoch": 0.7335883127468806, + "grad_norm": 3.6089744567871094, + "learning_rate": 3.4974161308117617e-06, + "loss": 1.0942, + "step": 11700 + }, + { + "epoch": 0.733651012602671, + "grad_norm": 3.2461118698120117, + "learning_rate": 3.4958734664778083e-06, + "loss": 0.9532, + "step": 11701 + }, + { + "epoch": 0.7337137124584614, + "grad_norm": 3.7887113094329834, + "learning_rate": 3.4943310703743127e-06, + "loss": 1.2399, + "step": 11702 + }, + { + "epoch": 0.7337764123142517, + "grad_norm": 3.405219793319702, + "learning_rate": 3.492788942564892e-06, + "loss": 1.0646, + "step": 11703 + }, + { + "epoch": 0.733839112170042, + "grad_norm": 3.310128927230835, + "learning_rate": 3.4912470831131394e-06, + "loss": 1.1141, + "step": 11704 + }, + { + "epoch": 0.7339018120258324, + "grad_norm": 3.4907233715057373, + "learning_rate": 3.489705492082641e-06, + "loss": 0.9294, + "step": 11705 + }, + { + "epoch": 0.7339645118816227, + "grad_norm": 3.105285882949829, + "learning_rate": 3.4881641695369696e-06, + "loss": 1.0728, + "step": 11706 + }, + { + "epoch": 0.734027211737413, + "grad_norm": 3.556633949279785, + "learning_rate": 3.4866231155396946e-06, + "loss": 1.0797, + "step": 11707 + }, + { + "epoch": 0.7340899115932034, + "grad_norm": 3.521479845046997, + "learning_rate": 3.485082330154368e-06, + "loss": 1.2109, + "step": 11708 + }, + { + "epoch": 
0.7341526114489937, + "grad_norm": 3.5384459495544434, + "learning_rate": 3.4835418134445264e-06, + "loss": 0.9593, + "step": 11709 + }, + { + "epoch": 0.734215311304784, + "grad_norm": 3.510524272918701, + "learning_rate": 3.482001565473708e-06, + "loss": 1.1586, + "step": 11710 + }, + { + "epoch": 0.7342780111605743, + "grad_norm": 3.285618543624878, + "learning_rate": 3.4804615863054314e-06, + "loss": 1.1043, + "step": 11711 + }, + { + "epoch": 0.7343407110163647, + "grad_norm": 3.2342545986175537, + "learning_rate": 3.4789218760032018e-06, + "loss": 1.043, + "step": 11712 + }, + { + "epoch": 0.734403410872155, + "grad_norm": 3.672487497329712, + "learning_rate": 3.4773824346305165e-06, + "loss": 1.0035, + "step": 11713 + }, + { + "epoch": 0.7344661107279453, + "grad_norm": 3.441669225692749, + "learning_rate": 3.4758432622508677e-06, + "loss": 1.14, + "step": 11714 + }, + { + "epoch": 0.7345288105837356, + "grad_norm": 3.266787052154541, + "learning_rate": 3.474304358927727e-06, + "loss": 1.0091, + "step": 11715 + }, + { + "epoch": 0.734591510439526, + "grad_norm": 3.3708372116088867, + "learning_rate": 3.4727657247245607e-06, + "loss": 1.1655, + "step": 11716 + }, + { + "epoch": 0.7346542102953163, + "grad_norm": 3.144521951675415, + "learning_rate": 3.4712273597048163e-06, + "loss": 1.0276, + "step": 11717 + }, + { + "epoch": 0.7347169101511066, + "grad_norm": 3.496502161026001, + "learning_rate": 3.4696892639319447e-06, + "loss": 0.9423, + "step": 11718 + }, + { + "epoch": 0.734779610006897, + "grad_norm": 3.2796287536621094, + "learning_rate": 3.4681514374693736e-06, + "loss": 1.0493, + "step": 11719 + }, + { + "epoch": 0.7348423098626873, + "grad_norm": 3.044255256652832, + "learning_rate": 3.4666138803805185e-06, + "loss": 1.0124, + "step": 11720 + }, + { + "epoch": 0.7349050097184776, + "grad_norm": 3.5032854080200195, + "learning_rate": 3.4650765927287955e-06, + "loss": 1.0166, + "step": 11721 + }, + { + "epoch": 0.7349677095742679, + "grad_norm": 3.417423963546753, + "learning_rate": 3.463539574577599e-06, + "loss": 1.2221, + "step": 11722 + }, + { + "epoch": 0.7350304094300583, + "grad_norm": 3.303311347961426, + "learning_rate": 3.462002825990316e-06, + "loss": 1.0977, + "step": 11723 + }, + { + "epoch": 0.7350931092858486, + "grad_norm": 3.5551860332489014, + "learning_rate": 3.460466347030319e-06, + "loss": 0.8954, + "step": 11724 + }, + { + "epoch": 0.735155809141639, + "grad_norm": 3.1620407104492188, + "learning_rate": 3.4589301377609786e-06, + "loss": 1.2299, + "step": 11725 + }, + { + "epoch": 0.7352185089974294, + "grad_norm": 3.1972849369049072, + "learning_rate": 3.457394198245645e-06, + "loss": 1.1259, + "step": 11726 + }, + { + "epoch": 0.7352812088532197, + "grad_norm": 3.4831957817077637, + "learning_rate": 3.4558585285476607e-06, + "loss": 1.1722, + "step": 11727 + }, + { + "epoch": 0.73534390870901, + "grad_norm": 3.353316068649292, + "learning_rate": 3.4543231287303526e-06, + "loss": 1.0202, + "step": 11728 + }, + { + "epoch": 0.7354066085648003, + "grad_norm": 3.2249152660369873, + "learning_rate": 3.452787998857048e-06, + "loss": 1.2014, + "step": 11729 + }, + { + "epoch": 0.7354693084205907, + "grad_norm": 2.8747143745422363, + "learning_rate": 3.451253138991053e-06, + "loss": 1.101, + "step": 11730 + }, + { + "epoch": 0.735532008276381, + "grad_norm": 3.5915122032165527, + "learning_rate": 3.4497185491956596e-06, + "loss": 1.0345, + "step": 11731 + }, + { + "epoch": 0.7355947081321713, + "grad_norm": 3.100360155105591, + "learning_rate": 
3.4481842295341637e-06, + "loss": 1.106, + "step": 11732 + }, + { + "epoch": 0.7356574079879616, + "grad_norm": 3.3615386486053467, + "learning_rate": 3.446650180069837e-06, + "loss": 1.1572, + "step": 11733 + }, + { + "epoch": 0.735720107843752, + "grad_norm": 3.317023992538452, + "learning_rate": 3.445116400865942e-06, + "loss": 1.3075, + "step": 11734 + }, + { + "epoch": 0.7357828076995423, + "grad_norm": 3.117685317993164, + "learning_rate": 3.4435828919857293e-06, + "loss": 1.155, + "step": 11735 + }, + { + "epoch": 0.7358455075553326, + "grad_norm": 3.5342483520507812, + "learning_rate": 3.4420496534924487e-06, + "loss": 1.137, + "step": 11736 + }, + { + "epoch": 0.735908207411123, + "grad_norm": 3.615382432937622, + "learning_rate": 3.4405166854493266e-06, + "loss": 0.9535, + "step": 11737 + }, + { + "epoch": 0.7359709072669133, + "grad_norm": 3.677553653717041, + "learning_rate": 3.438983987919582e-06, + "loss": 0.9641, + "step": 11738 + }, + { + "epoch": 0.7360336071227036, + "grad_norm": 3.7154769897460938, + "learning_rate": 3.4374515609664216e-06, + "loss": 0.9916, + "step": 11739 + }, + { + "epoch": 0.7360963069784939, + "grad_norm": 3.8032259941101074, + "learning_rate": 3.4359194046530485e-06, + "loss": 0.9953, + "step": 11740 + }, + { + "epoch": 0.7361590068342843, + "grad_norm": 3.4627277851104736, + "learning_rate": 3.434387519042646e-06, + "loss": 0.9293, + "step": 11741 + }, + { + "epoch": 0.7362217066900746, + "grad_norm": 3.7237768173217773, + "learning_rate": 3.4328559041983856e-06, + "loss": 1.0488, + "step": 11742 + }, + { + "epoch": 0.7362844065458649, + "grad_norm": 3.3639883995056152, + "learning_rate": 3.431324560183438e-06, + "loss": 1.1052, + "step": 11743 + }, + { + "epoch": 0.7363471064016552, + "grad_norm": 3.26998233795166, + "learning_rate": 3.4297934870609516e-06, + "loss": 1.1485, + "step": 11744 + }, + { + "epoch": 0.7364098062574456, + "grad_norm": 3.3494606018066406, + "learning_rate": 3.428262684894069e-06, + "loss": 1.0314, + "step": 11745 + }, + { + "epoch": 0.7364725061132359, + "grad_norm": 3.0976603031158447, + "learning_rate": 3.4267321537459173e-06, + "loss": 1.0429, + "step": 11746 + }, + { + "epoch": 0.7365352059690262, + "grad_norm": 3.409456253051758, + "learning_rate": 3.4252018936796226e-06, + "loss": 1.2309, + "step": 11747 + }, + { + "epoch": 0.7365979058248167, + "grad_norm": 3.044861078262329, + "learning_rate": 3.4236719047582877e-06, + "loss": 1.1288, + "step": 11748 + }, + { + "epoch": 0.736660605680607, + "grad_norm": 3.516139030456543, + "learning_rate": 3.422142187045011e-06, + "loss": 1.0446, + "step": 11749 + }, + { + "epoch": 0.7367233055363973, + "grad_norm": 3.2945847511291504, + "learning_rate": 3.4206127406028744e-06, + "loss": 1.1685, + "step": 11750 + }, + { + "epoch": 0.7367860053921876, + "grad_norm": 3.177253007888794, + "learning_rate": 3.4190835654949595e-06, + "loss": 1.1657, + "step": 11751 + }, + { + "epoch": 0.736848705247978, + "grad_norm": 3.5059080123901367, + "learning_rate": 3.4175546617843257e-06, + "loss": 1.0429, + "step": 11752 + }, + { + "epoch": 0.7369114051037683, + "grad_norm": 3.1281962394714355, + "learning_rate": 3.4160260295340207e-06, + "loss": 1.0342, + "step": 11753 + }, + { + "epoch": 0.7369741049595586, + "grad_norm": 3.2618935108184814, + "learning_rate": 3.414497668807094e-06, + "loss": 1.016, + "step": 11754 + }, + { + "epoch": 0.737036804815349, + "grad_norm": 3.8671679496765137, + "learning_rate": 3.4129695796665718e-06, + "loss": 0.9494, + "step": 11755 + }, + { + "epoch": 
0.7370995046711393, + "grad_norm": 3.763388156890869, + "learning_rate": 3.4114417621754702e-06, + "loss": 1.0536, + "step": 11756 + }, + { + "epoch": 0.7371622045269296, + "grad_norm": 3.3132853507995605, + "learning_rate": 3.4099142163967956e-06, + "loss": 1.2216, + "step": 11757 + }, + { + "epoch": 0.7372249043827199, + "grad_norm": 3.3129918575286865, + "learning_rate": 3.4083869423935503e-06, + "loss": 0.9351, + "step": 11758 + }, + { + "epoch": 0.7372876042385103, + "grad_norm": 3.333966016769409, + "learning_rate": 3.406859940228714e-06, + "loss": 1.0783, + "step": 11759 + }, + { + "epoch": 0.7373503040943006, + "grad_norm": 3.6847658157348633, + "learning_rate": 3.4053332099652626e-06, + "loss": 0.8781, + "step": 11760 + }, + { + "epoch": 0.7374130039500909, + "grad_norm": 3.4121015071868896, + "learning_rate": 3.4038067516661545e-06, + "loss": 0.9321, + "step": 11761 + }, + { + "epoch": 0.7374757038058812, + "grad_norm": 3.6763241291046143, + "learning_rate": 3.402280565394347e-06, + "loss": 1.127, + "step": 11762 + }, + { + "epoch": 0.7375384036616716, + "grad_norm": 3.1947102546691895, + "learning_rate": 3.4007546512127764e-06, + "loss": 1.0531, + "step": 11763 + }, + { + "epoch": 0.7376011035174619, + "grad_norm": 3.491314172744751, + "learning_rate": 3.3992290091843704e-06, + "loss": 1.1097, + "step": 11764 + }, + { + "epoch": 0.7376638033732522, + "grad_norm": 3.203423023223877, + "learning_rate": 3.397703639372051e-06, + "loss": 0.9405, + "step": 11765 + }, + { + "epoch": 0.7377265032290425, + "grad_norm": 3.1038167476654053, + "learning_rate": 3.3961785418387203e-06, + "loss": 1.1756, + "step": 11766 + }, + { + "epoch": 0.7377892030848329, + "grad_norm": 3.4793944358825684, + "learning_rate": 3.394653716647277e-06, + "loss": 0.9411, + "step": 11767 + }, + { + "epoch": 0.7378519029406232, + "grad_norm": 3.2575645446777344, + "learning_rate": 3.393129163860599e-06, + "loss": 1.1166, + "step": 11768 + }, + { + "epoch": 0.7379146027964135, + "grad_norm": 3.134849786758423, + "learning_rate": 3.391604883541566e-06, + "loss": 1.186, + "step": 11769 + }, + { + "epoch": 0.7379773026522038, + "grad_norm": 3.11974835395813, + "learning_rate": 3.3900808757530357e-06, + "loss": 1.055, + "step": 11770 + }, + { + "epoch": 0.7380400025079943, + "grad_norm": 3.2334439754486084, + "learning_rate": 3.3885571405578555e-06, + "loss": 1.027, + "step": 11771 + }, + { + "epoch": 0.7381027023637846, + "grad_norm": 3.240424633026123, + "learning_rate": 3.387033678018872e-06, + "loss": 1.2684, + "step": 11772 + }, + { + "epoch": 0.7381654022195749, + "grad_norm": 3.7406952381134033, + "learning_rate": 3.3855104881989066e-06, + "loss": 1.2082, + "step": 11773 + }, + { + "epoch": 0.7382281020753653, + "grad_norm": 3.5387563705444336, + "learning_rate": 3.3839875711607783e-06, + "loss": 0.9083, + "step": 11774 + }, + { + "epoch": 0.7382908019311556, + "grad_norm": 3.23626708984375, + "learning_rate": 3.3824649269672894e-06, + "loss": 1.0396, + "step": 11775 + }, + { + "epoch": 0.7383535017869459, + "grad_norm": 3.5390589237213135, + "learning_rate": 3.380942555681238e-06, + "loss": 1.078, + "step": 11776 + }, + { + "epoch": 0.7384162016427362, + "grad_norm": 3.167539119720459, + "learning_rate": 3.3794204573654043e-06, + "loss": 1.0484, + "step": 11777 + }, + { + "epoch": 0.7384789014985266, + "grad_norm": 2.9992501735687256, + "learning_rate": 3.377898632082561e-06, + "loss": 1.0566, + "step": 11778 + }, + { + "epoch": 0.7385416013543169, + "grad_norm": 3.1817500591278076, + "learning_rate": 
3.3763770798954633e-06, + "loss": 1.0647, + "step": 11779 + }, + { + "epoch": 0.7386043012101072, + "grad_norm": 3.503310441970825, + "learning_rate": 3.3748558008668673e-06, + "loss": 1.083, + "step": 11780 + }, + { + "epoch": 0.7386670010658976, + "grad_norm": 3.37148380279541, + "learning_rate": 3.3733347950595084e-06, + "loss": 1.097, + "step": 11781 + }, + { + "epoch": 0.7387297009216879, + "grad_norm": 3.3325014114379883, + "learning_rate": 3.3718140625361084e-06, + "loss": 1.0256, + "step": 11782 + }, + { + "epoch": 0.7387924007774782, + "grad_norm": 3.311755895614624, + "learning_rate": 3.370293603359388e-06, + "loss": 1.0855, + "step": 11783 + }, + { + "epoch": 0.7388551006332685, + "grad_norm": 3.3207225799560547, + "learning_rate": 3.3687734175920505e-06, + "loss": 1.121, + "step": 11784 + }, + { + "epoch": 0.7389178004890589, + "grad_norm": 3.5169427394866943, + "learning_rate": 3.3672535052967857e-06, + "loss": 1.0418, + "step": 11785 + }, + { + "epoch": 0.7389805003448492, + "grad_norm": 3.3225998878479004, + "learning_rate": 3.3657338665362727e-06, + "loss": 1.1122, + "step": 11786 + }, + { + "epoch": 0.7390432002006395, + "grad_norm": 3.3689630031585693, + "learning_rate": 3.3642145013731886e-06, + "loss": 1.132, + "step": 11787 + }, + { + "epoch": 0.7391059000564298, + "grad_norm": 3.36181640625, + "learning_rate": 3.362695409870188e-06, + "loss": 1.1639, + "step": 11788 + }, + { + "epoch": 0.7391685999122202, + "grad_norm": 3.5132901668548584, + "learning_rate": 3.361176592089919e-06, + "loss": 1.0497, + "step": 11789 + }, + { + "epoch": 0.7392312997680105, + "grad_norm": 3.5562002658843994, + "learning_rate": 3.3596580480950134e-06, + "loss": 1.0598, + "step": 11790 + }, + { + "epoch": 0.7392939996238008, + "grad_norm": 3.3163411617279053, + "learning_rate": 3.358139777948104e-06, + "loss": 1.1629, + "step": 11791 + }, + { + "epoch": 0.7393566994795912, + "grad_norm": 3.3692688941955566, + "learning_rate": 3.3566217817118e-06, + "loss": 1.0289, + "step": 11792 + }, + { + "epoch": 0.7394193993353815, + "grad_norm": 3.1608970165252686, + "learning_rate": 3.3551040594486995e-06, + "loss": 1.1642, + "step": 11793 + }, + { + "epoch": 0.7394820991911719, + "grad_norm": 3.274848461151123, + "learning_rate": 3.3535866112214023e-06, + "loss": 1.0473, + "step": 11794 + }, + { + "epoch": 0.7395447990469622, + "grad_norm": 3.2685678005218506, + "learning_rate": 3.352069437092482e-06, + "loss": 1.0133, + "step": 11795 + }, + { + "epoch": 0.7396074989027526, + "grad_norm": 3.2634477615356445, + "learning_rate": 3.3505525371245094e-06, + "loss": 1.2287, + "step": 11796 + }, + { + "epoch": 0.7396701987585429, + "grad_norm": 3.448303699493408, + "learning_rate": 3.3490359113800374e-06, + "loss": 1.125, + "step": 11797 + }, + { + "epoch": 0.7397328986143332, + "grad_norm": 3.328638792037964, + "learning_rate": 3.347519559921618e-06, + "loss": 1.0811, + "step": 11798 + }, + { + "epoch": 0.7397955984701235, + "grad_norm": 3.22501802444458, + "learning_rate": 3.3460034828117828e-06, + "loss": 1.2143, + "step": 11799 + }, + { + "epoch": 0.7398582983259139, + "grad_norm": 3.0137338638305664, + "learning_rate": 3.344487680113053e-06, + "loss": 1.0604, + "step": 11800 + }, + { + "epoch": 0.7399209981817042, + "grad_norm": 3.720716953277588, + "learning_rate": 3.342972151887941e-06, + "loss": 1.2557, + "step": 11801 + }, + { + "epoch": 0.7399836980374945, + "grad_norm": 3.719388246536255, + "learning_rate": 3.3414568981989492e-06, + "loss": 1.0996, + "step": 11802 + }, + { + "epoch": 
0.7400463978932849, + "grad_norm": 3.4100546836853027, + "learning_rate": 3.339941919108567e-06, + "loss": 1.2236, + "step": 11803 + }, + { + "epoch": 0.7401090977490752, + "grad_norm": 3.1614606380462646, + "learning_rate": 3.3384272146792674e-06, + "loss": 1.0545, + "step": 11804 + }, + { + "epoch": 0.7401717976048655, + "grad_norm": 3.3168938159942627, + "learning_rate": 3.3369127849735237e-06, + "loss": 1.1752, + "step": 11805 + }, + { + "epoch": 0.7402344974606558, + "grad_norm": 3.2331199645996094, + "learning_rate": 3.3353986300537876e-06, + "loss": 0.9393, + "step": 11806 + }, + { + "epoch": 0.7402971973164462, + "grad_norm": 3.285923480987549, + "learning_rate": 3.333884749982503e-06, + "loss": 1.1539, + "step": 11807 + }, + { + "epoch": 0.7403598971722365, + "grad_norm": 2.753152847290039, + "learning_rate": 3.3323711448220997e-06, + "loss": 1.1144, + "step": 11808 + }, + { + "epoch": 0.7404225970280268, + "grad_norm": 3.265233039855957, + "learning_rate": 3.3308578146350047e-06, + "loss": 1.1341, + "step": 11809 + }, + { + "epoch": 0.7404852968838171, + "grad_norm": 3.7456107139587402, + "learning_rate": 3.3293447594836235e-06, + "loss": 1.2385, + "step": 11810 + }, + { + "epoch": 0.7405479967396075, + "grad_norm": 3.2907238006591797, + "learning_rate": 3.327831979430357e-06, + "loss": 0.9879, + "step": 11811 + }, + { + "epoch": 0.7406106965953978, + "grad_norm": 3.074683666229248, + "learning_rate": 3.326319474537587e-06, + "loss": 1.0194, + "step": 11812 + }, + { + "epoch": 0.7406733964511881, + "grad_norm": 3.278836727142334, + "learning_rate": 3.324807244867697e-06, + "loss": 0.8618, + "step": 11813 + }, + { + "epoch": 0.7407360963069785, + "grad_norm": 3.146458625793457, + "learning_rate": 3.3232952904830486e-06, + "loss": 1.1449, + "step": 11814 + }, + { + "epoch": 0.7407987961627688, + "grad_norm": 3.328587055206299, + "learning_rate": 3.3217836114459887e-06, + "loss": 1.0879, + "step": 11815 + }, + { + "epoch": 0.7408614960185591, + "grad_norm": 3.3707728385925293, + "learning_rate": 3.3202722078188685e-06, + "loss": 0.9583, + "step": 11816 + }, + { + "epoch": 0.7409241958743494, + "grad_norm": 3.3148508071899414, + "learning_rate": 3.318761079664015e-06, + "loss": 1.0118, + "step": 11817 + }, + { + "epoch": 0.7409868957301399, + "grad_norm": 3.0998406410217285, + "learning_rate": 3.317250227043746e-06, + "loss": 1.068, + "step": 11818 + }, + { + "epoch": 0.7410495955859302, + "grad_norm": 3.1908395290374756, + "learning_rate": 3.3157396500203655e-06, + "loss": 1.0277, + "step": 11819 + }, + { + "epoch": 0.7411122954417205, + "grad_norm": 3.188642740249634, + "learning_rate": 3.314229348656177e-06, + "loss": 1.1648, + "step": 11820 + }, + { + "epoch": 0.7411749952975109, + "grad_norm": 3.3506381511688232, + "learning_rate": 3.3127193230134623e-06, + "loss": 1.0075, + "step": 11821 + }, + { + "epoch": 0.7412376951533012, + "grad_norm": 3.694685220718384, + "learning_rate": 3.3112095731544925e-06, + "loss": 1.0607, + "step": 11822 + }, + { + "epoch": 0.7413003950090915, + "grad_norm": 3.365919589996338, + "learning_rate": 3.309700099141534e-06, + "loss": 1.0818, + "step": 11823 + }, + { + "epoch": 0.7413630948648818, + "grad_norm": 3.3785665035247803, + "learning_rate": 3.3081909010368365e-06, + "loss": 1.0495, + "step": 11824 + }, + { + "epoch": 0.7414257947206722, + "grad_norm": 3.373955011367798, + "learning_rate": 3.3066819789026374e-06, + "loss": 1.1196, + "step": 11825 + }, + { + "epoch": 0.7414884945764625, + "grad_norm": 3.45086669921875, + "learning_rate": 
3.3051733328011635e-06, + "loss": 1.1989, + "step": 11826 + }, + { + "epoch": 0.7415511944322528, + "grad_norm": 3.2215466499328613, + "learning_rate": 3.303664962794636e-06, + "loss": 1.1008, + "step": 11827 + }, + { + "epoch": 0.7416138942880431, + "grad_norm": 3.6728312969207764, + "learning_rate": 3.302156868945259e-06, + "loss": 1.136, + "step": 11828 + }, + { + "epoch": 0.7416765941438335, + "grad_norm": 3.276726722717285, + "learning_rate": 3.3006490513152245e-06, + "loss": 1.1045, + "step": 11829 + }, + { + "epoch": 0.7417392939996238, + "grad_norm": 3.1084046363830566, + "learning_rate": 3.2991415099667124e-06, + "loss": 1.152, + "step": 11830 + }, + { + "epoch": 0.7418019938554141, + "grad_norm": 3.247378349304199, + "learning_rate": 3.2976342449619005e-06, + "loss": 1.0348, + "step": 11831 + }, + { + "epoch": 0.7418646937112044, + "grad_norm": 3.147979259490967, + "learning_rate": 3.2961272563629454e-06, + "loss": 1.0308, + "step": 11832 + }, + { + "epoch": 0.7419273935669948, + "grad_norm": 3.0627617835998535, + "learning_rate": 3.294620544231991e-06, + "loss": 1.0855, + "step": 11833 + }, + { + "epoch": 0.7419900934227851, + "grad_norm": 3.4750499725341797, + "learning_rate": 3.2931141086311825e-06, + "loss": 1.0341, + "step": 11834 + }, + { + "epoch": 0.7420527932785754, + "grad_norm": 3.4180476665496826, + "learning_rate": 3.2916079496226407e-06, + "loss": 1.0052, + "step": 11835 + }, + { + "epoch": 0.7421154931343658, + "grad_norm": 3.543532371520996, + "learning_rate": 3.290102067268479e-06, + "loss": 0.9707, + "step": 11836 + }, + { + "epoch": 0.7421781929901561, + "grad_norm": 3.2892277240753174, + "learning_rate": 3.2885964616307985e-06, + "loss": 0.9776, + "step": 11837 + }, + { + "epoch": 0.7422408928459464, + "grad_norm": 3.658596992492676, + "learning_rate": 3.287091132771696e-06, + "loss": 1.2206, + "step": 11838 + }, + { + "epoch": 0.7423035927017367, + "grad_norm": 3.380089044570923, + "learning_rate": 3.2855860807532493e-06, + "loss": 1.1266, + "step": 11839 + }, + { + "epoch": 0.7423662925575271, + "grad_norm": 3.5031163692474365, + "learning_rate": 3.284081305637522e-06, + "loss": 0.9825, + "step": 11840 + }, + { + "epoch": 0.7424289924133175, + "grad_norm": 3.2513558864593506, + "learning_rate": 3.282576807486575e-06, + "loss": 0.9878, + "step": 11841 + }, + { + "epoch": 0.7424916922691078, + "grad_norm": 3.7374260425567627, + "learning_rate": 3.281072586362456e-06, + "loss": 1.0941, + "step": 11842 + }, + { + "epoch": 0.7425543921248982, + "grad_norm": 3.6680471897125244, + "learning_rate": 3.2795686423271977e-06, + "loss": 1.031, + "step": 11843 + }, + { + "epoch": 0.7426170919806885, + "grad_norm": 3.5057270526885986, + "learning_rate": 3.278064975442823e-06, + "loss": 1.1015, + "step": 11844 + }, + { + "epoch": 0.7426797918364788, + "grad_norm": 3.3052632808685303, + "learning_rate": 3.2765615857713383e-06, + "loss": 1.1426, + "step": 11845 + }, + { + "epoch": 0.7427424916922691, + "grad_norm": 3.2133917808532715, + "learning_rate": 3.2750584733747505e-06, + "loss": 1.0373, + "step": 11846 + }, + { + "epoch": 0.7428051915480595, + "grad_norm": 3.198660135269165, + "learning_rate": 3.2735556383150447e-06, + "loss": 0.9551, + "step": 11847 + }, + { + "epoch": 0.7428678914038498, + "grad_norm": 3.2323222160339355, + "learning_rate": 3.2720530806541983e-06, + "loss": 1.1447, + "step": 11848 + }, + { + "epoch": 0.7429305912596401, + "grad_norm": 3.6540849208831787, + "learning_rate": 3.2705508004541743e-06, + "loss": 1.0479, + "step": 11849 + }, + { + "epoch": 
0.7429932911154304, + "grad_norm": 3.264354944229126, + "learning_rate": 3.2690487977769315e-06, + "loss": 1.1417, + "step": 11850 + }, + { + "epoch": 0.7430559909712208, + "grad_norm": 3.200892686843872, + "learning_rate": 3.2675470726844104e-06, + "loss": 1.0961, + "step": 11851 + }, + { + "epoch": 0.7431186908270111, + "grad_norm": 3.227295398712158, + "learning_rate": 3.266045625238539e-06, + "loss": 1.0811, + "step": 11852 + }, + { + "epoch": 0.7431813906828014, + "grad_norm": 3.8365895748138428, + "learning_rate": 3.264544455501243e-06, + "loss": 0.8871, + "step": 11853 + }, + { + "epoch": 0.7432440905385918, + "grad_norm": 3.426105499267578, + "learning_rate": 3.2630435635344283e-06, + "loss": 1.1306, + "step": 11854 + }, + { + "epoch": 0.7433067903943821, + "grad_norm": 3.475071668624878, + "learning_rate": 3.2615429493999904e-06, + "loss": 0.9842, + "step": 11855 + }, + { + "epoch": 0.7433694902501724, + "grad_norm": 3.397768259048462, + "learning_rate": 3.2600426131598127e-06, + "loss": 1.1756, + "step": 11856 + }, + { + "epoch": 0.7434321901059627, + "grad_norm": 4.0498247146606445, + "learning_rate": 3.2585425548757755e-06, + "loss": 0.9613, + "step": 11857 + }, + { + "epoch": 0.7434948899617531, + "grad_norm": 3.3622853755950928, + "learning_rate": 3.2570427746097378e-06, + "loss": 1.1271, + "step": 11858 + }, + { + "epoch": 0.7435575898175434, + "grad_norm": 3.5494325160980225, + "learning_rate": 3.25554327242355e-06, + "loss": 1.0463, + "step": 11859 + }, + { + "epoch": 0.7436202896733337, + "grad_norm": 3.2477104663848877, + "learning_rate": 3.254044048379049e-06, + "loss": 1.1137, + "step": 11860 + }, + { + "epoch": 0.743682989529124, + "grad_norm": 3.602790117263794, + "learning_rate": 3.2525451025380693e-06, + "loss": 1.0694, + "step": 11861 + }, + { + "epoch": 0.7437456893849144, + "grad_norm": 3.4399852752685547, + "learning_rate": 3.251046434962425e-06, + "loss": 1.1493, + "step": 11862 + }, + { + "epoch": 0.7438083892407047, + "grad_norm": 3.4757847785949707, + "learning_rate": 3.2495480457139163e-06, + "loss": 1.2018, + "step": 11863 + }, + { + "epoch": 0.7438710890964951, + "grad_norm": 3.1039421558380127, + "learning_rate": 3.2480499348543447e-06, + "loss": 1.244, + "step": 11864 + }, + { + "epoch": 0.7439337889522855, + "grad_norm": 3.3587496280670166, + "learning_rate": 3.246552102445488e-06, + "loss": 1.0145, + "step": 11865 + }, + { + "epoch": 0.7439964888080758, + "grad_norm": 3.230347156524658, + "learning_rate": 3.245054548549118e-06, + "loss": 1.0832, + "step": 11866 + }, + { + "epoch": 0.7440591886638661, + "grad_norm": 3.43267560005188, + "learning_rate": 3.2435572732269905e-06, + "loss": 1.0227, + "step": 11867 + }, + { + "epoch": 0.7441218885196564, + "grad_norm": 3.4347596168518066, + "learning_rate": 3.2420602765408595e-06, + "loss": 0.9263, + "step": 11868 + }, + { + "epoch": 0.7441845883754468, + "grad_norm": 3.095890522003174, + "learning_rate": 3.2405635585524566e-06, + "loss": 1.2046, + "step": 11869 + }, + { + "epoch": 0.7442472882312371, + "grad_norm": 3.098778486251831, + "learning_rate": 3.239067119323509e-06, + "loss": 1.0739, + "step": 11870 + }, + { + "epoch": 0.7443099880870274, + "grad_norm": 3.4088447093963623, + "learning_rate": 3.2375709589157244e-06, + "loss": 1.0399, + "step": 11871 + }, + { + "epoch": 0.7443726879428177, + "grad_norm": 3.309536933898926, + "learning_rate": 3.2360750773908125e-06, + "loss": 1.0386, + "step": 11872 + }, + { + "epoch": 0.7444353877986081, + "grad_norm": 3.3656492233276367, + "learning_rate": 
3.2345794748104595e-06, + "loss": 1.0906, + "step": 11873 + }, + { + "epoch": 0.7444980876543984, + "grad_norm": 3.3585944175720215, + "learning_rate": 3.233084151236342e-06, + "loss": 1.1668, + "step": 11874 + }, + { + "epoch": 0.7445607875101887, + "grad_norm": 3.6653571128845215, + "learning_rate": 3.231589106730132e-06, + "loss": 1.1077, + "step": 11875 + }, + { + "epoch": 0.744623487365979, + "grad_norm": 3.5508406162261963, + "learning_rate": 3.2300943413534837e-06, + "loss": 0.9351, + "step": 11876 + }, + { + "epoch": 0.7446861872217694, + "grad_norm": 3.5411462783813477, + "learning_rate": 3.22859985516804e-06, + "loss": 1.11, + "step": 11877 + }, + { + "epoch": 0.7447488870775597, + "grad_norm": 3.569758653640747, + "learning_rate": 3.22710564823543e-06, + "loss": 0.9643, + "step": 11878 + }, + { + "epoch": 0.74481158693335, + "grad_norm": 2.752898693084717, + "learning_rate": 3.225611720617283e-06, + "loss": 1.1587, + "step": 11879 + }, + { + "epoch": 0.7448742867891404, + "grad_norm": 4.074037551879883, + "learning_rate": 3.224118072375204e-06, + "loss": 1.0166, + "step": 11880 + }, + { + "epoch": 0.7449369866449307, + "grad_norm": 3.6279525756835938, + "learning_rate": 3.2226247035707916e-06, + "loss": 1.0967, + "step": 11881 + }, + { + "epoch": 0.744999686500721, + "grad_norm": 3.592902421951294, + "learning_rate": 3.221131614265629e-06, + "loss": 1.1486, + "step": 11882 + }, + { + "epoch": 0.7450623863565113, + "grad_norm": 3.605077028274536, + "learning_rate": 3.219638804521299e-06, + "loss": 1.0365, + "step": 11883 + }, + { + "epoch": 0.7451250862123017, + "grad_norm": 3.5107085704803467, + "learning_rate": 3.2181462743993596e-06, + "loss": 1.0272, + "step": 11884 + }, + { + "epoch": 0.745187786068092, + "grad_norm": 3.205902338027954, + "learning_rate": 3.216654023961361e-06, + "loss": 1.1483, + "step": 11885 + }, + { + "epoch": 0.7452504859238823, + "grad_norm": 3.244340181350708, + "learning_rate": 3.21516205326885e-06, + "loss": 1.221, + "step": 11886 + }, + { + "epoch": 0.7453131857796728, + "grad_norm": 3.1865015029907227, + "learning_rate": 3.213670362383352e-06, + "loss": 1.1514, + "step": 11887 + }, + { + "epoch": 0.7453758856354631, + "grad_norm": 3.334214448928833, + "learning_rate": 3.212178951366385e-06, + "loss": 1.0792, + "step": 11888 + }, + { + "epoch": 0.7454385854912534, + "grad_norm": 3.5721709728240967, + "learning_rate": 3.2106878202794513e-06, + "loss": 1.0374, + "step": 11889 + }, + { + "epoch": 0.7455012853470437, + "grad_norm": 3.3224706649780273, + "learning_rate": 3.209196969184052e-06, + "loss": 1.0498, + "step": 11890 + }, + { + "epoch": 0.7455639852028341, + "grad_norm": 3.44328236579895, + "learning_rate": 3.2077063981416658e-06, + "loss": 0.9308, + "step": 11891 + }, + { + "epoch": 0.7456266850586244, + "grad_norm": 3.498539686203003, + "learning_rate": 3.206216107213761e-06, + "loss": 1.1539, + "step": 11892 + }, + { + "epoch": 0.7456893849144147, + "grad_norm": 3.603323459625244, + "learning_rate": 3.2047260964618034e-06, + "loss": 1.2081, + "step": 11893 + }, + { + "epoch": 0.745752084770205, + "grad_norm": 3.413353443145752, + "learning_rate": 3.203236365947239e-06, + "loss": 1.1576, + "step": 11894 + }, + { + "epoch": 0.7458147846259954, + "grad_norm": 3.123845100402832, + "learning_rate": 3.201746915731503e-06, + "loss": 1.0975, + "step": 11895 + }, + { + "epoch": 0.7458774844817857, + "grad_norm": 3.4649736881256104, + "learning_rate": 3.200257745876019e-06, + "loss": 1.2642, + "step": 11896 + }, + { + "epoch": 0.745940184337576, + 
"grad_norm": 3.567513942718506, + "learning_rate": 3.198768856442205e-06, + "loss": 1.2121, + "step": 11897 + }, + { + "epoch": 0.7460028841933664, + "grad_norm": 3.4593019485473633, + "learning_rate": 3.19728024749146e-06, + "loss": 1.1079, + "step": 11898 + }, + { + "epoch": 0.7460655840491567, + "grad_norm": 4.039361476898193, + "learning_rate": 3.195791919085175e-06, + "loss": 1.0949, + "step": 11899 + }, + { + "epoch": 0.746128283904947, + "grad_norm": 3.3272573947906494, + "learning_rate": 3.1943038712847253e-06, + "loss": 1.0374, + "step": 11900 + }, + { + "epoch": 0.7461909837607373, + "grad_norm": 3.253399133682251, + "learning_rate": 3.1928161041514848e-06, + "loss": 1.0135, + "step": 11901 + }, + { + "epoch": 0.7462536836165277, + "grad_norm": 3.6234493255615234, + "learning_rate": 3.191328617746805e-06, + "loss": 0.9754, + "step": 11902 + }, + { + "epoch": 0.746316383472318, + "grad_norm": 3.0241105556488037, + "learning_rate": 3.1898414121320277e-06, + "loss": 1.0784, + "step": 11903 + }, + { + "epoch": 0.7463790833281083, + "grad_norm": 3.4359521865844727, + "learning_rate": 3.1883544873684903e-06, + "loss": 1.0766, + "step": 11904 + }, + { + "epoch": 0.7464417831838986, + "grad_norm": 3.497421979904175, + "learning_rate": 3.1868678435175116e-06, + "loss": 1.0017, + "step": 11905 + }, + { + "epoch": 0.746504483039689, + "grad_norm": 2.8439838886260986, + "learning_rate": 3.1853814806404003e-06, + "loss": 1.1045, + "step": 11906 + }, + { + "epoch": 0.7465671828954793, + "grad_norm": 3.261590003967285, + "learning_rate": 3.183895398798451e-06, + "loss": 1.113, + "step": 11907 + }, + { + "epoch": 0.7466298827512696, + "grad_norm": 3.0841736793518066, + "learning_rate": 3.1824095980529566e-06, + "loss": 1.121, + "step": 11908 + }, + { + "epoch": 0.74669258260706, + "grad_norm": 3.0065712928771973, + "learning_rate": 3.1809240784651886e-06, + "loss": 1.1372, + "step": 11909 + }, + { + "epoch": 0.7467552824628504, + "grad_norm": 3.301492691040039, + "learning_rate": 3.179438840096409e-06, + "loss": 1.119, + "step": 11910 + }, + { + "epoch": 0.7468179823186407, + "grad_norm": 3.431525468826294, + "learning_rate": 3.177953883007866e-06, + "loss": 1.1037, + "step": 11911 + }, + { + "epoch": 0.746880682174431, + "grad_norm": 3.7684953212738037, + "learning_rate": 3.176469207260807e-06, + "loss": 0.889, + "step": 11912 + }, + { + "epoch": 0.7469433820302214, + "grad_norm": 3.0029711723327637, + "learning_rate": 3.174984812916455e-06, + "loss": 1.2046, + "step": 11913 + }, + { + "epoch": 0.7470060818860117, + "grad_norm": 3.0794529914855957, + "learning_rate": 3.173500700036024e-06, + "loss": 1.0187, + "step": 11914 + }, + { + "epoch": 0.747068781741802, + "grad_norm": 3.278672456741333, + "learning_rate": 3.172016868680726e-06, + "loss": 1.0365, + "step": 11915 + }, + { + "epoch": 0.7471314815975924, + "grad_norm": 3.536682367324829, + "learning_rate": 3.1705333189117504e-06, + "loss": 1.1006, + "step": 11916 + }, + { + "epoch": 0.7471941814533827, + "grad_norm": 3.322643756866455, + "learning_rate": 3.169050050790279e-06, + "loss": 1.1674, + "step": 11917 + }, + { + "epoch": 0.747256881309173, + "grad_norm": 3.0395607948303223, + "learning_rate": 3.167567064377478e-06, + "loss": 1.0657, + "step": 11918 + }, + { + "epoch": 0.7473195811649633, + "grad_norm": 3.5116562843322754, + "learning_rate": 3.1660843597345137e-06, + "loss": 1.1488, + "step": 11919 + }, + { + "epoch": 0.7473822810207537, + "grad_norm": 3.3899993896484375, + "learning_rate": 3.1646019369225277e-06, + "loss": 1.097, + 
"step": 11920 + }, + { + "epoch": 0.747444980876544, + "grad_norm": 3.3033628463745117, + "learning_rate": 3.1631197960026572e-06, + "loss": 1.0445, + "step": 11921 + }, + { + "epoch": 0.7475076807323343, + "grad_norm": 3.3671274185180664, + "learning_rate": 3.1616379370360216e-06, + "loss": 1.0384, + "step": 11922 + }, + { + "epoch": 0.7475703805881246, + "grad_norm": 3.582411766052246, + "learning_rate": 3.1601563600837393e-06, + "loss": 1.0733, + "step": 11923 + }, + { + "epoch": 0.747633080443915, + "grad_norm": 3.3114864826202393, + "learning_rate": 3.1586750652069077e-06, + "loss": 1.1581, + "step": 11924 + }, + { + "epoch": 0.7476957802997053, + "grad_norm": 3.397749662399292, + "learning_rate": 3.1571940524666123e-06, + "loss": 1.146, + "step": 11925 + }, + { + "epoch": 0.7477584801554956, + "grad_norm": 3.3270809650421143, + "learning_rate": 3.1557133219239356e-06, + "loss": 1.227, + "step": 11926 + }, + { + "epoch": 0.747821180011286, + "grad_norm": 3.2364256381988525, + "learning_rate": 3.1542328736399397e-06, + "loss": 1.0845, + "step": 11927 + }, + { + "epoch": 0.7478838798670763, + "grad_norm": 3.196458101272583, + "learning_rate": 3.1527527076756792e-06, + "loss": 1.0819, + "step": 11928 + }, + { + "epoch": 0.7479465797228666, + "grad_norm": 3.5776140689849854, + "learning_rate": 3.1512728240921943e-06, + "loss": 1.0802, + "step": 11929 + }, + { + "epoch": 0.7480092795786569, + "grad_norm": 3.265727996826172, + "learning_rate": 3.149793222950519e-06, + "loss": 1.2526, + "step": 11930 + }, + { + "epoch": 0.7480719794344473, + "grad_norm": 3.212282419204712, + "learning_rate": 3.1483139043116705e-06, + "loss": 1.2584, + "step": 11931 + }, + { + "epoch": 0.7481346792902376, + "grad_norm": 3.3976008892059326, + "learning_rate": 3.1468348682366567e-06, + "loss": 0.9836, + "step": 11932 + }, + { + "epoch": 0.748197379146028, + "grad_norm": 3.417036533355713, + "learning_rate": 3.1453561147864686e-06, + "loss": 1.1129, + "step": 11933 + }, + { + "epoch": 0.7482600790018183, + "grad_norm": 2.9846889972686768, + "learning_rate": 3.1438776440220964e-06, + "loss": 1.0654, + "step": 11934 + }, + { + "epoch": 0.7483227788576087, + "grad_norm": 3.155487060546875, + "learning_rate": 3.14239945600451e-06, + "loss": 1.2212, + "step": 11935 + }, + { + "epoch": 0.748385478713399, + "grad_norm": 3.356579303741455, + "learning_rate": 3.1409215507946655e-06, + "loss": 0.8262, + "step": 11936 + }, + { + "epoch": 0.7484481785691893, + "grad_norm": 3.246304750442505, + "learning_rate": 3.1394439284535206e-06, + "loss": 1.0766, + "step": 11937 + }, + { + "epoch": 0.7485108784249797, + "grad_norm": 3.708808660507202, + "learning_rate": 3.137966589042006e-06, + "loss": 0.9723, + "step": 11938 + }, + { + "epoch": 0.74857357828077, + "grad_norm": 3.1896681785583496, + "learning_rate": 3.1364895326210497e-06, + "loss": 0.9784, + "step": 11939 + }, + { + "epoch": 0.7486362781365603, + "grad_norm": 3.4361565113067627, + "learning_rate": 3.135012759251561e-06, + "loss": 1.0132, + "step": 11940 + }, + { + "epoch": 0.7486989779923506, + "grad_norm": 3.1703481674194336, + "learning_rate": 3.1335362689944506e-06, + "loss": 1.112, + "step": 11941 + }, + { + "epoch": 0.748761677848141, + "grad_norm": 3.8660433292388916, + "learning_rate": 3.1320600619106035e-06, + "loss": 1.1533, + "step": 11942 + }, + { + "epoch": 0.7488243777039313, + "grad_norm": 2.8840255737304688, + "learning_rate": 3.130584138060899e-06, + "loss": 1.0159, + "step": 11943 + }, + { + "epoch": 0.7488870775597216, + "grad_norm": 
3.5732245445251465, + "learning_rate": 3.1291084975062013e-06, + "loss": 1.0861, + "step": 11944 + }, + { + "epoch": 0.7489497774155119, + "grad_norm": 3.639768123626709, + "learning_rate": 3.1276331403073733e-06, + "loss": 0.8405, + "step": 11945 + }, + { + "epoch": 0.7490124772713023, + "grad_norm": 3.2914209365844727, + "learning_rate": 3.1261580665252544e-06, + "loss": 1.2093, + "step": 11946 + }, + { + "epoch": 0.7490751771270926, + "grad_norm": 3.175445556640625, + "learning_rate": 3.1246832762206735e-06, + "loss": 1.1714, + "step": 11947 + }, + { + "epoch": 0.7491378769828829, + "grad_norm": 3.3758199214935303, + "learning_rate": 3.1232087694544577e-06, + "loss": 0.9086, + "step": 11948 + }, + { + "epoch": 0.7492005768386732, + "grad_norm": 3.122744560241699, + "learning_rate": 3.1217345462874125e-06, + "loss": 1.0295, + "step": 11949 + }, + { + "epoch": 0.7492632766944636, + "grad_norm": 3.395695209503174, + "learning_rate": 3.1202606067803355e-06, + "loss": 1.0735, + "step": 11950 + }, + { + "epoch": 0.7493259765502539, + "grad_norm": 3.2813119888305664, + "learning_rate": 3.1187869509940084e-06, + "loss": 1.1856, + "step": 11951 + }, + { + "epoch": 0.7493886764060442, + "grad_norm": 3.1276044845581055, + "learning_rate": 3.1173135789892105e-06, + "loss": 0.9987, + "step": 11952 + }, + { + "epoch": 0.7494513762618346, + "grad_norm": 3.2553868293762207, + "learning_rate": 3.1158404908267016e-06, + "loss": 1.1613, + "step": 11953 + }, + { + "epoch": 0.7495140761176249, + "grad_norm": 3.224156618118286, + "learning_rate": 3.114367686567228e-06, + "loss": 1.0313, + "step": 11954 + }, + { + "epoch": 0.7495767759734152, + "grad_norm": 3.2731568813323975, + "learning_rate": 3.1128951662715345e-06, + "loss": 1.11, + "step": 11955 + }, + { + "epoch": 0.7496394758292056, + "grad_norm": 3.008127212524414, + "learning_rate": 3.1114229300003464e-06, + "loss": 1.1005, + "step": 11956 + }, + { + "epoch": 0.749702175684996, + "grad_norm": 4.039447784423828, + "learning_rate": 3.1099509778143767e-06, + "loss": 1.0666, + "step": 11957 + }, + { + "epoch": 0.7497648755407863, + "grad_norm": 3.877268075942993, + "learning_rate": 3.108479309774326e-06, + "loss": 1.3497, + "step": 11958 + }, + { + "epoch": 0.7498275753965766, + "grad_norm": 3.2780444622039795, + "learning_rate": 3.1070079259408934e-06, + "loss": 1.2732, + "step": 11959 + }, + { + "epoch": 0.749890275252367, + "grad_norm": 3.607095956802368, + "learning_rate": 3.105536826374754e-06, + "loss": 1.1738, + "step": 11960 + }, + { + "epoch": 0.7499529751081573, + "grad_norm": 3.7480862140655518, + "learning_rate": 3.1040660111365782e-06, + "loss": 1.0759, + "step": 11961 + }, + { + "epoch": 0.7500156749639476, + "grad_norm": 3.199899196624756, + "learning_rate": 3.1025954802870174e-06, + "loss": 1.1284, + "step": 11962 + }, + { + "epoch": 0.7500783748197379, + "grad_norm": 3.468869686126709, + "learning_rate": 3.101125233886724e-06, + "loss": 1.1235, + "step": 11963 + }, + { + "epoch": 0.7501410746755283, + "grad_norm": 3.5633704662323, + "learning_rate": 3.099655271996327e-06, + "loss": 0.9551, + "step": 11964 + }, + { + "epoch": 0.7502037745313186, + "grad_norm": 3.4161529541015625, + "learning_rate": 3.0981855946764438e-06, + "loss": 1.0792, + "step": 11965 + }, + { + "epoch": 0.7502664743871089, + "grad_norm": 3.3682498931884766, + "learning_rate": 3.096716201987692e-06, + "loss": 1.1128, + "step": 11966 + }, + { + "epoch": 0.7503291742428992, + "grad_norm": 3.5050957202911377, + "learning_rate": 3.0952470939906652e-06, + "loss": 1.0571, + 
"step": 11967 + }, + { + "epoch": 0.7503918740986896, + "grad_norm": 2.870208740234375, + "learning_rate": 3.09377827074595e-06, + "loss": 1.1118, + "step": 11968 + }, + { + "epoch": 0.7504545739544799, + "grad_norm": 3.143458127975464, + "learning_rate": 3.0923097323141173e-06, + "loss": 1.0751, + "step": 11969 + }, + { + "epoch": 0.7505172738102702, + "grad_norm": 3.762665271759033, + "learning_rate": 3.090841478755736e-06, + "loss": 1.0652, + "step": 11970 + }, + { + "epoch": 0.7505799736660606, + "grad_norm": 3.6710360050201416, + "learning_rate": 3.089373510131354e-06, + "loss": 1.1531, + "step": 11971 + }, + { + "epoch": 0.7506426735218509, + "grad_norm": 3.3791584968566895, + "learning_rate": 3.0879058265015107e-06, + "loss": 0.9788, + "step": 11972 + }, + { + "epoch": 0.7507053733776412, + "grad_norm": 3.4177801609039307, + "learning_rate": 3.086438427926729e-06, + "loss": 1.0398, + "step": 11973 + }, + { + "epoch": 0.7507680732334315, + "grad_norm": 2.9096577167510986, + "learning_rate": 3.084971314467532e-06, + "loss": 1.0429, + "step": 11974 + }, + { + "epoch": 0.7508307730892219, + "grad_norm": 3.7659873962402344, + "learning_rate": 3.08350448618442e-06, + "loss": 1.1588, + "step": 11975 + }, + { + "epoch": 0.7508934729450122, + "grad_norm": 3.1213788986206055, + "learning_rate": 3.0820379431378813e-06, + "loss": 1.1364, + "step": 11976 + }, + { + "epoch": 0.7509561728008025, + "grad_norm": 3.5643367767333984, + "learning_rate": 3.080571685388404e-06, + "loss": 1.0881, + "step": 11977 + }, + { + "epoch": 0.7510188726565928, + "grad_norm": 3.451840877532959, + "learning_rate": 3.079105712996452e-06, + "loss": 1.0284, + "step": 11978 + }, + { + "epoch": 0.7510815725123832, + "grad_norm": 3.3099474906921387, + "learning_rate": 3.0776400260224825e-06, + "loss": 1.0026, + "step": 11979 + }, + { + "epoch": 0.7511442723681736, + "grad_norm": 3.110595703125, + "learning_rate": 3.0761746245269376e-06, + "loss": 1.2042, + "step": 11980 + }, + { + "epoch": 0.7512069722239639, + "grad_norm": 3.5101099014282227, + "learning_rate": 3.074709508570257e-06, + "loss": 1.1338, + "step": 11981 + }, + { + "epoch": 0.7512696720797543, + "grad_norm": 3.2552390098571777, + "learning_rate": 3.073244678212858e-06, + "loss": 1.1803, + "step": 11982 + }, + { + "epoch": 0.7513323719355446, + "grad_norm": 3.541682720184326, + "learning_rate": 3.071780133515152e-06, + "loss": 0.9988, + "step": 11983 + }, + { + "epoch": 0.7513950717913349, + "grad_norm": 3.1129837036132812, + "learning_rate": 3.0703158745375316e-06, + "loss": 1.2054, + "step": 11984 + }, + { + "epoch": 0.7514577716471252, + "grad_norm": 3.800992488861084, + "learning_rate": 3.0688519013403906e-06, + "loss": 0.9468, + "step": 11985 + }, + { + "epoch": 0.7515204715029156, + "grad_norm": 3.129899024963379, + "learning_rate": 3.0673882139841006e-06, + "loss": 1.0872, + "step": 11986 + }, + { + "epoch": 0.7515831713587059, + "grad_norm": 3.702624797821045, + "learning_rate": 3.06592481252902e-06, + "loss": 1.0311, + "step": 11987 + }, + { + "epoch": 0.7516458712144962, + "grad_norm": 3.5839180946350098, + "learning_rate": 3.064461697035506e-06, + "loss": 1.2183, + "step": 11988 + }, + { + "epoch": 0.7517085710702865, + "grad_norm": 3.2643659114837646, + "learning_rate": 3.0629988675638944e-06, + "loss": 1.0083, + "step": 11989 + }, + { + "epoch": 0.7517712709260769, + "grad_norm": 3.3699402809143066, + "learning_rate": 3.0615363241745124e-06, + "loss": 1.0586, + "step": 11990 + }, + { + "epoch": 0.7518339707818672, + "grad_norm": 
3.287815570831299, + "learning_rate": 3.0600740669276716e-06, + "loss": 0.9957, + "step": 11991 + }, + { + "epoch": 0.7518966706376575, + "grad_norm": 3.439260721206665, + "learning_rate": 3.058612095883683e-06, + "loss": 1.0804, + "step": 11992 + }, + { + "epoch": 0.7519593704934479, + "grad_norm": 3.4281044006347656, + "learning_rate": 3.057150411102835e-06, + "loss": 1.046, + "step": 11993 + }, + { + "epoch": 0.7520220703492382, + "grad_norm": 3.134411096572876, + "learning_rate": 3.0556890126454075e-06, + "loss": 1.0926, + "step": 11994 + }, + { + "epoch": 0.7520847702050285, + "grad_norm": 3.0510778427124023, + "learning_rate": 3.0542279005716646e-06, + "loss": 1.0559, + "step": 11995 + }, + { + "epoch": 0.7521474700608188, + "grad_norm": 3.14219331741333, + "learning_rate": 3.0527670749418693e-06, + "loss": 0.992, + "step": 11996 + }, + { + "epoch": 0.7522101699166092, + "grad_norm": 3.6895430088043213, + "learning_rate": 3.051306535816264e-06, + "loss": 1.047, + "step": 11997 + }, + { + "epoch": 0.7522728697723995, + "grad_norm": 3.2577805519104004, + "learning_rate": 3.0498462832550766e-06, + "loss": 1.1227, + "step": 11998 + }, + { + "epoch": 0.7523355696281898, + "grad_norm": 3.5072450637817383, + "learning_rate": 3.048386317318537e-06, + "loss": 1.1169, + "step": 11999 + }, + { + "epoch": 0.7523982694839801, + "grad_norm": 3.0118300914764404, + "learning_rate": 3.0469266380668473e-06, + "loss": 1.2044, + "step": 12000 + }, + { + "epoch": 0.7523982694839801, + "eval_loss": 1.1000334024429321, + "eval_runtime": 143.8076, + "eval_samples_per_second": 4.381, + "eval_steps_per_second": 1.099, + "step": 12000 + }, + { + "epoch": 0.7524609693397705, + "grad_norm": 3.7009761333465576, + "learning_rate": 3.0454672455602077e-06, + "loss": 0.8449, + "step": 12001 + }, + { + "epoch": 0.7525236691955608, + "grad_norm": 3.63543438911438, + "learning_rate": 3.0440081398587982e-06, + "loss": 1.2395, + "step": 12002 + }, + { + "epoch": 0.7525863690513512, + "grad_norm": 3.8126611709594727, + "learning_rate": 3.0425493210228006e-06, + "loss": 1.1751, + "step": 12003 + }, + { + "epoch": 0.7526490689071416, + "grad_norm": 3.320439338684082, + "learning_rate": 3.041090789112374e-06, + "loss": 1.2259, + "step": 12004 + }, + { + "epoch": 0.7527117687629319, + "grad_norm": 3.298802614212036, + "learning_rate": 3.0396325441876627e-06, + "loss": 1.0797, + "step": 12005 + }, + { + "epoch": 0.7527744686187222, + "grad_norm": 3.203244924545288, + "learning_rate": 3.038174586308813e-06, + "loss": 1.1103, + "step": 12006 + }, + { + "epoch": 0.7528371684745125, + "grad_norm": 2.9447059631347656, + "learning_rate": 3.0367169155359467e-06, + "loss": 1.1277, + "step": 12007 + }, + { + "epoch": 0.7528998683303029, + "grad_norm": 3.6538679599761963, + "learning_rate": 3.03525953192918e-06, + "loss": 1.1502, + "step": 12008 + }, + { + "epoch": 0.7529625681860932, + "grad_norm": 3.109344959259033, + "learning_rate": 3.0338024355486094e-06, + "loss": 0.9762, + "step": 12009 + }, + { + "epoch": 0.7530252680418835, + "grad_norm": 3.353846311569214, + "learning_rate": 3.032345626454335e-06, + "loss": 1.1577, + "step": 12010 + }, + { + "epoch": 0.7530879678976738, + "grad_norm": 3.274289131164551, + "learning_rate": 3.0308891047064313e-06, + "loss": 0.9834, + "step": 12011 + }, + { + "epoch": 0.7531506677534642, + "grad_norm": 3.1797187328338623, + "learning_rate": 3.0294328703649655e-06, + "loss": 1.1303, + "step": 12012 + }, + { + "epoch": 0.7532133676092545, + "grad_norm": 3.1708476543426514, + "learning_rate": 
3.027976923489988e-06, + "loss": 1.1206, + "step": 12013 + }, + { + "epoch": 0.7532760674650448, + "grad_norm": 3.6183934211730957, + "learning_rate": 3.026521264141551e-06, + "loss": 1.1203, + "step": 12014 + }, + { + "epoch": 0.7533387673208352, + "grad_norm": 2.856125831604004, + "learning_rate": 3.0250658923796817e-06, + "loss": 1.1176, + "step": 12015 + }, + { + "epoch": 0.7534014671766255, + "grad_norm": 3.191847085952759, + "learning_rate": 3.0236108082643954e-06, + "loss": 1.0314, + "step": 12016 + }, + { + "epoch": 0.7534641670324158, + "grad_norm": 3.903756856918335, + "learning_rate": 3.022156011855708e-06, + "loss": 1.0824, + "step": 12017 + }, + { + "epoch": 0.7535268668882061, + "grad_norm": 3.6667284965515137, + "learning_rate": 3.020701503213611e-06, + "loss": 0.968, + "step": 12018 + }, + { + "epoch": 0.7535895667439965, + "grad_norm": 3.261878728866577, + "learning_rate": 3.0192472823980903e-06, + "loss": 1.056, + "step": 12019 + }, + { + "epoch": 0.7536522665997868, + "grad_norm": 3.625896692276001, + "learning_rate": 3.0177933494691115e-06, + "loss": 1.0424, + "step": 12020 + }, + { + "epoch": 0.7537149664555771, + "grad_norm": 3.029142379760742, + "learning_rate": 3.016339704486644e-06, + "loss": 1.2846, + "step": 12021 + }, + { + "epoch": 0.7537776663113674, + "grad_norm": 3.2767586708068848, + "learning_rate": 3.0148863475106315e-06, + "loss": 1.1825, + "step": 12022 + }, + { + "epoch": 0.7538403661671578, + "grad_norm": 3.861018180847168, + "learning_rate": 3.013433278601011e-06, + "loss": 1.0145, + "step": 12023 + }, + { + "epoch": 0.7539030660229481, + "grad_norm": 3.106250524520874, + "learning_rate": 3.011980497817705e-06, + "loss": 1.2371, + "step": 12024 + }, + { + "epoch": 0.7539657658787384, + "grad_norm": 3.530777931213379, + "learning_rate": 3.0105280052206297e-06, + "loss": 1.0869, + "step": 12025 + }, + { + "epoch": 0.7540284657345289, + "grad_norm": 3.205037832260132, + "learning_rate": 3.0090758008696864e-06, + "loss": 1.2501, + "step": 12026 + }, + { + "epoch": 0.7540911655903192, + "grad_norm": 3.187903642654419, + "learning_rate": 3.0076238848247586e-06, + "loss": 1.1034, + "step": 12027 + }, + { + "epoch": 0.7541538654461095, + "grad_norm": 3.497614860534668, + "learning_rate": 3.006172257145731e-06, + "loss": 1.022, + "step": 12028 + }, + { + "epoch": 0.7542165653018998, + "grad_norm": 3.200272560119629, + "learning_rate": 3.004720917892464e-06, + "loss": 0.9147, + "step": 12029 + }, + { + "epoch": 0.7542792651576902, + "grad_norm": 3.3348217010498047, + "learning_rate": 3.0032698671248118e-06, + "loss": 1.1521, + "step": 12030 + }, + { + "epoch": 0.7543419650134805, + "grad_norm": 3.5534603595733643, + "learning_rate": 3.0018191049026136e-06, + "loss": 0.9878, + "step": 12031 + }, + { + "epoch": 0.7544046648692708, + "grad_norm": 3.1351516246795654, + "learning_rate": 3.0003686312857037e-06, + "loss": 0.966, + "step": 12032 + }, + { + "epoch": 0.7544673647250612, + "grad_norm": 3.2220962047576904, + "learning_rate": 2.998918446333897e-06, + "loss": 0.9359, + "step": 12033 + }, + { + "epoch": 0.7545300645808515, + "grad_norm": 3.3626394271850586, + "learning_rate": 2.997468550106999e-06, + "loss": 1.1397, + "step": 12034 + }, + { + "epoch": 0.7545927644366418, + "grad_norm": 3.2734522819519043, + "learning_rate": 2.996018942664801e-06, + "loss": 1.1405, + "step": 12035 + }, + { + "epoch": 0.7546554642924321, + "grad_norm": 3.2274606227874756, + "learning_rate": 2.9945696240670905e-06, + "loss": 1.0807, + "step": 12036 + }, + { + "epoch": 
0.7547181641482225, + "grad_norm": 3.5325589179992676, + "learning_rate": 2.9931205943736365e-06, + "loss": 0.9891, + "step": 12037 + }, + { + "epoch": 0.7547808640040128, + "grad_norm": 3.311453104019165, + "learning_rate": 2.99167185364419e-06, + "loss": 1.0788, + "step": 12038 + }, + { + "epoch": 0.7548435638598031, + "grad_norm": 3.569903612136841, + "learning_rate": 2.9902234019385056e-06, + "loss": 1.051, + "step": 12039 + }, + { + "epoch": 0.7549062637155934, + "grad_norm": 3.314380407333374, + "learning_rate": 2.988775239316315e-06, + "loss": 1.2594, + "step": 12040 + }, + { + "epoch": 0.7549689635713838, + "grad_norm": 3.417168617248535, + "learning_rate": 2.9873273658373393e-06, + "loss": 1.0405, + "step": 12041 + }, + { + "epoch": 0.7550316634271741, + "grad_norm": 3.71803617477417, + "learning_rate": 2.9858797815612863e-06, + "loss": 1.0311, + "step": 12042 + }, + { + "epoch": 0.7550943632829644, + "grad_norm": 3.2245779037475586, + "learning_rate": 2.98443248654786e-06, + "loss": 1.0427, + "step": 12043 + }, + { + "epoch": 0.7551570631387547, + "grad_norm": 3.7172513008117676, + "learning_rate": 2.982985480856745e-06, + "loss": 1.0749, + "step": 12044 + }, + { + "epoch": 0.7552197629945451, + "grad_norm": 2.9560279846191406, + "learning_rate": 2.981538764547616e-06, + "loss": 0.9809, + "step": 12045 + }, + { + "epoch": 0.7552824628503354, + "grad_norm": 3.305259943008423, + "learning_rate": 2.9800923376801295e-06, + "loss": 1.1234, + "step": 12046 + }, + { + "epoch": 0.7553451627061257, + "grad_norm": 3.4214930534362793, + "learning_rate": 2.978646200313946e-06, + "loss": 1.0649, + "step": 12047 + }, + { + "epoch": 0.755407862561916, + "grad_norm": 3.2963879108428955, + "learning_rate": 2.9772003525087e-06, + "loss": 1.1909, + "step": 12048 + }, + { + "epoch": 0.7554705624177065, + "grad_norm": 3.145347833633423, + "learning_rate": 2.975754794324015e-06, + "loss": 1.1701, + "step": 12049 + }, + { + "epoch": 0.7555332622734968, + "grad_norm": 3.8324785232543945, + "learning_rate": 2.974309525819512e-06, + "loss": 0.7297, + "step": 12050 + }, + { + "epoch": 0.7555959621292871, + "grad_norm": 3.188154935836792, + "learning_rate": 2.97286454705479e-06, + "loss": 1.1022, + "step": 12051 + }, + { + "epoch": 0.7556586619850775, + "grad_norm": 3.075145959854126, + "learning_rate": 2.9714198580894427e-06, + "loss": 1.0693, + "step": 12052 + }, + { + "epoch": 0.7557213618408678, + "grad_norm": 3.3082187175750732, + "learning_rate": 2.969975458983043e-06, + "loss": 1.1437, + "step": 12053 + }, + { + "epoch": 0.7557840616966581, + "grad_norm": 3.6980881690979004, + "learning_rate": 2.9685313497951663e-06, + "loss": 1.1616, + "step": 12054 + }, + { + "epoch": 0.7558467615524485, + "grad_norm": 3.556854486465454, + "learning_rate": 2.967087530585363e-06, + "loss": 1.3128, + "step": 12055 + }, + { + "epoch": 0.7559094614082388, + "grad_norm": 3.057360887527466, + "learning_rate": 2.9656440014131737e-06, + "loss": 1.1428, + "step": 12056 + }, + { + "epoch": 0.7559721612640291, + "grad_norm": 3.5304691791534424, + "learning_rate": 2.9642007623381376e-06, + "loss": 1.1008, + "step": 12057 + }, + { + "epoch": 0.7560348611198194, + "grad_norm": 3.6297969818115234, + "learning_rate": 2.9627578134197675e-06, + "loss": 1.0002, + "step": 12058 + }, + { + "epoch": 0.7560975609756098, + "grad_norm": 3.70430326461792, + "learning_rate": 2.9613151547175746e-06, + "loss": 1.0253, + "step": 12059 + }, + { + "epoch": 0.7561602608314001, + "grad_norm": 3.251467227935791, + "learning_rate": 
2.959872786291047e-06, + "loss": 1.1778, + "step": 12060 + }, + { + "epoch": 0.7562229606871904, + "grad_norm": 3.8851888179779053, + "learning_rate": 2.9584307081996778e-06, + "loss": 1.0419, + "step": 12061 + }, + { + "epoch": 0.7562856605429807, + "grad_norm": 3.284511089324951, + "learning_rate": 2.9569889205029324e-06, + "loss": 1.1414, + "step": 12062 + }, + { + "epoch": 0.7563483603987711, + "grad_norm": 3.1517326831817627, + "learning_rate": 2.955547423260272e-06, + "loss": 1.0408, + "step": 12063 + }, + { + "epoch": 0.7564110602545614, + "grad_norm": 3.4536075592041016, + "learning_rate": 2.954106216531141e-06, + "loss": 1.1381, + "step": 12064 + }, + { + "epoch": 0.7564737601103517, + "grad_norm": 3.2668511867523193, + "learning_rate": 2.9526653003749794e-06, + "loss": 0.9731, + "step": 12065 + }, + { + "epoch": 0.756536459966142, + "grad_norm": 3.332028865814209, + "learning_rate": 2.9512246748512085e-06, + "loss": 1.0082, + "step": 12066 + }, + { + "epoch": 0.7565991598219324, + "grad_norm": 3.2638139724731445, + "learning_rate": 2.9497843400192383e-06, + "loss": 1.166, + "step": 12067 + }, + { + "epoch": 0.7566618596777227, + "grad_norm": 3.2606749534606934, + "learning_rate": 2.948344295938471e-06, + "loss": 1.0317, + "step": 12068 + }, + { + "epoch": 0.756724559533513, + "grad_norm": 3.4598758220672607, + "learning_rate": 2.946904542668294e-06, + "loss": 1.135, + "step": 12069 + }, + { + "epoch": 0.7567872593893034, + "grad_norm": 3.122769832611084, + "learning_rate": 2.9454650802680817e-06, + "loss": 1.1412, + "step": 12070 + }, + { + "epoch": 0.7568499592450937, + "grad_norm": 3.6286535263061523, + "learning_rate": 2.9440259087971944e-06, + "loss": 0.9031, + "step": 12071 + }, + { + "epoch": 0.7569126591008841, + "grad_norm": 3.261204957962036, + "learning_rate": 2.9425870283149895e-06, + "loss": 1.0042, + "step": 12072 + }, + { + "epoch": 0.7569753589566744, + "grad_norm": 3.1121225357055664, + "learning_rate": 2.941148438880803e-06, + "loss": 1.0318, + "step": 12073 + }, + { + "epoch": 0.7570380588124648, + "grad_norm": 3.1565463542938232, + "learning_rate": 2.9397101405539654e-06, + "loss": 1.1346, + "step": 12074 + }, + { + "epoch": 0.7571007586682551, + "grad_norm": 3.1576523780822754, + "learning_rate": 2.9382721333937847e-06, + "loss": 1.0596, + "step": 12075 + }, + { + "epoch": 0.7571634585240454, + "grad_norm": 3.3719379901885986, + "learning_rate": 2.936834417459573e-06, + "loss": 1.0952, + "step": 12076 + }, + { + "epoch": 0.7572261583798358, + "grad_norm": 3.381315231323242, + "learning_rate": 2.93539699281062e-06, + "loss": 1.093, + "step": 12077 + }, + { + "epoch": 0.7572888582356261, + "grad_norm": 3.542200803756714, + "learning_rate": 2.933959859506198e-06, + "loss": 0.9927, + "step": 12078 + }, + { + "epoch": 0.7573515580914164, + "grad_norm": 3.3435745239257812, + "learning_rate": 2.932523017605585e-06, + "loss": 1.1292, + "step": 12079 + }, + { + "epoch": 0.7574142579472067, + "grad_norm": 3.4584836959838867, + "learning_rate": 2.9310864671680305e-06, + "loss": 1.139, + "step": 12080 + }, + { + "epoch": 0.7574769578029971, + "grad_norm": 2.9665186405181885, + "learning_rate": 2.929650208252779e-06, + "loss": 1.052, + "step": 12081 + }, + { + "epoch": 0.7575396576587874, + "grad_norm": 3.4086318016052246, + "learning_rate": 2.9282142409190585e-06, + "loss": 1.1064, + "step": 12082 + }, + { + "epoch": 0.7576023575145777, + "grad_norm": 3.0008602142333984, + "learning_rate": 2.926778565226095e-06, + "loss": 1.1884, + "step": 12083 + }, + { + "epoch": 
0.757665057370368, + "grad_norm": 3.4167439937591553, + "learning_rate": 2.9253431812330925e-06, + "loss": 1.0046, + "step": 12084 + }, + { + "epoch": 0.7577277572261584, + "grad_norm": 3.5619754791259766, + "learning_rate": 2.9239080889992454e-06, + "loss": 1.0523, + "step": 12085 + }, + { + "epoch": 0.7577904570819487, + "grad_norm": 3.5890896320343018, + "learning_rate": 2.922473288583735e-06, + "loss": 1.0511, + "step": 12086 + }, + { + "epoch": 0.757853156937739, + "grad_norm": 3.176586866378784, + "learning_rate": 2.9210387800457386e-06, + "loss": 1.1301, + "step": 12087 + }, + { + "epoch": 0.7579158567935294, + "grad_norm": 3.2469515800476074, + "learning_rate": 2.9196045634444116e-06, + "loss": 1.0078, + "step": 12088 + }, + { + "epoch": 0.7579785566493197, + "grad_norm": 3.3368539810180664, + "learning_rate": 2.9181706388388988e-06, + "loss": 1.2155, + "step": 12089 + }, + { + "epoch": 0.75804125650511, + "grad_norm": 3.19647216796875, + "learning_rate": 2.9167370062883403e-06, + "loss": 1.0288, + "step": 12090 + }, + { + "epoch": 0.7581039563609003, + "grad_norm": 3.407094717025757, + "learning_rate": 2.9153036658518584e-06, + "loss": 1.0156, + "step": 12091 + }, + { + "epoch": 0.7581666562166907, + "grad_norm": 3.5310487747192383, + "learning_rate": 2.9138706175885623e-06, + "loss": 1.0652, + "step": 12092 + }, + { + "epoch": 0.758229356072481, + "grad_norm": 3.5037624835968018, + "learning_rate": 2.912437861557548e-06, + "loss": 1.045, + "step": 12093 + }, + { + "epoch": 0.7582920559282713, + "grad_norm": 3.1167702674865723, + "learning_rate": 2.9110053978179087e-06, + "loss": 1.0939, + "step": 12094 + }, + { + "epoch": 0.7583547557840618, + "grad_norm": 3.66879940032959, + "learning_rate": 2.909573226428717e-06, + "loss": 1.1708, + "step": 12095 + }, + { + "epoch": 0.7584174556398521, + "grad_norm": 3.217806577682495, + "learning_rate": 2.9081413474490337e-06, + "loss": 1.1332, + "step": 12096 + }, + { + "epoch": 0.7584801554956424, + "grad_norm": 3.373356580734253, + "learning_rate": 2.9067097609379093e-06, + "loss": 1.12, + "step": 12097 + }, + { + "epoch": 0.7585428553514327, + "grad_norm": 3.3760123252868652, + "learning_rate": 2.9052784669543867e-06, + "loss": 1.1381, + "step": 12098 + }, + { + "epoch": 0.7586055552072231, + "grad_norm": 2.9782302379608154, + "learning_rate": 2.90384746555749e-06, + "loss": 1.3397, + "step": 12099 + }, + { + "epoch": 0.7586682550630134, + "grad_norm": 3.865217447280884, + "learning_rate": 2.902416756806231e-06, + "loss": 0.9495, + "step": 12100 + }, + { + "epoch": 0.7587309549188037, + "grad_norm": 3.396362543106079, + "learning_rate": 2.9009863407596173e-06, + "loss": 1.2989, + "step": 12101 + }, + { + "epoch": 0.758793654774594, + "grad_norm": 3.418381929397583, + "learning_rate": 2.8995562174766378e-06, + "loss": 1.0423, + "step": 12102 + }, + { + "epoch": 0.7588563546303844, + "grad_norm": 3.601886034011841, + "learning_rate": 2.898126387016269e-06, + "loss": 1.0669, + "step": 12103 + }, + { + "epoch": 0.7589190544861747, + "grad_norm": 3.178767442703247, + "learning_rate": 2.896696849437476e-06, + "loss": 0.9934, + "step": 12104 + }, + { + "epoch": 0.758981754341965, + "grad_norm": 3.5330607891082764, + "learning_rate": 2.895267604799217e-06, + "loss": 1.2318, + "step": 12105 + }, + { + "epoch": 0.7590444541977553, + "grad_norm": 3.4877567291259766, + "learning_rate": 2.8938386531604336e-06, + "loss": 1.0732, + "step": 12106 + }, + { + "epoch": 0.7591071540535457, + "grad_norm": 3.501284122467041, + "learning_rate": 
2.8924099945800533e-06, + "loss": 1.0189, + "step": 12107 + }, + { + "epoch": 0.759169853909336, + "grad_norm": 3.3387451171875, + "learning_rate": 2.8909816291169923e-06, + "loss": 0.9638, + "step": 12108 + }, + { + "epoch": 0.7592325537651263, + "grad_norm": 3.7921018600463867, + "learning_rate": 2.8895535568301638e-06, + "loss": 1.0039, + "step": 12109 + }, + { + "epoch": 0.7592952536209167, + "grad_norm": 2.9627652168273926, + "learning_rate": 2.8881257777784556e-06, + "loss": 1.2381, + "step": 12110 + }, + { + "epoch": 0.759357953476707, + "grad_norm": 3.1095359325408936, + "learning_rate": 2.886698292020749e-06, + "loss": 1.1519, + "step": 12111 + }, + { + "epoch": 0.7594206533324973, + "grad_norm": 3.326406955718994, + "learning_rate": 2.885271099615917e-06, + "loss": 1.1893, + "step": 12112 + }, + { + "epoch": 0.7594833531882876, + "grad_norm": 3.4326207637786865, + "learning_rate": 2.883844200622816e-06, + "loss": 1.1977, + "step": 12113 + }, + { + "epoch": 0.759546053044078, + "grad_norm": 3.1063196659088135, + "learning_rate": 2.8824175951002918e-06, + "loss": 0.9658, + "step": 12114 + }, + { + "epoch": 0.7596087528998683, + "grad_norm": 3.7124826908111572, + "learning_rate": 2.880991283107174e-06, + "loss": 1.0935, + "step": 12115 + }, + { + "epoch": 0.7596714527556586, + "grad_norm": 3.454172134399414, + "learning_rate": 2.8795652647022887e-06, + "loss": 0.9631, + "step": 12116 + }, + { + "epoch": 0.7597341526114489, + "grad_norm": 3.952608108520508, + "learning_rate": 2.8781395399444434e-06, + "loss": 1.2738, + "step": 12117 + }, + { + "epoch": 0.7597968524672394, + "grad_norm": 3.997450590133667, + "learning_rate": 2.8767141088924312e-06, + "loss": 1.108, + "step": 12118 + }, + { + "epoch": 0.7598595523230297, + "grad_norm": 3.6357083320617676, + "learning_rate": 2.8752889716050426e-06, + "loss": 1.0836, + "step": 12119 + }, + { + "epoch": 0.75992225217882, + "grad_norm": 3.3283724784851074, + "learning_rate": 2.8738641281410484e-06, + "loss": 1.131, + "step": 12120 + }, + { + "epoch": 0.7599849520346104, + "grad_norm": 3.58406400680542, + "learning_rate": 2.8724395785592086e-06, + "loss": 1.147, + "step": 12121 + }, + { + "epoch": 0.7600476518904007, + "grad_norm": 3.138441324234009, + "learning_rate": 2.8710153229182693e-06, + "loss": 1.0532, + "step": 12122 + }, + { + "epoch": 0.760110351746191, + "grad_norm": 3.5564427375793457, + "learning_rate": 2.869591361276972e-06, + "loss": 1.1537, + "step": 12123 + }, + { + "epoch": 0.7601730516019813, + "grad_norm": 3.265965223312378, + "learning_rate": 2.8681676936940397e-06, + "loss": 1.0906, + "step": 12124 + }, + { + "epoch": 0.7602357514577717, + "grad_norm": 3.849010467529297, + "learning_rate": 2.866744320228182e-06, + "loss": 1.0297, + "step": 12125 + }, + { + "epoch": 0.760298451313562, + "grad_norm": 3.380659818649292, + "learning_rate": 2.865321240938096e-06, + "loss": 1.118, + "step": 12126 + }, + { + "epoch": 0.7603611511693523, + "grad_norm": 3.2935478687286377, + "learning_rate": 2.8638984558824777e-06, + "loss": 1.0821, + "step": 12127 + }, + { + "epoch": 0.7604238510251426, + "grad_norm": 3.4773991107940674, + "learning_rate": 2.8624759651199997e-06, + "loss": 1.1092, + "step": 12128 + }, + { + "epoch": 0.760486550880933, + "grad_norm": 3.1375303268432617, + "learning_rate": 2.86105376870932e-06, + "loss": 1.0691, + "step": 12129 + }, + { + "epoch": 0.7605492507367233, + "grad_norm": 3.3908588886260986, + "learning_rate": 2.859631866709098e-06, + "loss": 1.2028, + "step": 12130 + }, + { + "epoch": 
0.7606119505925136, + "grad_norm": 3.369149923324585, + "learning_rate": 2.85821025917797e-06, + "loss": 1.1862, + "step": 12131 + }, + { + "epoch": 0.760674650448304, + "grad_norm": 3.373969793319702, + "learning_rate": 2.8567889461745623e-06, + "loss": 0.9066, + "step": 12132 + }, + { + "epoch": 0.7607373503040943, + "grad_norm": 3.3622169494628906, + "learning_rate": 2.8553679277574885e-06, + "loss": 0.9877, + "step": 12133 + }, + { + "epoch": 0.7608000501598846, + "grad_norm": 3.369293689727783, + "learning_rate": 2.8539472039853557e-06, + "loss": 0.9361, + "step": 12134 + }, + { + "epoch": 0.7608627500156749, + "grad_norm": 3.198906660079956, + "learning_rate": 2.852526774916752e-06, + "loss": 1.212, + "step": 12135 + }, + { + "epoch": 0.7609254498714653, + "grad_norm": 3.129807233810425, + "learning_rate": 2.851106640610256e-06, + "loss": 1.2133, + "step": 12136 + }, + { + "epoch": 0.7609881497272556, + "grad_norm": 3.0673487186431885, + "learning_rate": 2.8496868011244316e-06, + "loss": 1.1161, + "step": 12137 + }, + { + "epoch": 0.7610508495830459, + "grad_norm": 2.9969043731689453, + "learning_rate": 2.8482672565178392e-06, + "loss": 1.0626, + "step": 12138 + }, + { + "epoch": 0.7611135494388362, + "grad_norm": 3.038506031036377, + "learning_rate": 2.846848006849018e-06, + "loss": 1.1421, + "step": 12139 + }, + { + "epoch": 0.7611762492946266, + "grad_norm": 3.4211723804473877, + "learning_rate": 2.8454290521764927e-06, + "loss": 0.8871, + "step": 12140 + }, + { + "epoch": 0.7612389491504169, + "grad_norm": 3.559622287750244, + "learning_rate": 2.8440103925587904e-06, + "loss": 1.0781, + "step": 12141 + }, + { + "epoch": 0.7613016490062073, + "grad_norm": 3.338824510574341, + "learning_rate": 2.8425920280544104e-06, + "loss": 1.0601, + "step": 12142 + }, + { + "epoch": 0.7613643488619977, + "grad_norm": 3.3758203983306885, + "learning_rate": 2.841173958721849e-06, + "loss": 0.9937, + "step": 12143 + }, + { + "epoch": 0.761427048717788, + "grad_norm": 3.5304689407348633, + "learning_rate": 2.839756184619582e-06, + "loss": 1.1664, + "step": 12144 + }, + { + "epoch": 0.7614897485735783, + "grad_norm": 3.1654751300811768, + "learning_rate": 2.8383387058060875e-06, + "loss": 0.9713, + "step": 12145 + }, + { + "epoch": 0.7615524484293686, + "grad_norm": 3.9517388343811035, + "learning_rate": 2.8369215223398163e-06, + "loss": 1.1227, + "step": 12146 + }, + { + "epoch": 0.761615148285159, + "grad_norm": 3.3039209842681885, + "learning_rate": 2.8355046342792137e-06, + "loss": 0.9377, + "step": 12147 + }, + { + "epoch": 0.7616778481409493, + "grad_norm": 3.0457701683044434, + "learning_rate": 2.834088041682711e-06, + "loss": 1.0183, + "step": 12148 + }, + { + "epoch": 0.7617405479967396, + "grad_norm": 3.2027721405029297, + "learning_rate": 2.8326717446087325e-06, + "loss": 0.9763, + "step": 12149 + }, + { + "epoch": 0.76180324785253, + "grad_norm": 3.2236902713775635, + "learning_rate": 2.831255743115685e-06, + "loss": 1.0634, + "step": 12150 + }, + { + "epoch": 0.7618659477083203, + "grad_norm": 3.0450918674468994, + "learning_rate": 2.8298400372619605e-06, + "loss": 1.2591, + "step": 12151 + }, + { + "epoch": 0.7619286475641106, + "grad_norm": 3.172231435775757, + "learning_rate": 2.828424627105949e-06, + "loss": 1.2588, + "step": 12152 + }, + { + "epoch": 0.7619913474199009, + "grad_norm": 3.725785970687866, + "learning_rate": 2.827009512706018e-06, + "loss": 0.8644, + "step": 12153 + }, + { + "epoch": 0.7620540472756913, + "grad_norm": 3.4161696434020996, + "learning_rate": 
2.825594694120529e-06, + "loss": 1.1409, + "step": 12154 + }, + { + "epoch": 0.7621167471314816, + "grad_norm": 3.4368550777435303, + "learning_rate": 2.8241801714078254e-06, + "loss": 1.1297, + "step": 12155 + }, + { + "epoch": 0.7621794469872719, + "grad_norm": 3.78859806060791, + "learning_rate": 2.822765944626248e-06, + "loss": 1.0145, + "step": 12156 + }, + { + "epoch": 0.7622421468430622, + "grad_norm": 3.5121400356292725, + "learning_rate": 2.8213520138341154e-06, + "loss": 1.1103, + "step": 12157 + }, + { + "epoch": 0.7623048466988526, + "grad_norm": 3.9396770000457764, + "learning_rate": 2.8199383790897405e-06, + "loss": 1.1671, + "step": 12158 + }, + { + "epoch": 0.7623675465546429, + "grad_norm": 3.3828439712524414, + "learning_rate": 2.8185250404514164e-06, + "loss": 1.123, + "step": 12159 + }, + { + "epoch": 0.7624302464104332, + "grad_norm": 3.4360885620117188, + "learning_rate": 2.8171119979774365e-06, + "loss": 1.1782, + "step": 12160 + }, + { + "epoch": 0.7624929462662235, + "grad_norm": 3.3542699813842773, + "learning_rate": 2.815699251726072e-06, + "loss": 1.0828, + "step": 12161 + }, + { + "epoch": 0.7625556461220139, + "grad_norm": 3.6525890827178955, + "learning_rate": 2.8142868017555803e-06, + "loss": 1.1071, + "step": 12162 + }, + { + "epoch": 0.7626183459778042, + "grad_norm": 3.509646415710449, + "learning_rate": 2.812874648124219e-06, + "loss": 0.9586, + "step": 12163 + }, + { + "epoch": 0.7626810458335945, + "grad_norm": 3.6270151138305664, + "learning_rate": 2.8114627908902194e-06, + "loss": 1.1392, + "step": 12164 + }, + { + "epoch": 0.762743745689385, + "grad_norm": 3.4023733139038086, + "learning_rate": 2.8100512301118087e-06, + "loss": 1.1179, + "step": 12165 + }, + { + "epoch": 0.7628064455451753, + "grad_norm": 3.6094002723693848, + "learning_rate": 2.8086399658471965e-06, + "loss": 0.9196, + "step": 12166 + }, + { + "epoch": 0.7628691454009656, + "grad_norm": 3.3999924659729004, + "learning_rate": 2.80722899815459e-06, + "loss": 1.1021, + "step": 12167 + }, + { + "epoch": 0.762931845256756, + "grad_norm": 3.3847270011901855, + "learning_rate": 2.8058183270921724e-06, + "loss": 1.2512, + "step": 12168 + }, + { + "epoch": 0.7629945451125463, + "grad_norm": 3.1796417236328125, + "learning_rate": 2.804407952718119e-06, + "loss": 1.158, + "step": 12169 + }, + { + "epoch": 0.7630572449683366, + "grad_norm": 3.5086915493011475, + "learning_rate": 2.8029978750905983e-06, + "loss": 1.1699, + "step": 12170 + }, + { + "epoch": 0.7631199448241269, + "grad_norm": 2.9611477851867676, + "learning_rate": 2.8015880942677596e-06, + "loss": 1.1052, + "step": 12171 + }, + { + "epoch": 0.7631826446799173, + "grad_norm": 3.2617454528808594, + "learning_rate": 2.8001786103077423e-06, + "loss": 1.0458, + "step": 12172 + }, + { + "epoch": 0.7632453445357076, + "grad_norm": 3.56909441947937, + "learning_rate": 2.798769423268671e-06, + "loss": 0.9506, + "step": 12173 + }, + { + "epoch": 0.7633080443914979, + "grad_norm": 3.0267579555511475, + "learning_rate": 2.797360533208666e-06, + "loss": 1.026, + "step": 12174 + }, + { + "epoch": 0.7633707442472882, + "grad_norm": 3.1331310272216797, + "learning_rate": 2.795951940185827e-06, + "loss": 1.2525, + "step": 12175 + }, + { + "epoch": 0.7634334441030786, + "grad_norm": 3.2109878063201904, + "learning_rate": 2.7945436442582443e-06, + "loss": 1.0522, + "step": 12176 + }, + { + "epoch": 0.7634961439588689, + "grad_norm": 3.762552499771118, + "learning_rate": 2.793135645483993e-06, + "loss": 1.0764, + "step": 12177 + }, + { + "epoch": 
0.7635588438146592, + "grad_norm": 3.1134698390960693, + "learning_rate": 2.7917279439211463e-06, + "loss": 1.069, + "step": 12178 + }, + { + "epoch": 0.7636215436704495, + "grad_norm": 3.5952115058898926, + "learning_rate": 2.7903205396277546e-06, + "loss": 1.1344, + "step": 12179 + }, + { + "epoch": 0.7636842435262399, + "grad_norm": 3.0016160011291504, + "learning_rate": 2.788913432661854e-06, + "loss": 1.1683, + "step": 12180 + }, + { + "epoch": 0.7637469433820302, + "grad_norm": 3.09151029586792, + "learning_rate": 2.7875066230814816e-06, + "loss": 1.1102, + "step": 12181 + }, + { + "epoch": 0.7638096432378205, + "grad_norm": 3.5022225379943848, + "learning_rate": 2.786100110944652e-06, + "loss": 1.0766, + "step": 12182 + }, + { + "epoch": 0.7638723430936109, + "grad_norm": 3.287440299987793, + "learning_rate": 2.7846938963093683e-06, + "loss": 1.0416, + "step": 12183 + }, + { + "epoch": 0.7639350429494012, + "grad_norm": 3.4411585330963135, + "learning_rate": 2.783287979233621e-06, + "loss": 1.2326, + "step": 12184 + }, + { + "epoch": 0.7639977428051915, + "grad_norm": 3.0693576335906982, + "learning_rate": 2.781882359775395e-06, + "loss": 1.0964, + "step": 12185 + }, + { + "epoch": 0.7640604426609818, + "grad_norm": 3.274576425552368, + "learning_rate": 2.780477037992656e-06, + "loss": 0.991, + "step": 12186 + }, + { + "epoch": 0.7641231425167722, + "grad_norm": 3.018392324447632, + "learning_rate": 2.779072013943359e-06, + "loss": 1.1026, + "step": 12187 + }, + { + "epoch": 0.7641858423725626, + "grad_norm": 3.2551109790802, + "learning_rate": 2.777667287685445e-06, + "loss": 1.2253, + "step": 12188 + }, + { + "epoch": 0.7642485422283529, + "grad_norm": 2.9865589141845703, + "learning_rate": 2.7762628592768494e-06, + "loss": 1.1755, + "step": 12189 + }, + { + "epoch": 0.7643112420841432, + "grad_norm": 3.4672927856445312, + "learning_rate": 2.7748587287754893e-06, + "loss": 1.1601, + "step": 12190 + }, + { + "epoch": 0.7643739419399336, + "grad_norm": 3.362455368041992, + "learning_rate": 2.7734548962392682e-06, + "loss": 1.0875, + "step": 12191 + }, + { + "epoch": 0.7644366417957239, + "grad_norm": 3.318394660949707, + "learning_rate": 2.7720513617260857e-06, + "loss": 1.0199, + "step": 12192 + }, + { + "epoch": 0.7644993416515142, + "grad_norm": 3.824220895767212, + "learning_rate": 2.77064812529382e-06, + "loss": 1.0685, + "step": 12193 + }, + { + "epoch": 0.7645620415073046, + "grad_norm": 3.405585765838623, + "learning_rate": 2.7692451870003424e-06, + "loss": 1.0622, + "step": 12194 + }, + { + "epoch": 0.7646247413630949, + "grad_norm": 3.7013158798217773, + "learning_rate": 2.7678425469035054e-06, + "loss": 1.1233, + "step": 12195 + }, + { + "epoch": 0.7646874412188852, + "grad_norm": 3.5551095008850098, + "learning_rate": 2.7664402050611604e-06, + "loss": 1.0435, + "step": 12196 + }, + { + "epoch": 0.7647501410746755, + "grad_norm": 3.5482382774353027, + "learning_rate": 2.7650381615311384e-06, + "loss": 0.9319, + "step": 12197 + }, + { + "epoch": 0.7648128409304659, + "grad_norm": 3.2312302589416504, + "learning_rate": 2.763636416371257e-06, + "loss": 1.1081, + "step": 12198 + }, + { + "epoch": 0.7648755407862562, + "grad_norm": 3.1825852394104004, + "learning_rate": 2.7622349696393236e-06, + "loss": 1.1706, + "step": 12199 + }, + { + "epoch": 0.7649382406420465, + "grad_norm": 4.115033149719238, + "learning_rate": 2.7608338213931394e-06, + "loss": 1.1175, + "step": 12200 + }, + { + "epoch": 0.7650009404978368, + "grad_norm": 3.196488380432129, + "learning_rate": 
2.7594329716904844e-06, + "loss": 1.1075, + "step": 12201 + }, + { + "epoch": 0.7650636403536272, + "grad_norm": 3.12990403175354, + "learning_rate": 2.758032420589126e-06, + "loss": 1.1799, + "step": 12202 + }, + { + "epoch": 0.7651263402094175, + "grad_norm": 3.6380629539489746, + "learning_rate": 2.7566321681468313e-06, + "loss": 0.992, + "step": 12203 + }, + { + "epoch": 0.7651890400652078, + "grad_norm": 3.239530086517334, + "learning_rate": 2.7552322144213405e-06, + "loss": 1.1187, + "step": 12204 + }, + { + "epoch": 0.7652517399209982, + "grad_norm": 3.331346273422241, + "learning_rate": 2.7538325594703907e-06, + "loss": 1.1965, + "step": 12205 + }, + { + "epoch": 0.7653144397767885, + "grad_norm": 3.1765079498291016, + "learning_rate": 2.7524332033517e-06, + "loss": 1.1268, + "step": 12206 + }, + { + "epoch": 0.7653771396325788, + "grad_norm": 3.187351703643799, + "learning_rate": 2.751034146122983e-06, + "loss": 1.0215, + "step": 12207 + }, + { + "epoch": 0.7654398394883691, + "grad_norm": 3.3743889331817627, + "learning_rate": 2.7496353878419334e-06, + "loss": 0.9927, + "step": 12208 + }, + { + "epoch": 0.7655025393441595, + "grad_norm": 3.3904366493225098, + "learning_rate": 2.748236928566238e-06, + "loss": 1.1268, + "step": 12209 + }, + { + "epoch": 0.7655652391999498, + "grad_norm": 3.1912660598754883, + "learning_rate": 2.7468387683535657e-06, + "loss": 0.9745, + "step": 12210 + }, + { + "epoch": 0.7656279390557402, + "grad_norm": 3.6852800846099854, + "learning_rate": 2.745440907261582e-06, + "loss": 1.1895, + "step": 12211 + }, + { + "epoch": 0.7656906389115306, + "grad_norm": 3.2604024410247803, + "learning_rate": 2.7440433453479322e-06, + "loss": 1.0231, + "step": 12212 + }, + { + "epoch": 0.7657533387673209, + "grad_norm": 3.802734851837158, + "learning_rate": 2.742646082670248e-06, + "loss": 1.105, + "step": 12213 + }, + { + "epoch": 0.7658160386231112, + "grad_norm": 3.6218860149383545, + "learning_rate": 2.74124911928616e-06, + "loss": 1.2395, + "step": 12214 + }, + { + "epoch": 0.7658787384789015, + "grad_norm": 3.302478075027466, + "learning_rate": 2.7398524552532745e-06, + "loss": 1.01, + "step": 12215 + }, + { + "epoch": 0.7659414383346919, + "grad_norm": 3.0680387020111084, + "learning_rate": 2.738456090629191e-06, + "loss": 1.0617, + "step": 12216 + }, + { + "epoch": 0.7660041381904822, + "grad_norm": 3.2441179752349854, + "learning_rate": 2.7370600254714943e-06, + "loss": 0.9767, + "step": 12217 + }, + { + "epoch": 0.7660668380462725, + "grad_norm": 3.1863582134246826, + "learning_rate": 2.7356642598377604e-06, + "loss": 1.2017, + "step": 12218 + }, + { + "epoch": 0.7661295379020628, + "grad_norm": 3.2951231002807617, + "learning_rate": 2.7342687937855494e-06, + "loss": 0.873, + "step": 12219 + }, + { + "epoch": 0.7661922377578532, + "grad_norm": 3.392439365386963, + "learning_rate": 2.732873627372409e-06, + "loss": 0.8763, + "step": 12220 + }, + { + "epoch": 0.7662549376136435, + "grad_norm": 3.236065626144409, + "learning_rate": 2.73147876065588e-06, + "loss": 1.2223, + "step": 12221 + }, + { + "epoch": 0.7663176374694338, + "grad_norm": 3.6682229042053223, + "learning_rate": 2.730084193693484e-06, + "loss": 1.0981, + "step": 12222 + }, + { + "epoch": 0.7663803373252241, + "grad_norm": 3.60750150680542, + "learning_rate": 2.7286899265427337e-06, + "loss": 1.1323, + "step": 12223 + }, + { + "epoch": 0.7664430371810145, + "grad_norm": 3.538682460784912, + "learning_rate": 2.727295959261125e-06, + "loss": 1.1186, + "step": 12224 + }, + { + "epoch": 
0.7665057370368048, + "grad_norm": 3.395911455154419, + "learning_rate": 2.7259022919061516e-06, + "loss": 0.9157, + "step": 12225 + }, + { + "epoch": 0.7665684368925951, + "grad_norm": 3.2934067249298096, + "learning_rate": 2.7245089245352864e-06, + "loss": 1.0553, + "step": 12226 + }, + { + "epoch": 0.7666311367483855, + "grad_norm": 3.1830739974975586, + "learning_rate": 2.723115857205989e-06, + "loss": 1.1898, + "step": 12227 + }, + { + "epoch": 0.7666938366041758, + "grad_norm": 3.387375831604004, + "learning_rate": 2.7217230899757095e-06, + "loss": 1.0471, + "step": 12228 + }, + { + "epoch": 0.7667565364599661, + "grad_norm": 3.9306492805480957, + "learning_rate": 2.720330622901891e-06, + "loss": 1.0697, + "step": 12229 + }, + { + "epoch": 0.7668192363157564, + "grad_norm": 3.5967118740081787, + "learning_rate": 2.7189384560419553e-06, + "loss": 1.0549, + "step": 12230 + }, + { + "epoch": 0.7668819361715468, + "grad_norm": 3.3770394325256348, + "learning_rate": 2.7175465894533114e-06, + "loss": 0.9131, + "step": 12231 + }, + { + "epoch": 0.7669446360273371, + "grad_norm": 2.8158090114593506, + "learning_rate": 2.7161550231933687e-06, + "loss": 1.2207, + "step": 12232 + }, + { + "epoch": 0.7670073358831274, + "grad_norm": 2.979921817779541, + "learning_rate": 2.7147637573195105e-06, + "loss": 1.1342, + "step": 12233 + }, + { + "epoch": 0.7670700357389179, + "grad_norm": 3.4235920906066895, + "learning_rate": 2.713372791889113e-06, + "loss": 1.1173, + "step": 12234 + }, + { + "epoch": 0.7671327355947082, + "grad_norm": 3.225320339202881, + "learning_rate": 2.711982126959537e-06, + "loss": 1.1557, + "step": 12235 + }, + { + "epoch": 0.7671954354504985, + "grad_norm": 3.599571704864502, + "learning_rate": 2.7105917625881385e-06, + "loss": 1.1397, + "step": 12236 + }, + { + "epoch": 0.7672581353062888, + "grad_norm": 3.4886817932128906, + "learning_rate": 2.7092016988322546e-06, + "loss": 0.9525, + "step": 12237 + }, + { + "epoch": 0.7673208351620792, + "grad_norm": 3.245490312576294, + "learning_rate": 2.7078119357492116e-06, + "loss": 1.0022, + "step": 12238 + }, + { + "epoch": 0.7673835350178695, + "grad_norm": 3.551117420196533, + "learning_rate": 2.7064224733963197e-06, + "loss": 0.9787, + "step": 12239 + }, + { + "epoch": 0.7674462348736598, + "grad_norm": 3.525808095932007, + "learning_rate": 2.7050333118308857e-06, + "loss": 1.083, + "step": 12240 + }, + { + "epoch": 0.7675089347294501, + "grad_norm": 3.3566348552703857, + "learning_rate": 2.7036444511101967e-06, + "loss": 1.1565, + "step": 12241 + }, + { + "epoch": 0.7675716345852405, + "grad_norm": 3.2628462314605713, + "learning_rate": 2.7022558912915264e-06, + "loss": 1.1581, + "step": 12242 + }, + { + "epoch": 0.7676343344410308, + "grad_norm": 3.4229555130004883, + "learning_rate": 2.700867632432145e-06, + "loss": 1.026, + "step": 12243 + }, + { + "epoch": 0.7676970342968211, + "grad_norm": 3.965223550796509, + "learning_rate": 2.6994796745893e-06, + "loss": 1.1029, + "step": 12244 + }, + { + "epoch": 0.7677597341526115, + "grad_norm": 3.7161383628845215, + "learning_rate": 2.6980920178202343e-06, + "loss": 1.0139, + "step": 12245 + }, + { + "epoch": 0.7678224340084018, + "grad_norm": 3.1546366214752197, + "learning_rate": 2.696704662182168e-06, + "loss": 1.2109, + "step": 12246 + }, + { + "epoch": 0.7678851338641921, + "grad_norm": 2.991692066192627, + "learning_rate": 2.695317607732324e-06, + "loss": 1.0948, + "step": 12247 + }, + { + "epoch": 0.7679478337199824, + "grad_norm": 3.55993914604187, + "learning_rate": 
2.6939308545279006e-06, + "loss": 1.1863, + "step": 12248 + }, + { + "epoch": 0.7680105335757728, + "grad_norm": 3.0114669799804688, + "learning_rate": 2.6925444026260873e-06, + "loss": 1.1501, + "step": 12249 + }, + { + "epoch": 0.7680732334315631, + "grad_norm": 3.4972145557403564, + "learning_rate": 2.6911582520840595e-06, + "loss": 1.1065, + "step": 12250 + }, + { + "epoch": 0.7681359332873534, + "grad_norm": 3.69509220123291, + "learning_rate": 2.689772402958988e-06, + "loss": 1.1038, + "step": 12251 + }, + { + "epoch": 0.7681986331431437, + "grad_norm": 3.5462725162506104, + "learning_rate": 2.6883868553080217e-06, + "loss": 1.0673, + "step": 12252 + }, + { + "epoch": 0.7682613329989341, + "grad_norm": 3.3086984157562256, + "learning_rate": 2.6870016091882968e-06, + "loss": 1.0139, + "step": 12253 + }, + { + "epoch": 0.7683240328547244, + "grad_norm": 3.1507327556610107, + "learning_rate": 2.685616664656949e-06, + "loss": 1.0266, + "step": 12254 + }, + { + "epoch": 0.7683867327105147, + "grad_norm": 3.243816375732422, + "learning_rate": 2.684232021771088e-06, + "loss": 1.0568, + "step": 12255 + }, + { + "epoch": 0.768449432566305, + "grad_norm": 3.53019118309021, + "learning_rate": 2.682847680587819e-06, + "loss": 1.1601, + "step": 12256 + }, + { + "epoch": 0.7685121324220955, + "grad_norm": 3.186089038848877, + "learning_rate": 2.681463641164228e-06, + "loss": 1.078, + "step": 12257 + }, + { + "epoch": 0.7685748322778858, + "grad_norm": 3.436988592147827, + "learning_rate": 2.6800799035574e-06, + "loss": 1.0762, + "step": 12258 + }, + { + "epoch": 0.7686375321336761, + "grad_norm": 3.442629337310791, + "learning_rate": 2.6786964678243954e-06, + "loss": 1.161, + "step": 12259 + }, + { + "epoch": 0.7687002319894665, + "grad_norm": 3.2751541137695312, + "learning_rate": 2.6773133340222677e-06, + "loss": 1.0793, + "step": 12260 + }, + { + "epoch": 0.7687629318452568, + "grad_norm": 3.265838861465454, + "learning_rate": 2.6759305022080563e-06, + "loss": 1.1571, + "step": 12261 + }, + { + "epoch": 0.7688256317010471, + "grad_norm": 3.600883960723877, + "learning_rate": 2.6745479724387922e-06, + "loss": 1.108, + "step": 12262 + }, + { + "epoch": 0.7688883315568374, + "grad_norm": 3.16493821144104, + "learning_rate": 2.673165744771491e-06, + "loss": 1.0091, + "step": 12263 + }, + { + "epoch": 0.7689510314126278, + "grad_norm": 3.3118207454681396, + "learning_rate": 2.6717838192631505e-06, + "loss": 1.1084, + "step": 12264 + }, + { + "epoch": 0.7690137312684181, + "grad_norm": 3.031716823577881, + "learning_rate": 2.6704021959707695e-06, + "loss": 0.9864, + "step": 12265 + }, + { + "epoch": 0.7690764311242084, + "grad_norm": 2.919680595397949, + "learning_rate": 2.6690208749513204e-06, + "loss": 1.1523, + "step": 12266 + }, + { + "epoch": 0.7691391309799988, + "grad_norm": 3.2618722915649414, + "learning_rate": 2.667639856261771e-06, + "loss": 1.0989, + "step": 12267 + }, + { + "epoch": 0.7692018308357891, + "grad_norm": 3.410761833190918, + "learning_rate": 2.666259139959072e-06, + "loss": 1.0435, + "step": 12268 + }, + { + "epoch": 0.7692645306915794, + "grad_norm": 3.2986388206481934, + "learning_rate": 2.6648787261001686e-06, + "loss": 1.1396, + "step": 12269 + }, + { + "epoch": 0.7693272305473697, + "grad_norm": 3.3496744632720947, + "learning_rate": 2.663498614741987e-06, + "loss": 0.9596, + "step": 12270 + }, + { + "epoch": 0.7693899304031601, + "grad_norm": 3.5547943115234375, + "learning_rate": 2.662118805941444e-06, + "loss": 0.9823, + "step": 12271 + }, + { + "epoch": 
0.7694526302589504, + "grad_norm": 3.2487528324127197, + "learning_rate": 2.660739299755437e-06, + "loss": 1.1892, + "step": 12272 + }, + { + "epoch": 0.7695153301147407, + "grad_norm": 3.3159451484680176, + "learning_rate": 2.659360096240866e-06, + "loss": 1.1702, + "step": 12273 + }, + { + "epoch": 0.769578029970531, + "grad_norm": 3.3395626544952393, + "learning_rate": 2.6579811954546054e-06, + "loss": 1.0677, + "step": 12274 + }, + { + "epoch": 0.7696407298263214, + "grad_norm": 3.281986713409424, + "learning_rate": 2.6566025974535172e-06, + "loss": 1.1189, + "step": 12275 + }, + { + "epoch": 0.7697034296821117, + "grad_norm": 3.2491817474365234, + "learning_rate": 2.655224302294461e-06, + "loss": 1.0477, + "step": 12276 + }, + { + "epoch": 0.769766129537902, + "grad_norm": 3.5773069858551025, + "learning_rate": 2.6538463100342773e-06, + "loss": 0.9998, + "step": 12277 + }, + { + "epoch": 0.7698288293936923, + "grad_norm": 3.218341112136841, + "learning_rate": 2.652468620729791e-06, + "loss": 0.9585, + "step": 12278 + }, + { + "epoch": 0.7698915292494827, + "grad_norm": 3.572158098220825, + "learning_rate": 2.651091234437817e-06, + "loss": 1.0636, + "step": 12279 + }, + { + "epoch": 0.769954229105273, + "grad_norm": 3.5910494327545166, + "learning_rate": 2.6497141512151646e-06, + "loss": 0.9827, + "step": 12280 + }, + { + "epoch": 0.7700169289610634, + "grad_norm": 3.5263874530792236, + "learning_rate": 2.6483373711186213e-06, + "loss": 0.963, + "step": 12281 + }, + { + "epoch": 0.7700796288168538, + "grad_norm": 3.4305851459503174, + "learning_rate": 2.646960894204963e-06, + "loss": 0.9429, + "step": 12282 + }, + { + "epoch": 0.7701423286726441, + "grad_norm": 3.400188684463501, + "learning_rate": 2.6455847205309615e-06, + "loss": 1.0897, + "step": 12283 + }, + { + "epoch": 0.7702050285284344, + "grad_norm": 3.376370668411255, + "learning_rate": 2.644208850153368e-06, + "loss": 1.0149, + "step": 12284 + }, + { + "epoch": 0.7702677283842247, + "grad_norm": 3.3898749351501465, + "learning_rate": 2.6428332831289214e-06, + "loss": 1.0086, + "step": 12285 + }, + { + "epoch": 0.7703304282400151, + "grad_norm": 3.377811908721924, + "learning_rate": 2.6414580195143492e-06, + "loss": 1.2604, + "step": 12286 + }, + { + "epoch": 0.7703931280958054, + "grad_norm": 3.287362813949585, + "learning_rate": 2.640083059366373e-06, + "loss": 1.0614, + "step": 12287 + }, + { + "epoch": 0.7704558279515957, + "grad_norm": 3.61318302154541, + "learning_rate": 2.638708402741692e-06, + "loss": 1.1013, + "step": 12288 + }, + { + "epoch": 0.7705185278073861, + "grad_norm": 3.396369695663452, + "learning_rate": 2.637334049696999e-06, + "loss": 1.0706, + "step": 12289 + }, + { + "epoch": 0.7705812276631764, + "grad_norm": 3.4231042861938477, + "learning_rate": 2.6359600002889674e-06, + "loss": 1.0428, + "step": 12290 + }, + { + "epoch": 0.7706439275189667, + "grad_norm": 3.6390154361724854, + "learning_rate": 2.6345862545742706e-06, + "loss": 1.0896, + "step": 12291 + }, + { + "epoch": 0.770706627374757, + "grad_norm": 3.7796695232391357, + "learning_rate": 2.633212812609558e-06, + "loss": 0.9589, + "step": 12292 + }, + { + "epoch": 0.7707693272305474, + "grad_norm": 3.3501100540161133, + "learning_rate": 2.6318396744514674e-06, + "loss": 1.1882, + "step": 12293 + }, + { + "epoch": 0.7708320270863377, + "grad_norm": 3.346022129058838, + "learning_rate": 2.6304668401566334e-06, + "loss": 1.1013, + "step": 12294 + }, + { + "epoch": 0.770894726942128, + "grad_norm": 3.3046915531158447, + "learning_rate": 
2.629094309781669e-06, + "loss": 1.0945, + "step": 12295 + }, + { + "epoch": 0.7709574267979183, + "grad_norm": 3.174372911453247, + "learning_rate": 2.6277220833831774e-06, + "loss": 1.1974, + "step": 12296 + }, + { + "epoch": 0.7710201266537087, + "grad_norm": 3.1010851860046387, + "learning_rate": 2.6263501610177457e-06, + "loss": 1.1763, + "step": 12297 + }, + { + "epoch": 0.771082826509499, + "grad_norm": 3.3931527137756348, + "learning_rate": 2.6249785427419582e-06, + "loss": 1.1913, + "step": 12298 + }, + { + "epoch": 0.7711455263652893, + "grad_norm": 3.080960750579834, + "learning_rate": 2.6236072286123783e-06, + "loss": 1.1134, + "step": 12299 + }, + { + "epoch": 0.7712082262210797, + "grad_norm": 3.6404240131378174, + "learning_rate": 2.6222362186855588e-06, + "loss": 1.0508, + "step": 12300 + }, + { + "epoch": 0.77127092607687, + "grad_norm": 3.1791164875030518, + "learning_rate": 2.620865513018036e-06, + "loss": 1.0979, + "step": 12301 + }, + { + "epoch": 0.7713336259326603, + "grad_norm": 3.403859853744507, + "learning_rate": 2.6194951116663445e-06, + "loss": 1.0846, + "step": 12302 + }, + { + "epoch": 0.7713963257884506, + "grad_norm": 3.6001720428466797, + "learning_rate": 2.6181250146869974e-06, + "loss": 1.0719, + "step": 12303 + }, + { + "epoch": 0.7714590256442411, + "grad_norm": 3.2361373901367188, + "learning_rate": 2.6167552221364935e-06, + "loss": 1.088, + "step": 12304 + }, + { + "epoch": 0.7715217255000314, + "grad_norm": 3.7889299392700195, + "learning_rate": 2.615385734071331e-06, + "loss": 0.9191, + "step": 12305 + }, + { + "epoch": 0.7715844253558217, + "grad_norm": 3.281670093536377, + "learning_rate": 2.614016550547982e-06, + "loss": 0.9137, + "step": 12306 + }, + { + "epoch": 0.771647125211612, + "grad_norm": 3.522022247314453, + "learning_rate": 2.612647671622913e-06, + "loss": 1.0764, + "step": 12307 + }, + { + "epoch": 0.7717098250674024, + "grad_norm": 3.682218551635742, + "learning_rate": 2.611279097352574e-06, + "loss": 1.1671, + "step": 12308 + }, + { + "epoch": 0.7717725249231927, + "grad_norm": 3.280787944793701, + "learning_rate": 2.6099108277934105e-06, + "loss": 1.0224, + "step": 12309 + }, + { + "epoch": 0.771835224778983, + "grad_norm": 3.0641047954559326, + "learning_rate": 2.6085428630018472e-06, + "loss": 1.0861, + "step": 12310 + }, + { + "epoch": 0.7718979246347734, + "grad_norm": 3.1359126567840576, + "learning_rate": 2.607175203034299e-06, + "loss": 1.1825, + "step": 12311 + }, + { + "epoch": 0.7719606244905637, + "grad_norm": 3.1196141242980957, + "learning_rate": 2.605807847947166e-06, + "loss": 1.1134, + "step": 12312 + }, + { + "epoch": 0.772023324346354, + "grad_norm": 3.5255253314971924, + "learning_rate": 2.604440797796842e-06, + "loss": 1.0047, + "step": 12313 + }, + { + "epoch": 0.7720860242021443, + "grad_norm": 3.423088788986206, + "learning_rate": 2.6030740526397034e-06, + "loss": 1.0244, + "step": 12314 + }, + { + "epoch": 0.7721487240579347, + "grad_norm": 3.1604154109954834, + "learning_rate": 2.6017076125321104e-06, + "loss": 1.0072, + "step": 12315 + }, + { + "epoch": 0.772211423913725, + "grad_norm": 3.3490476608276367, + "learning_rate": 2.6003414775304225e-06, + "loss": 1.097, + "step": 12316 + }, + { + "epoch": 0.7722741237695153, + "grad_norm": 3.2551352977752686, + "learning_rate": 2.5989756476909746e-06, + "loss": 1.1638, + "step": 12317 + }, + { + "epoch": 0.7723368236253056, + "grad_norm": 3.4762027263641357, + "learning_rate": 2.5976101230700936e-06, + "loss": 1.0863, + "step": 12318 + }, + { + "epoch": 
0.772399523481096, + "grad_norm": 3.3662989139556885, + "learning_rate": 2.5962449037240922e-06, + "loss": 1.0962, + "step": 12319 + }, + { + "epoch": 0.7724622233368863, + "grad_norm": 3.5273964405059814, + "learning_rate": 2.594879989709277e-06, + "loss": 1.2635, + "step": 12320 + }, + { + "epoch": 0.7725249231926766, + "grad_norm": 3.0450379848480225, + "learning_rate": 2.593515381081935e-06, + "loss": 1.1455, + "step": 12321 + }, + { + "epoch": 0.772587623048467, + "grad_norm": 3.250736713409424, + "learning_rate": 2.5921510778983405e-06, + "loss": 1.0297, + "step": 12322 + }, + { + "epoch": 0.7726503229042573, + "grad_norm": 3.14963960647583, + "learning_rate": 2.590787080214757e-06, + "loss": 1.1705, + "step": 12323 + }, + { + "epoch": 0.7727130227600476, + "grad_norm": 3.254626512527466, + "learning_rate": 2.5894233880874386e-06, + "loss": 1.0654, + "step": 12324 + }, + { + "epoch": 0.7727757226158379, + "grad_norm": 3.219783067703247, + "learning_rate": 2.5880600015726255e-06, + "loss": 1.0568, + "step": 12325 + }, + { + "epoch": 0.7728384224716283, + "grad_norm": 3.0813519954681396, + "learning_rate": 2.5866969207265412e-06, + "loss": 1.1991, + "step": 12326 + }, + { + "epoch": 0.7729011223274187, + "grad_norm": 3.2441537380218506, + "learning_rate": 2.5853341456053983e-06, + "loss": 1.0547, + "step": 12327 + }, + { + "epoch": 0.772963822183209, + "grad_norm": 3.246407985687256, + "learning_rate": 2.5839716762654e-06, + "loss": 1.0006, + "step": 12328 + }, + { + "epoch": 0.7730265220389994, + "grad_norm": 3.3309900760650635, + "learning_rate": 2.582609512762735e-06, + "loss": 1.1255, + "step": 12329 + }, + { + "epoch": 0.7730892218947897, + "grad_norm": 3.309244155883789, + "learning_rate": 2.581247655153577e-06, + "loss": 1.064, + "step": 12330 + }, + { + "epoch": 0.77315192175058, + "grad_norm": 3.06921124458313, + "learning_rate": 2.5798861034940868e-06, + "loss": 1.1623, + "step": 12331 + }, + { + "epoch": 0.7732146216063703, + "grad_norm": 3.574880361557007, + "learning_rate": 2.578524857840421e-06, + "loss": 1.0835, + "step": 12332 + }, + { + "epoch": 0.7732773214621607, + "grad_norm": 2.9561209678649902, + "learning_rate": 2.577163918248714e-06, + "loss": 1.0989, + "step": 12333 + }, + { + "epoch": 0.773340021317951, + "grad_norm": 3.6327474117279053, + "learning_rate": 2.575803284775088e-06, + "loss": 1.2103, + "step": 12334 + }, + { + "epoch": 0.7734027211737413, + "grad_norm": 3.1398630142211914, + "learning_rate": 2.5744429574756612e-06, + "loss": 1.0288, + "step": 12335 + }, + { + "epoch": 0.7734654210295316, + "grad_norm": 3.1679558753967285, + "learning_rate": 2.5730829364065325e-06, + "loss": 1.1263, + "step": 12336 + }, + { + "epoch": 0.773528120885322, + "grad_norm": 3.2719826698303223, + "learning_rate": 2.5717232216237865e-06, + "loss": 1.1896, + "step": 12337 + }, + { + "epoch": 0.7735908207411123, + "grad_norm": 3.537546396255493, + "learning_rate": 2.5703638131834973e-06, + "loss": 0.9567, + "step": 12338 + }, + { + "epoch": 0.7736535205969026, + "grad_norm": 3.0408055782318115, + "learning_rate": 2.5690047111417316e-06, + "loss": 1.0792, + "step": 12339 + }, + { + "epoch": 0.773716220452693, + "grad_norm": 3.6095616817474365, + "learning_rate": 2.567645915554535e-06, + "loss": 0.9369, + "step": 12340 + }, + { + "epoch": 0.7737789203084833, + "grad_norm": 3.0996971130371094, + "learning_rate": 2.566287426477947e-06, + "loss": 1.2223, + "step": 12341 + }, + { + "epoch": 0.7738416201642736, + "grad_norm": 3.0650687217712402, + "learning_rate": 
2.5649292439679864e-06, + "loss": 0.9977, + "step": 12342 + }, + { + "epoch": 0.7739043200200639, + "grad_norm": 3.6039562225341797, + "learning_rate": 2.5635713680806716e-06, + "loss": 0.9309, + "step": 12343 + }, + { + "epoch": 0.7739670198758543, + "grad_norm": 3.6143951416015625, + "learning_rate": 2.5622137988719985e-06, + "loss": 0.8939, + "step": 12344 + }, + { + "epoch": 0.7740297197316446, + "grad_norm": 3.2152645587921143, + "learning_rate": 2.56085653639795e-06, + "loss": 1.0322, + "step": 12345 + }, + { + "epoch": 0.7740924195874349, + "grad_norm": 3.3236286640167236, + "learning_rate": 2.559499580714506e-06, + "loss": 1.1058, + "step": 12346 + }, + { + "epoch": 0.7741551194432252, + "grad_norm": 3.760202407836914, + "learning_rate": 2.558142931877624e-06, + "loss": 1.0833, + "step": 12347 + }, + { + "epoch": 0.7742178192990156, + "grad_norm": 3.5389792919158936, + "learning_rate": 2.556786589943252e-06, + "loss": 1.2916, + "step": 12348 + }, + { + "epoch": 0.7742805191548059, + "grad_norm": 3.0504677295684814, + "learning_rate": 2.555430554967324e-06, + "loss": 1.1618, + "step": 12349 + }, + { + "epoch": 0.7743432190105963, + "grad_norm": 3.1528027057647705, + "learning_rate": 2.554074827005767e-06, + "loss": 1.0899, + "step": 12350 + }, + { + "epoch": 0.7744059188663867, + "grad_norm": 3.407721519470215, + "learning_rate": 2.55271940611449e-06, + "loss": 1.0789, + "step": 12351 + }, + { + "epoch": 0.774468618722177, + "grad_norm": 3.3344345092773438, + "learning_rate": 2.5513642923493874e-06, + "loss": 0.9778, + "step": 12352 + }, + { + "epoch": 0.7745313185779673, + "grad_norm": 3.7319211959838867, + "learning_rate": 2.550009485766348e-06, + "loss": 1.0708, + "step": 12353 + }, + { + "epoch": 0.7745940184337576, + "grad_norm": 3.968026638031006, + "learning_rate": 2.548654986421244e-06, + "loss": 1.2322, + "step": 12354 + }, + { + "epoch": 0.774656718289548, + "grad_norm": 3.0036211013793945, + "learning_rate": 2.5473007943699335e-06, + "loss": 1.0939, + "step": 12355 + }, + { + "epoch": 0.7747194181453383, + "grad_norm": 3.502791404724121, + "learning_rate": 2.545946909668261e-06, + "loss": 1.1316, + "step": 12356 + }, + { + "epoch": 0.7747821180011286, + "grad_norm": 3.103280782699585, + "learning_rate": 2.544593332372066e-06, + "loss": 1.0966, + "step": 12357 + }, + { + "epoch": 0.7748448178569189, + "grad_norm": 3.4210469722747803, + "learning_rate": 2.543240062537168e-06, + "loss": 1.1707, + "step": 12358 + }, + { + "epoch": 0.7749075177127093, + "grad_norm": 3.257709503173828, + "learning_rate": 2.5418871002193756e-06, + "loss": 1.1167, + "step": 12359 + }, + { + "epoch": 0.7749702175684996, + "grad_norm": 2.9963088035583496, + "learning_rate": 2.540534445474482e-06, + "loss": 1.2117, + "step": 12360 + }, + { + "epoch": 0.7750329174242899, + "grad_norm": 3.4077117443084717, + "learning_rate": 2.539182098358276e-06, + "loss": 0.8272, + "step": 12361 + }, + { + "epoch": 0.7750956172800803, + "grad_norm": 3.462686777114868, + "learning_rate": 2.5378300589265258e-06, + "loss": 1.1335, + "step": 12362 + }, + { + "epoch": 0.7751583171358706, + "grad_norm": 3.2386276721954346, + "learning_rate": 2.536478327234987e-06, + "loss": 1.0643, + "step": 12363 + }, + { + "epoch": 0.7752210169916609, + "grad_norm": 3.2838494777679443, + "learning_rate": 2.535126903339411e-06, + "loss": 0.9652, + "step": 12364 + }, + { + "epoch": 0.7752837168474512, + "grad_norm": 3.4684877395629883, + "learning_rate": 2.5337757872955282e-06, + "loss": 1.0837, + "step": 12365 + }, + { + "epoch": 
0.7753464167032416, + "grad_norm": 3.607701539993286, + "learning_rate": 2.5324249791590573e-06, + "loss": 0.9799, + "step": 12366 + }, + { + "epoch": 0.7754091165590319, + "grad_norm": 3.0521013736724854, + "learning_rate": 2.5310744789857034e-06, + "loss": 1.0459, + "step": 12367 + }, + { + "epoch": 0.7754718164148222, + "grad_norm": 3.8695261478424072, + "learning_rate": 2.529724286831168e-06, + "loss": 1.1831, + "step": 12368 + }, + { + "epoch": 0.7755345162706125, + "grad_norm": 3.3273534774780273, + "learning_rate": 2.528374402751128e-06, + "loss": 1.166, + "step": 12369 + }, + { + "epoch": 0.7755972161264029, + "grad_norm": 3.641791820526123, + "learning_rate": 2.5270248268012544e-06, + "loss": 1.0549, + "step": 12370 + }, + { + "epoch": 0.7756599159821932, + "grad_norm": 3.474200487136841, + "learning_rate": 2.5256755590372006e-06, + "loss": 1.1047, + "step": 12371 + }, + { + "epoch": 0.7757226158379835, + "grad_norm": 3.6859686374664307, + "learning_rate": 2.524326599514616e-06, + "loss": 0.9494, + "step": 12372 + }, + { + "epoch": 0.775785315693774, + "grad_norm": 3.2876856327056885, + "learning_rate": 2.522977948289128e-06, + "loss": 1.107, + "step": 12373 + }, + { + "epoch": 0.7758480155495643, + "grad_norm": 3.323561191558838, + "learning_rate": 2.5216296054163547e-06, + "loss": 0.9563, + "step": 12374 + }, + { + "epoch": 0.7759107154053546, + "grad_norm": 3.565880298614502, + "learning_rate": 2.5202815709519046e-06, + "loss": 1.0532, + "step": 12375 + }, + { + "epoch": 0.7759734152611449, + "grad_norm": 3.485994577407837, + "learning_rate": 2.51893384495137e-06, + "loss": 1.163, + "step": 12376 + }, + { + "epoch": 0.7760361151169353, + "grad_norm": 3.387730598449707, + "learning_rate": 2.5175864274703297e-06, + "loss": 1.1647, + "step": 12377 + }, + { + "epoch": 0.7760988149727256, + "grad_norm": 3.359621286392212, + "learning_rate": 2.5162393185643487e-06, + "loss": 1.0414, + "step": 12378 + }, + { + "epoch": 0.7761615148285159, + "grad_norm": 3.6807878017425537, + "learning_rate": 2.514892518288988e-06, + "loss": 1.0474, + "step": 12379 + }, + { + "epoch": 0.7762242146843062, + "grad_norm": 3.1240384578704834, + "learning_rate": 2.513546026699788e-06, + "loss": 1.1614, + "step": 12380 + }, + { + "epoch": 0.7762869145400966, + "grad_norm": 3.4667890071868896, + "learning_rate": 2.5121998438522755e-06, + "loss": 1.0922, + "step": 12381 + }, + { + "epoch": 0.7763496143958869, + "grad_norm": 3.286383867263794, + "learning_rate": 2.5108539698019663e-06, + "loss": 1.1073, + "step": 12382 + }, + { + "epoch": 0.7764123142516772, + "grad_norm": 3.6425933837890625, + "learning_rate": 2.5095084046043693e-06, + "loss": 1.0343, + "step": 12383 + }, + { + "epoch": 0.7764750141074676, + "grad_norm": 3.310941696166992, + "learning_rate": 2.5081631483149727e-06, + "loss": 0.9791, + "step": 12384 + }, + { + "epoch": 0.7765377139632579, + "grad_norm": 3.2857062816619873, + "learning_rate": 2.5068182009892517e-06, + "loss": 1.1328, + "step": 12385 + }, + { + "epoch": 0.7766004138190482, + "grad_norm": 3.1666932106018066, + "learning_rate": 2.505473562682679e-06, + "loss": 1.0982, + "step": 12386 + }, + { + "epoch": 0.7766631136748385, + "grad_norm": 3.447129964828491, + "learning_rate": 2.5041292334507028e-06, + "loss": 1.1183, + "step": 12387 + }, + { + "epoch": 0.7767258135306289, + "grad_norm": 3.2735915184020996, + "learning_rate": 2.502785213348765e-06, + "loss": 1.1232, + "step": 12388 + }, + { + "epoch": 0.7767885133864192, + "grad_norm": 3.500951051712036, + "learning_rate": 
2.501441502432289e-06, + "loss": 1.0817, + "step": 12389 + }, + { + "epoch": 0.7768512132422095, + "grad_norm": 3.1953887939453125, + "learning_rate": 2.500098100756696e-06, + "loss": 1.0643, + "step": 12390 + }, + { + "epoch": 0.7769139130979998, + "grad_norm": 3.520517587661743, + "learning_rate": 2.4987550083773857e-06, + "loss": 1.1696, + "step": 12391 + }, + { + "epoch": 0.7769766129537902, + "grad_norm": 3.4283974170684814, + "learning_rate": 2.4974122253497456e-06, + "loss": 1.1366, + "step": 12392 + }, + { + "epoch": 0.7770393128095805, + "grad_norm": 3.132706642150879, + "learning_rate": 2.49606975172915e-06, + "loss": 1.1209, + "step": 12393 + }, + { + "epoch": 0.7771020126653708, + "grad_norm": 3.282447338104248, + "learning_rate": 2.4947275875709686e-06, + "loss": 1.1804, + "step": 12394 + }, + { + "epoch": 0.7771647125211611, + "grad_norm": 3.3639655113220215, + "learning_rate": 2.49338573293055e-06, + "loss": 0.9475, + "step": 12395 + }, + { + "epoch": 0.7772274123769516, + "grad_norm": 3.1032025814056396, + "learning_rate": 2.4920441878632273e-06, + "loss": 1.1539, + "step": 12396 + }, + { + "epoch": 0.7772901122327419, + "grad_norm": 3.22430157661438, + "learning_rate": 2.490702952424334e-06, + "loss": 1.128, + "step": 12397 + }, + { + "epoch": 0.7773528120885322, + "grad_norm": 3.3606724739074707, + "learning_rate": 2.489362026669179e-06, + "loss": 1.1944, + "step": 12398 + }, + { + "epoch": 0.7774155119443226, + "grad_norm": 2.8537416458129883, + "learning_rate": 2.488021410653061e-06, + "loss": 1.1738, + "step": 12399 + }, + { + "epoch": 0.7774782118001129, + "grad_norm": 3.1017439365386963, + "learning_rate": 2.4866811044312667e-06, + "loss": 1.0855, + "step": 12400 + }, + { + "epoch": 0.7775409116559032, + "grad_norm": 3.6568410396575928, + "learning_rate": 2.4853411080590727e-06, + "loss": 0.9616, + "step": 12401 + }, + { + "epoch": 0.7776036115116935, + "grad_norm": 3.73966908454895, + "learning_rate": 2.4840014215917417e-06, + "loss": 1.08, + "step": 12402 + }, + { + "epoch": 0.7776663113674839, + "grad_norm": 3.216054916381836, + "learning_rate": 2.4826620450845163e-06, + "loss": 1.1608, + "step": 12403 + }, + { + "epoch": 0.7777290112232742, + "grad_norm": 3.1767899990081787, + "learning_rate": 2.4813229785926397e-06, + "loss": 1.0291, + "step": 12404 + }, + { + "epoch": 0.7777917110790645, + "grad_norm": 3.808311700820923, + "learning_rate": 2.479984222171331e-06, + "loss": 1.1814, + "step": 12405 + }, + { + "epoch": 0.7778544109348549, + "grad_norm": 3.113351821899414, + "learning_rate": 2.478645775875802e-06, + "loss": 1.0636, + "step": 12406 + }, + { + "epoch": 0.7779171107906452, + "grad_norm": 3.328547716140747, + "learning_rate": 2.477307639761246e-06, + "loss": 1.0823, + "step": 12407 + }, + { + "epoch": 0.7779798106464355, + "grad_norm": 3.172133207321167, + "learning_rate": 2.475969813882856e-06, + "loss": 1.1401, + "step": 12408 + }, + { + "epoch": 0.7780425105022258, + "grad_norm": 3.5275380611419678, + "learning_rate": 2.4746322982957984e-06, + "loss": 1.1472, + "step": 12409 + }, + { + "epoch": 0.7781052103580162, + "grad_norm": 3.3238487243652344, + "learning_rate": 2.4732950930552333e-06, + "loss": 1.0425, + "step": 12410 + }, + { + "epoch": 0.7781679102138065, + "grad_norm": 3.3821756839752197, + "learning_rate": 2.4719581982163043e-06, + "loss": 1.1287, + "step": 12411 + }, + { + "epoch": 0.7782306100695968, + "grad_norm": 3.3145482540130615, + "learning_rate": 2.470621613834151e-06, + "loss": 1.242, + "step": 12412 + }, + { + "epoch": 
0.7782933099253871, + "grad_norm": 3.3483142852783203, + "learning_rate": 2.469285339963892e-06, + "loss": 1.0868, + "step": 12413 + }, + { + "epoch": 0.7783560097811775, + "grad_norm": 3.5662801265716553, + "learning_rate": 2.46794937666063e-06, + "loss": 1.145, + "step": 12414 + }, + { + "epoch": 0.7784187096369678, + "grad_norm": 3.333991050720215, + "learning_rate": 2.4666137239794684e-06, + "loss": 1.0219, + "step": 12415 + }, + { + "epoch": 0.7784814094927581, + "grad_norm": 3.297445297241211, + "learning_rate": 2.4652783819754857e-06, + "loss": 1.1469, + "step": 12416 + }, + { + "epoch": 0.7785441093485485, + "grad_norm": 3.6377899646759033, + "learning_rate": 2.4639433507037524e-06, + "loss": 1.1845, + "step": 12417 + }, + { + "epoch": 0.7786068092043388, + "grad_norm": 3.1342697143554688, + "learning_rate": 2.4626086302193207e-06, + "loss": 1.2468, + "step": 12418 + }, + { + "epoch": 0.7786695090601292, + "grad_norm": 3.5380911827087402, + "learning_rate": 2.4612742205772412e-06, + "loss": 1.1558, + "step": 12419 + }, + { + "epoch": 0.7787322089159195, + "grad_norm": 3.384580373764038, + "learning_rate": 2.459940121832541e-06, + "loss": 1.066, + "step": 12420 + }, + { + "epoch": 0.7787949087717099, + "grad_norm": 3.437593936920166, + "learning_rate": 2.4586063340402412e-06, + "loss": 0.9823, + "step": 12421 + }, + { + "epoch": 0.7788576086275002, + "grad_norm": 3.496135950088501, + "learning_rate": 2.4572728572553418e-06, + "loss": 1.0392, + "step": 12422 + }, + { + "epoch": 0.7789203084832905, + "grad_norm": 3.401927947998047, + "learning_rate": 2.4559396915328427e-06, + "loss": 1.0906, + "step": 12423 + }, + { + "epoch": 0.7789830083390809, + "grad_norm": 3.4362385272979736, + "learning_rate": 2.45460683692772e-06, + "loss": 0.9998, + "step": 12424 + }, + { + "epoch": 0.7790457081948712, + "grad_norm": 3.388538360595703, + "learning_rate": 2.453274293494937e-06, + "loss": 0.9281, + "step": 12425 + }, + { + "epoch": 0.7791084080506615, + "grad_norm": 3.1970465183258057, + "learning_rate": 2.451942061289455e-06, + "loss": 1.1419, + "step": 12426 + }, + { + "epoch": 0.7791711079064518, + "grad_norm": 3.6848232746124268, + "learning_rate": 2.450610140366212e-06, + "loss": 1.1134, + "step": 12427 + }, + { + "epoch": 0.7792338077622422, + "grad_norm": 3.194664716720581, + "learning_rate": 2.449278530780137e-06, + "loss": 1.168, + "step": 12428 + }, + { + "epoch": 0.7792965076180325, + "grad_norm": 4.0019659996032715, + "learning_rate": 2.447947232586142e-06, + "loss": 1.0623, + "step": 12429 + }, + { + "epoch": 0.7793592074738228, + "grad_norm": 3.3697288036346436, + "learning_rate": 2.4466162458391364e-06, + "loss": 1.1165, + "step": 12430 + }, + { + "epoch": 0.7794219073296131, + "grad_norm": 3.6343350410461426, + "learning_rate": 2.445285570594006e-06, + "loss": 1.0868, + "step": 12431 + }, + { + "epoch": 0.7794846071854035, + "grad_norm": 3.498178005218506, + "learning_rate": 2.4439552069056283e-06, + "loss": 1.1021, + "step": 12432 + }, + { + "epoch": 0.7795473070411938, + "grad_norm": 3.405179023742676, + "learning_rate": 2.442625154828865e-06, + "loss": 1.055, + "step": 12433 + }, + { + "epoch": 0.7796100068969841, + "grad_norm": 3.3449065685272217, + "learning_rate": 2.441295414418574e-06, + "loss": 1.1039, + "step": 12434 + }, + { + "epoch": 0.7796727067527744, + "grad_norm": 3.437969207763672, + "learning_rate": 2.43996598572959e-06, + "loss": 0.9885, + "step": 12435 + }, + { + "epoch": 0.7797354066085648, + "grad_norm": 3.3565075397491455, + "learning_rate": 
2.438636868816735e-06, + "loss": 1.0104, + "step": 12436 + }, + { + "epoch": 0.7797981064643551, + "grad_norm": 3.0801587104797363, + "learning_rate": 2.437308063734829e-06, + "loss": 1.1144, + "step": 12437 + }, + { + "epoch": 0.7798608063201454, + "grad_norm": 3.3925201892852783, + "learning_rate": 2.435979570538669e-06, + "loss": 1.0067, + "step": 12438 + }, + { + "epoch": 0.7799235061759358, + "grad_norm": 3.4773952960968018, + "learning_rate": 2.4346513892830427e-06, + "loss": 1.1731, + "step": 12439 + }, + { + "epoch": 0.7799862060317261, + "grad_norm": 3.659597396850586, + "learning_rate": 2.4333235200227202e-06, + "loss": 0.9384, + "step": 12440 + }, + { + "epoch": 0.7800489058875164, + "grad_norm": 3.1923065185546875, + "learning_rate": 2.431995962812468e-06, + "loss": 1.0883, + "step": 12441 + }, + { + "epoch": 0.7801116057433067, + "grad_norm": 3.784425735473633, + "learning_rate": 2.430668717707033e-06, + "loss": 0.8948, + "step": 12442 + }, + { + "epoch": 0.7801743055990972, + "grad_norm": 3.0717008113861084, + "learning_rate": 2.429341784761151e-06, + "loss": 1.048, + "step": 12443 + }, + { + "epoch": 0.7802370054548875, + "grad_norm": 3.2001547813415527, + "learning_rate": 2.4280151640295422e-06, + "loss": 0.9688, + "step": 12444 + }, + { + "epoch": 0.7802997053106778, + "grad_norm": 3.819972515106201, + "learning_rate": 2.426688855566921e-06, + "loss": 0.9501, + "step": 12445 + }, + { + "epoch": 0.7803624051664682, + "grad_norm": 3.3335022926330566, + "learning_rate": 2.425362859427982e-06, + "loss": 1.0344, + "step": 12446 + }, + { + "epoch": 0.7804251050222585, + "grad_norm": 3.526470899581909, + "learning_rate": 2.4240371756674063e-06, + "loss": 1.0108, + "step": 12447 + }, + { + "epoch": 0.7804878048780488, + "grad_norm": 3.9247493743896484, + "learning_rate": 2.422711804339872e-06, + "loss": 0.9222, + "step": 12448 + }, + { + "epoch": 0.7805505047338391, + "grad_norm": 3.2255678176879883, + "learning_rate": 2.421386745500034e-06, + "loss": 1.0818, + "step": 12449 + }, + { + "epoch": 0.7806132045896295, + "grad_norm": 3.814173936843872, + "learning_rate": 2.4200619992025364e-06, + "loss": 1.102, + "step": 12450 + }, + { + "epoch": 0.7806759044454198, + "grad_norm": 3.355729103088379, + "learning_rate": 2.4187375655020106e-06, + "loss": 1.2254, + "step": 12451 + }, + { + "epoch": 0.7807386043012101, + "grad_norm": 3.423942804336548, + "learning_rate": 2.4174134444530817e-06, + "loss": 1.1352, + "step": 12452 + }, + { + "epoch": 0.7808013041570004, + "grad_norm": 3.7809226512908936, + "learning_rate": 2.4160896361103537e-06, + "loss": 1.0219, + "step": 12453 + }, + { + "epoch": 0.7808640040127908, + "grad_norm": 3.031463623046875, + "learning_rate": 2.4147661405284195e-06, + "loss": 1.1978, + "step": 12454 + }, + { + "epoch": 0.7809267038685811, + "grad_norm": 3.5260298252105713, + "learning_rate": 2.413442957761859e-06, + "loss": 1.0688, + "step": 12455 + }, + { + "epoch": 0.7809894037243714, + "grad_norm": 3.4442129135131836, + "learning_rate": 2.412120087865244e-06, + "loss": 1.1574, + "step": 12456 + }, + { + "epoch": 0.7810521035801617, + "grad_norm": 3.528883457183838, + "learning_rate": 2.4107975308931285e-06, + "loss": 1.0733, + "step": 12457 + }, + { + "epoch": 0.7811148034359521, + "grad_norm": 3.0020432472229004, + "learning_rate": 2.4094752869000516e-06, + "loss": 1.0613, + "step": 12458 + }, + { + "epoch": 0.7811775032917424, + "grad_norm": 3.3815808296203613, + "learning_rate": 2.408153355940549e-06, + "loss": 1.065, + "step": 12459 + }, + { + "epoch": 
0.7812402031475327, + "grad_norm": 3.7759594917297363, + "learning_rate": 2.406831738069132e-06, + "loss": 1.1774, + "step": 12460 + }, + { + "epoch": 0.7813029030033231, + "grad_norm": 3.4382851123809814, + "learning_rate": 2.4055104333403066e-06, + "loss": 1.0719, + "step": 12461 + }, + { + "epoch": 0.7813656028591134, + "grad_norm": 3.3777270317077637, + "learning_rate": 2.404189441808559e-06, + "loss": 1.0246, + "step": 12462 + }, + { + "epoch": 0.7814283027149037, + "grad_norm": 3.3935115337371826, + "learning_rate": 2.4028687635283743e-06, + "loss": 1.1255, + "step": 12463 + }, + { + "epoch": 0.781491002570694, + "grad_norm": 3.268733501434326, + "learning_rate": 2.401548398554213e-06, + "loss": 0.9706, + "step": 12464 + }, + { + "epoch": 0.7815537024264844, + "grad_norm": 2.995455026626587, + "learning_rate": 2.400228346940525e-06, + "loss": 1.1237, + "step": 12465 + }, + { + "epoch": 0.7816164022822748, + "grad_norm": 3.5834622383117676, + "learning_rate": 2.3989086087417547e-06, + "loss": 1.007, + "step": 12466 + }, + { + "epoch": 0.7816791021380651, + "grad_norm": 3.959622383117676, + "learning_rate": 2.3975891840123246e-06, + "loss": 1.1038, + "step": 12467 + }, + { + "epoch": 0.7817418019938555, + "grad_norm": 3.2964956760406494, + "learning_rate": 2.396270072806649e-06, + "loss": 1.134, + "step": 12468 + }, + { + "epoch": 0.7818045018496458, + "grad_norm": 3.542891025543213, + "learning_rate": 2.394951275179125e-06, + "loss": 0.9716, + "step": 12469 + }, + { + "epoch": 0.7818672017054361, + "grad_norm": 3.882935047149658, + "learning_rate": 2.393632791184145e-06, + "loss": 1.0328, + "step": 12470 + }, + { + "epoch": 0.7819299015612264, + "grad_norm": 3.1604409217834473, + "learning_rate": 2.3923146208760816e-06, + "loss": 1.0723, + "step": 12471 + }, + { + "epoch": 0.7819926014170168, + "grad_norm": 3.078596830368042, + "learning_rate": 2.3909967643092947e-06, + "loss": 1.0257, + "step": 12472 + }, + { + "epoch": 0.7820553012728071, + "grad_norm": 3.405953884124756, + "learning_rate": 2.3896792215381315e-06, + "loss": 1.1748, + "step": 12473 + }, + { + "epoch": 0.7821180011285974, + "grad_norm": 3.4401638507843018, + "learning_rate": 2.388361992616931e-06, + "loss": 1.1302, + "step": 12474 + }, + { + "epoch": 0.7821807009843877, + "grad_norm": 3.3509037494659424, + "learning_rate": 2.3870450776000154e-06, + "loss": 0.8773, + "step": 12475 + }, + { + "epoch": 0.7822434008401781, + "grad_norm": 3.0392067432403564, + "learning_rate": 2.38572847654169e-06, + "loss": 1.1549, + "step": 12476 + }, + { + "epoch": 0.7823061006959684, + "grad_norm": 3.1129751205444336, + "learning_rate": 2.384412189496258e-06, + "loss": 1.1932, + "step": 12477 + }, + { + "epoch": 0.7823688005517587, + "grad_norm": 3.282956838607788, + "learning_rate": 2.3830962165179995e-06, + "loss": 0.8582, + "step": 12478 + }, + { + "epoch": 0.782431500407549, + "grad_norm": 3.3133530616760254, + "learning_rate": 2.381780557661186e-06, + "loss": 0.9192, + "step": 12479 + }, + { + "epoch": 0.7824942002633394, + "grad_norm": 3.4198622703552246, + "learning_rate": 2.380465212980072e-06, + "loss": 1.097, + "step": 12480 + }, + { + "epoch": 0.7825569001191297, + "grad_norm": 3.7004237174987793, + "learning_rate": 2.379150182528909e-06, + "loss": 1.0876, + "step": 12481 + }, + { + "epoch": 0.78261959997492, + "grad_norm": 3.0874063968658447, + "learning_rate": 2.377835466361924e-06, + "loss": 1.3179, + "step": 12482 + }, + { + "epoch": 0.7826822998307104, + "grad_norm": 3.7243049144744873, + "learning_rate": 
2.376521064533337e-06, + "loss": 1.0026, + "step": 12483 + }, + { + "epoch": 0.7827449996865007, + "grad_norm": 3.570485830307007, + "learning_rate": 2.375206977097353e-06, + "loss": 1.0422, + "step": 12484 + }, + { + "epoch": 0.782807699542291, + "grad_norm": 3.526928186416626, + "learning_rate": 2.3738932041081675e-06, + "loss": 1.1885, + "step": 12485 + }, + { + "epoch": 0.7828703993980813, + "grad_norm": 3.263690710067749, + "learning_rate": 2.3725797456199596e-06, + "loss": 1.141, + "step": 12486 + }, + { + "epoch": 0.7829330992538717, + "grad_norm": 3.1158363819122314, + "learning_rate": 2.3712666016868937e-06, + "loss": 1.1737, + "step": 12487 + }, + { + "epoch": 0.782995799109662, + "grad_norm": 3.2655298709869385, + "learning_rate": 2.3699537723631296e-06, + "loss": 1.2145, + "step": 12488 + }, + { + "epoch": 0.7830584989654524, + "grad_norm": 3.5925211906433105, + "learning_rate": 2.368641257702804e-06, + "loss": 1.1948, + "step": 12489 + }, + { + "epoch": 0.7831211988212428, + "grad_norm": 3.492729425430298, + "learning_rate": 2.3673290577600474e-06, + "loss": 1.0775, + "step": 12490 + }, + { + "epoch": 0.7831838986770331, + "grad_norm": 3.3862147331237793, + "learning_rate": 2.3660171725889703e-06, + "loss": 1.0742, + "step": 12491 + }, + { + "epoch": 0.7832465985328234, + "grad_norm": 3.367302417755127, + "learning_rate": 2.364705602243681e-06, + "loss": 1.0136, + "step": 12492 + }, + { + "epoch": 0.7833092983886137, + "grad_norm": 3.56354022026062, + "learning_rate": 2.363394346778266e-06, + "loss": 1.1221, + "step": 12493 + }, + { + "epoch": 0.7833719982444041, + "grad_norm": 3.1763291358947754, + "learning_rate": 2.3620834062468025e-06, + "loss": 1.0959, + "step": 12494 + }, + { + "epoch": 0.7834346981001944, + "grad_norm": 3.3902950286865234, + "learning_rate": 2.360772780703349e-06, + "loss": 1.1015, + "step": 12495 + }, + { + "epoch": 0.7834973979559847, + "grad_norm": 3.0284371376037598, + "learning_rate": 2.359462470201963e-06, + "loss": 1.2938, + "step": 12496 + }, + { + "epoch": 0.783560097811775, + "grad_norm": 3.2018582820892334, + "learning_rate": 2.3581524747966776e-06, + "loss": 1.0627, + "step": 12497 + }, + { + "epoch": 0.7836227976675654, + "grad_norm": 3.3972880840301514, + "learning_rate": 2.3568427945415163e-06, + "loss": 1.0371, + "step": 12498 + }, + { + "epoch": 0.7836854975233557, + "grad_norm": 3.6166915893554688, + "learning_rate": 2.355533429490494e-06, + "loss": 1.0352, + "step": 12499 + }, + { + "epoch": 0.783748197379146, + "grad_norm": 3.2935283184051514, + "learning_rate": 2.3542243796976073e-06, + "loss": 0.8958, + "step": 12500 + }, + { + "epoch": 0.7838108972349364, + "grad_norm": 3.3378169536590576, + "learning_rate": 2.352915645216841e-06, + "loss": 0.8626, + "step": 12501 + }, + { + "epoch": 0.7838735970907267, + "grad_norm": 4.026036262512207, + "learning_rate": 2.351607226102164e-06, + "loss": 1.0894, + "step": 12502 + }, + { + "epoch": 0.783936296946517, + "grad_norm": 3.4985342025756836, + "learning_rate": 2.350299122407542e-06, + "loss": 1.2073, + "step": 12503 + }, + { + "epoch": 0.7839989968023073, + "grad_norm": 3.774566411972046, + "learning_rate": 2.34899133418692e-06, + "loss": 0.9997, + "step": 12504 + }, + { + "epoch": 0.7840616966580977, + "grad_norm": 3.051311731338501, + "learning_rate": 2.347683861494228e-06, + "loss": 0.9903, + "step": 12505 + }, + { + "epoch": 0.784124396513888, + "grad_norm": 3.1422226428985596, + "learning_rate": 2.346376704383384e-06, + "loss": 1.0514, + "step": 12506 + }, + { + "epoch": 
0.7841870963696783, + "grad_norm": 3.1492249965667725, + "learning_rate": 2.3450698629083034e-06, + "loss": 1.1164, + "step": 12507 + }, + { + "epoch": 0.7842497962254686, + "grad_norm": 3.120861291885376, + "learning_rate": 2.343763337122875e-06, + "loss": 1.169, + "step": 12508 + }, + { + "epoch": 0.784312496081259, + "grad_norm": 3.3525962829589844, + "learning_rate": 2.342457127080978e-06, + "loss": 1.158, + "step": 12509 + }, + { + "epoch": 0.7843751959370493, + "grad_norm": 3.359788179397583, + "learning_rate": 2.3411512328364872e-06, + "loss": 1.2129, + "step": 12510 + }, + { + "epoch": 0.7844378957928396, + "grad_norm": 3.5804872512817383, + "learning_rate": 2.3398456544432534e-06, + "loss": 1.0147, + "step": 12511 + }, + { + "epoch": 0.7845005956486301, + "grad_norm": 3.7030370235443115, + "learning_rate": 2.3385403919551197e-06, + "loss": 0.9988, + "step": 12512 + }, + { + "epoch": 0.7845632955044204, + "grad_norm": 3.3317008018493652, + "learning_rate": 2.3372354454259114e-06, + "loss": 1.0513, + "step": 12513 + }, + { + "epoch": 0.7846259953602107, + "grad_norm": 3.081756114959717, + "learning_rate": 2.3359308149094505e-06, + "loss": 1.031, + "step": 12514 + }, + { + "epoch": 0.784688695216001, + "grad_norm": 3.402301788330078, + "learning_rate": 2.334626500459539e-06, + "loss": 1.0005, + "step": 12515 + }, + { + "epoch": 0.7847513950717914, + "grad_norm": 3.487267255783081, + "learning_rate": 2.333322502129961e-06, + "loss": 1.0106, + "step": 12516 + }, + { + "epoch": 0.7848140949275817, + "grad_norm": 3.327425479888916, + "learning_rate": 2.332018819974502e-06, + "loss": 1.1678, + "step": 12517 + }, + { + "epoch": 0.784876794783372, + "grad_norm": 3.253270149230957, + "learning_rate": 2.3307154540469213e-06, + "loss": 1.0313, + "step": 12518 + }, + { + "epoch": 0.7849394946391623, + "grad_norm": 2.885838747024536, + "learning_rate": 2.329412404400969e-06, + "loss": 1.0832, + "step": 12519 + }, + { + "epoch": 0.7850021944949527, + "grad_norm": 3.5909016132354736, + "learning_rate": 2.328109671090383e-06, + "loss": 1.1482, + "step": 12520 + }, + { + "epoch": 0.785064894350743, + "grad_norm": 3.0873146057128906, + "learning_rate": 2.3268072541688913e-06, + "loss": 1.125, + "step": 12521 + }, + { + "epoch": 0.7851275942065333, + "grad_norm": 3.1947309970855713, + "learning_rate": 2.325505153690203e-06, + "loss": 1.0554, + "step": 12522 + }, + { + "epoch": 0.7851902940623237, + "grad_norm": 3.768777370452881, + "learning_rate": 2.324203369708018e-06, + "loss": 1.1021, + "step": 12523 + }, + { + "epoch": 0.785252993918114, + "grad_norm": 3.1928324699401855, + "learning_rate": 2.322901902276018e-06, + "loss": 0.9655, + "step": 12524 + }, + { + "epoch": 0.7853156937739043, + "grad_norm": 3.335939884185791, + "learning_rate": 2.321600751447882e-06, + "loss": 1.2173, + "step": 12525 + }, + { + "epoch": 0.7853783936296946, + "grad_norm": 3.6997358798980713, + "learning_rate": 2.3202999172772657e-06, + "loss": 1.1008, + "step": 12526 + }, + { + "epoch": 0.785441093485485, + "grad_norm": 3.387141704559326, + "learning_rate": 2.318999399817814e-06, + "loss": 1.1839, + "step": 12527 + }, + { + "epoch": 0.7855037933412753, + "grad_norm": 3.5722389221191406, + "learning_rate": 2.317699199123166e-06, + "loss": 1.0524, + "step": 12528 + }, + { + "epoch": 0.7855664931970656, + "grad_norm": 2.9718692302703857, + "learning_rate": 2.3163993152469365e-06, + "loss": 1.0611, + "step": 12529 + }, + { + "epoch": 0.785629193052856, + "grad_norm": 3.3150899410247803, + "learning_rate": 
2.3150997482427364e-06, + "loss": 0.968, + "step": 12530 + }, + { + "epoch": 0.7856918929086463, + "grad_norm": 3.749697208404541, + "learning_rate": 2.3138004981641547e-06, + "loss": 1.0757, + "step": 12531 + }, + { + "epoch": 0.7857545927644366, + "grad_norm": 3.750264883041382, + "learning_rate": 2.3125015650647798e-06, + "loss": 1.2096, + "step": 12532 + }, + { + "epoch": 0.7858172926202269, + "grad_norm": 3.6595370769500732, + "learning_rate": 2.3112029489981746e-06, + "loss": 0.9166, + "step": 12533 + }, + { + "epoch": 0.7858799924760173, + "grad_norm": 3.4799160957336426, + "learning_rate": 2.3099046500178968e-06, + "loss": 1.1829, + "step": 12534 + }, + { + "epoch": 0.7859426923318077, + "grad_norm": 3.199392795562744, + "learning_rate": 2.3086066681774833e-06, + "loss": 1.0606, + "step": 12535 + }, + { + "epoch": 0.786005392187598, + "grad_norm": 3.22698974609375, + "learning_rate": 2.30730900353047e-06, + "loss": 1.0157, + "step": 12536 + }, + { + "epoch": 0.7860680920433883, + "grad_norm": 3.05309796333313, + "learning_rate": 2.30601165613037e-06, + "loss": 1.1626, + "step": 12537 + }, + { + "epoch": 0.7861307918991787, + "grad_norm": 3.9373316764831543, + "learning_rate": 2.3047146260306817e-06, + "loss": 1.0453, + "step": 12538 + }, + { + "epoch": 0.786193491754969, + "grad_norm": 3.223097562789917, + "learning_rate": 2.303417913284901e-06, + "loss": 1.1479, + "step": 12539 + }, + { + "epoch": 0.7862561916107593, + "grad_norm": 3.6667540073394775, + "learning_rate": 2.302121517946503e-06, + "loss": 0.9191, + "step": 12540 + }, + { + "epoch": 0.7863188914665497, + "grad_norm": 3.6224496364593506, + "learning_rate": 2.3008254400689477e-06, + "loss": 0.9811, + "step": 12541 + }, + { + "epoch": 0.78638159132234, + "grad_norm": 3.2759392261505127, + "learning_rate": 2.2995296797056864e-06, + "loss": 1.0871, + "step": 12542 + }, + { + "epoch": 0.7864442911781303, + "grad_norm": 3.5103180408477783, + "learning_rate": 2.2982342369101597e-06, + "loss": 1.0295, + "step": 12543 + }, + { + "epoch": 0.7865069910339206, + "grad_norm": 3.358607530593872, + "learning_rate": 2.296939111735789e-06, + "loss": 1.0069, + "step": 12544 + }, + { + "epoch": 0.786569690889711, + "grad_norm": 3.272792100906372, + "learning_rate": 2.2956443042359854e-06, + "loss": 1.0577, + "step": 12545 + }, + { + "epoch": 0.7866323907455013, + "grad_norm": 3.5421903133392334, + "learning_rate": 2.294349814464145e-06, + "loss": 1.2048, + "step": 12546 + }, + { + "epoch": 0.7866950906012916, + "grad_norm": 3.7091290950775146, + "learning_rate": 2.2930556424736574e-06, + "loss": 0.9203, + "step": 12547 + }, + { + "epoch": 0.7867577904570819, + "grad_norm": 3.924792528152466, + "learning_rate": 2.291761788317892e-06, + "loss": 1.1058, + "step": 12548 + }, + { + "epoch": 0.7868204903128723, + "grad_norm": 3.40720534324646, + "learning_rate": 2.290468252050204e-06, + "loss": 1.0872, + "step": 12549 + }, + { + "epoch": 0.7868831901686626, + "grad_norm": 3.6573586463928223, + "learning_rate": 2.289175033723945e-06, + "loss": 0.9946, + "step": 12550 + }, + { + "epoch": 0.7869458900244529, + "grad_norm": 2.9805948734283447, + "learning_rate": 2.287882133392444e-06, + "loss": 1.061, + "step": 12551 + }, + { + "epoch": 0.7870085898802432, + "grad_norm": 3.217132091522217, + "learning_rate": 2.2865895511090197e-06, + "loss": 1.2196, + "step": 12552 + }, + { + "epoch": 0.7870712897360336, + "grad_norm": 3.3004443645477295, + "learning_rate": 2.2852972869269773e-06, + "loss": 1.0048, + "step": 12553 + }, + { + "epoch": 
0.7871339895918239, + "grad_norm": 3.116656541824341, + "learning_rate": 2.2840053408996154e-06, + "loss": 1.0398, + "step": 12554 + }, + { + "epoch": 0.7871966894476142, + "grad_norm": 3.8918845653533936, + "learning_rate": 2.282713713080209e-06, + "loss": 1.0925, + "step": 12555 + }, + { + "epoch": 0.7872593893034046, + "grad_norm": 3.2415900230407715, + "learning_rate": 2.2814224035220268e-06, + "loss": 1.1066, + "step": 12556 + }, + { + "epoch": 0.7873220891591949, + "grad_norm": 3.2484257221221924, + "learning_rate": 2.280131412278318e-06, + "loss": 1.1133, + "step": 12557 + }, + { + "epoch": 0.7873847890149853, + "grad_norm": 3.5317888259887695, + "learning_rate": 2.2788407394023302e-06, + "loss": 1.1697, + "step": 12558 + }, + { + "epoch": 0.7874474888707756, + "grad_norm": 3.408411979675293, + "learning_rate": 2.277550384947288e-06, + "loss": 1.1818, + "step": 12559 + }, + { + "epoch": 0.787510188726566, + "grad_norm": 3.243283271789551, + "learning_rate": 2.2762603489664014e-06, + "loss": 1.1014, + "step": 12560 + }, + { + "epoch": 0.7875728885823563, + "grad_norm": 3.5447044372558594, + "learning_rate": 2.274970631512878e-06, + "loss": 1.1867, + "step": 12561 + }, + { + "epoch": 0.7876355884381466, + "grad_norm": 3.356311559677124, + "learning_rate": 2.2736812326399038e-06, + "loss": 0.9881, + "step": 12562 + }, + { + "epoch": 0.787698288293937, + "grad_norm": 3.445375919342041, + "learning_rate": 2.2723921524006533e-06, + "loss": 1.1484, + "step": 12563 + }, + { + "epoch": 0.7877609881497273, + "grad_norm": 3.18037748336792, + "learning_rate": 2.271103390848285e-06, + "loss": 1.1228, + "step": 12564 + }, + { + "epoch": 0.7878236880055176, + "grad_norm": 3.463627576828003, + "learning_rate": 2.269814948035952e-06, + "loss": 1.1844, + "step": 12565 + }, + { + "epoch": 0.7878863878613079, + "grad_norm": 3.4395334720611572, + "learning_rate": 2.26852682401679e-06, + "loss": 1.0681, + "step": 12566 + }, + { + "epoch": 0.7879490877170983, + "grad_norm": 3.401561737060547, + "learning_rate": 2.2672390188439154e-06, + "loss": 0.9925, + "step": 12567 + }, + { + "epoch": 0.7880117875728886, + "grad_norm": 3.3706727027893066, + "learning_rate": 2.265951532570444e-06, + "loss": 1.1239, + "step": 12568 + }, + { + "epoch": 0.7880744874286789, + "grad_norm": 3.120560884475708, + "learning_rate": 2.2646643652494693e-06, + "loss": 1.1864, + "step": 12569 + }, + { + "epoch": 0.7881371872844692, + "grad_norm": 3.8028340339660645, + "learning_rate": 2.2633775169340744e-06, + "loss": 0.9848, + "step": 12570 + }, + { + "epoch": 0.7881998871402596, + "grad_norm": 3.4350035190582275, + "learning_rate": 2.2620909876773256e-06, + "loss": 1.0018, + "step": 12571 + }, + { + "epoch": 0.7882625869960499, + "grad_norm": 3.441852331161499, + "learning_rate": 2.260804777532285e-06, + "loss": 1.1239, + "step": 12572 + }, + { + "epoch": 0.7883252868518402, + "grad_norm": 3.2598764896392822, + "learning_rate": 2.2595188865519934e-06, + "loss": 1.0469, + "step": 12573 + }, + { + "epoch": 0.7883879867076306, + "grad_norm": 3.29787278175354, + "learning_rate": 2.2582333147894806e-06, + "loss": 1.0324, + "step": 12574 + }, + { + "epoch": 0.7884506865634209, + "grad_norm": 3.344754695892334, + "learning_rate": 2.2569480622977626e-06, + "loss": 1.2023, + "step": 12575 + }, + { + "epoch": 0.7885133864192112, + "grad_norm": 3.4672350883483887, + "learning_rate": 2.2556631291298457e-06, + "loss": 1.1638, + "step": 12576 + }, + { + "epoch": 0.7885760862750015, + "grad_norm": 3.2201881408691406, + "learning_rate": 
2.254378515338721e-06, + "loss": 1.1573, + "step": 12577 + }, + { + "epoch": 0.7886387861307919, + "grad_norm": 3.3951327800750732, + "learning_rate": 2.253094220977361e-06, + "loss": 0.9788, + "step": 12578 + }, + { + "epoch": 0.7887014859865822, + "grad_norm": 3.2105135917663574, + "learning_rate": 2.2518102460987355e-06, + "loss": 1.2136, + "step": 12579 + }, + { + "epoch": 0.7887641858423725, + "grad_norm": 3.354140520095825, + "learning_rate": 2.250526590755796e-06, + "loss": 1.0284, + "step": 12580 + }, + { + "epoch": 0.788826885698163, + "grad_norm": 3.2370402812957764, + "learning_rate": 2.2492432550014763e-06, + "loss": 1.2314, + "step": 12581 + }, + { + "epoch": 0.7888895855539533, + "grad_norm": 3.127902030944824, + "learning_rate": 2.2479602388887013e-06, + "loss": 1.1067, + "step": 12582 + }, + { + "epoch": 0.7889522854097436, + "grad_norm": 3.1246793270111084, + "learning_rate": 2.246677542470388e-06, + "loss": 1.0487, + "step": 12583 + }, + { + "epoch": 0.7890149852655339, + "grad_norm": 3.2480971813201904, + "learning_rate": 2.24539516579943e-06, + "loss": 1.1527, + "step": 12584 + }, + { + "epoch": 0.7890776851213243, + "grad_norm": 3.653884172439575, + "learning_rate": 2.244113108928715e-06, + "loss": 1.0421, + "step": 12585 + }, + { + "epoch": 0.7891403849771146, + "grad_norm": 3.5326356887817383, + "learning_rate": 2.2428313719111118e-06, + "loss": 1.0844, + "step": 12586 + }, + { + "epoch": 0.7892030848329049, + "grad_norm": 3.2987778186798096, + "learning_rate": 2.2415499547994833e-06, + "loss": 1.0556, + "step": 12587 + }, + { + "epoch": 0.7892657846886952, + "grad_norm": 3.9655637741088867, + "learning_rate": 2.2402688576466736e-06, + "loss": 1.046, + "step": 12588 + }, + { + "epoch": 0.7893284845444856, + "grad_norm": 3.4327335357666016, + "learning_rate": 2.238988080505513e-06, + "loss": 1.1281, + "step": 12589 + }, + { + "epoch": 0.7893911844002759, + "grad_norm": 3.290353775024414, + "learning_rate": 2.2377076234288253e-06, + "loss": 1.2142, + "step": 12590 + }, + { + "epoch": 0.7894538842560662, + "grad_norm": 3.281710624694824, + "learning_rate": 2.2364274864694137e-06, + "loss": 1.0039, + "step": 12591 + }, + { + "epoch": 0.7895165841118565, + "grad_norm": 3.2792532444000244, + "learning_rate": 2.235147669680072e-06, + "loss": 0.965, + "step": 12592 + }, + { + "epoch": 0.7895792839676469, + "grad_norm": 3.221928596496582, + "learning_rate": 2.233868173113577e-06, + "loss": 1.1195, + "step": 12593 + }, + { + "epoch": 0.7896419838234372, + "grad_norm": 3.5937387943267822, + "learning_rate": 2.2325889968226997e-06, + "loss": 1.0247, + "step": 12594 + }, + { + "epoch": 0.7897046836792275, + "grad_norm": 3.407571315765381, + "learning_rate": 2.231310140860192e-06, + "loss": 1.189, + "step": 12595 + }, + { + "epoch": 0.7897673835350179, + "grad_norm": 3.485448122024536, + "learning_rate": 2.230031605278793e-06, + "loss": 1.1567, + "step": 12596 + }, + { + "epoch": 0.7898300833908082, + "grad_norm": 3.513066530227661, + "learning_rate": 2.2287533901312273e-06, + "loss": 0.989, + "step": 12597 + }, + { + "epoch": 0.7898927832465985, + "grad_norm": 3.453068494796753, + "learning_rate": 2.227475495470214e-06, + "loss": 1.1253, + "step": 12598 + }, + { + "epoch": 0.7899554831023888, + "grad_norm": 3.1755809783935547, + "learning_rate": 2.2261979213484507e-06, + "loss": 1.0888, + "step": 12599 + }, + { + "epoch": 0.7900181829581792, + "grad_norm": 3.634531259536743, + "learning_rate": 2.224920667818622e-06, + "loss": 1.0936, + "step": 12600 + }, + { + "epoch": 
0.7900808828139695, + "grad_norm": 3.687238931655884, + "learning_rate": 2.2236437349334072e-06, + "loss": 0.8794, + "step": 12601 + }, + { + "epoch": 0.7901435826697598, + "grad_norm": 3.4757931232452393, + "learning_rate": 2.2223671227454636e-06, + "loss": 1.0944, + "step": 12602 + }, + { + "epoch": 0.7902062825255501, + "grad_norm": 3.8378100395202637, + "learning_rate": 2.221090831307441e-06, + "loss": 1.2161, + "step": 12603 + }, + { + "epoch": 0.7902689823813405, + "grad_norm": 3.1692018508911133, + "learning_rate": 2.219814860671968e-06, + "loss": 1.1438, + "step": 12604 + }, + { + "epoch": 0.7903316822371309, + "grad_norm": 3.365345001220703, + "learning_rate": 2.218539210891674e-06, + "loss": 1.1774, + "step": 12605 + }, + { + "epoch": 0.7903943820929212, + "grad_norm": 3.4712610244750977, + "learning_rate": 2.2172638820191616e-06, + "loss": 1.0285, + "step": 12606 + }, + { + "epoch": 0.7904570819487116, + "grad_norm": 3.4143948554992676, + "learning_rate": 2.215988874107027e-06, + "loss": 1.1838, + "step": 12607 + }, + { + "epoch": 0.7905197818045019, + "grad_norm": 3.203950881958008, + "learning_rate": 2.214714187207848e-06, + "loss": 1.2265, + "step": 12608 + }, + { + "epoch": 0.7905824816602922, + "grad_norm": 3.2868666648864746, + "learning_rate": 2.213439821374199e-06, + "loss": 1.2094, + "step": 12609 + }, + { + "epoch": 0.7906451815160825, + "grad_norm": 3.35929536819458, + "learning_rate": 2.2121657766586326e-06, + "loss": 1.2402, + "step": 12610 + }, + { + "epoch": 0.7907078813718729, + "grad_norm": 3.1225876808166504, + "learning_rate": 2.2108920531136855e-06, + "loss": 1.173, + "step": 12611 + }, + { + "epoch": 0.7907705812276632, + "grad_norm": 3.4588093757629395, + "learning_rate": 2.2096186507918937e-06, + "loss": 1.0113, + "step": 12612 + }, + { + "epoch": 0.7908332810834535, + "grad_norm": 3.6603715419769287, + "learning_rate": 2.208345569745769e-06, + "loss": 1.1254, + "step": 12613 + }, + { + "epoch": 0.7908959809392438, + "grad_norm": 3.3078832626342773, + "learning_rate": 2.207072810027814e-06, + "loss": 1.1005, + "step": 12614 + }, + { + "epoch": 0.7909586807950342, + "grad_norm": 3.4937117099761963, + "learning_rate": 2.205800371690513e-06, + "loss": 1.0487, + "step": 12615 + }, + { + "epoch": 0.7910213806508245, + "grad_norm": 3.4497594833374023, + "learning_rate": 2.2045282547863477e-06, + "loss": 0.952, + "step": 12616 + }, + { + "epoch": 0.7910840805066148, + "grad_norm": 3.4229860305786133, + "learning_rate": 2.2032564593677773e-06, + "loss": 1.1144, + "step": 12617 + }, + { + "epoch": 0.7911467803624052, + "grad_norm": 3.3929193019866943, + "learning_rate": 2.2019849854872522e-06, + "loss": 0.9304, + "step": 12618 + }, + { + "epoch": 0.7912094802181955, + "grad_norm": 2.875119924545288, + "learning_rate": 2.2007138331972033e-06, + "loss": 1.214, + "step": 12619 + }, + { + "epoch": 0.7912721800739858, + "grad_norm": 3.181490659713745, + "learning_rate": 2.199443002550059e-06, + "loss": 1.1492, + "step": 12620 + }, + { + "epoch": 0.7913348799297761, + "grad_norm": 3.1071784496307373, + "learning_rate": 2.1981724935982264e-06, + "loss": 1.1112, + "step": 12621 + }, + { + "epoch": 0.7913975797855665, + "grad_norm": 3.406562089920044, + "learning_rate": 2.196902306394099e-06, + "loss": 0.9836, + "step": 12622 + }, + { + "epoch": 0.7914602796413568, + "grad_norm": 3.0681729316711426, + "learning_rate": 2.195632440990062e-06, + "loss": 1.0794, + "step": 12623 + }, + { + "epoch": 0.7915229794971471, + "grad_norm": 3.4325578212738037, + "learning_rate": 
2.1943628974384858e-06, + "loss": 0.8928, + "step": 12624 + }, + { + "epoch": 0.7915856793529374, + "grad_norm": 3.3361403942108154, + "learning_rate": 2.1930936757917232e-06, + "loss": 0.9992, + "step": 12625 + }, + { + "epoch": 0.7916483792087278, + "grad_norm": 3.214691638946533, + "learning_rate": 2.191824776102116e-06, + "loss": 1.1431, + "step": 12626 + }, + { + "epoch": 0.7917110790645181, + "grad_norm": 3.2866363525390625, + "learning_rate": 2.190556198421999e-06, + "loss": 1.1558, + "step": 12627 + }, + { + "epoch": 0.7917737789203085, + "grad_norm": 3.232909917831421, + "learning_rate": 2.1892879428036852e-06, + "loss": 1.0662, + "step": 12628 + }, + { + "epoch": 0.7918364787760989, + "grad_norm": 3.4805593490600586, + "learning_rate": 2.1880200092994754e-06, + "loss": 1.2955, + "step": 12629 + }, + { + "epoch": 0.7918991786318892, + "grad_norm": 3.1742892265319824, + "learning_rate": 2.1867523979616636e-06, + "loss": 0.8938, + "step": 12630 + }, + { + "epoch": 0.7919618784876795, + "grad_norm": 3.2391653060913086, + "learning_rate": 2.1854851088425245e-06, + "loss": 1.1323, + "step": 12631 + }, + { + "epoch": 0.7920245783434698, + "grad_norm": 3.434939384460449, + "learning_rate": 2.184218141994321e-06, + "loss": 1.1688, + "step": 12632 + }, + { + "epoch": 0.7920872781992602, + "grad_norm": 3.0953524112701416, + "learning_rate": 2.1829514974693e-06, + "loss": 0.9693, + "step": 12633 + }, + { + "epoch": 0.7921499780550505, + "grad_norm": 3.2714059352874756, + "learning_rate": 2.1816851753197023e-06, + "loss": 0.9866, + "step": 12634 + }, + { + "epoch": 0.7922126779108408, + "grad_norm": 3.2889652252197266, + "learning_rate": 2.1804191755977513e-06, + "loss": 1.2351, + "step": 12635 + }, + { + "epoch": 0.7922753777666312, + "grad_norm": 3.1778409481048584, + "learning_rate": 2.179153498355653e-06, + "loss": 1.068, + "step": 12636 + }, + { + "epoch": 0.7923380776224215, + "grad_norm": 3.5793888568878174, + "learning_rate": 2.1778881436456045e-06, + "loss": 1.1977, + "step": 12637 + }, + { + "epoch": 0.7924007774782118, + "grad_norm": 4.057021617889404, + "learning_rate": 2.176623111519792e-06, + "loss": 1.1766, + "step": 12638 + }, + { + "epoch": 0.7924634773340021, + "grad_norm": 3.510206460952759, + "learning_rate": 2.1753584020303852e-06, + "loss": 1.0101, + "step": 12639 + }, + { + "epoch": 0.7925261771897925, + "grad_norm": 3.215994358062744, + "learning_rate": 2.1740940152295363e-06, + "loss": 1.0152, + "step": 12640 + }, + { + "epoch": 0.7925888770455828, + "grad_norm": 3.189535140991211, + "learning_rate": 2.1728299511693952e-06, + "loss": 0.9709, + "step": 12641 + }, + { + "epoch": 0.7926515769013731, + "grad_norm": 3.3547823429107666, + "learning_rate": 2.1715662099020886e-06, + "loss": 1.0834, + "step": 12642 + }, + { + "epoch": 0.7927142767571634, + "grad_norm": 3.231598377227783, + "learning_rate": 2.1703027914797335e-06, + "loss": 1.1276, + "step": 12643 + }, + { + "epoch": 0.7927769766129538, + "grad_norm": 3.1290597915649414, + "learning_rate": 2.1690396959544304e-06, + "loss": 1.2048, + "step": 12644 + }, + { + "epoch": 0.7928396764687441, + "grad_norm": 3.2859833240509033, + "learning_rate": 2.167776923378274e-06, + "loss": 1.1369, + "step": 12645 + }, + { + "epoch": 0.7929023763245344, + "grad_norm": 3.502450942993164, + "learning_rate": 2.1665144738033407e-06, + "loss": 1.1165, + "step": 12646 + }, + { + "epoch": 0.7929650761803247, + "grad_norm": 3.3208487033843994, + "learning_rate": 2.165252347281692e-06, + "loss": 1.0575, + "step": 12647 + }, + { + "epoch": 
0.7930277760361151, + "grad_norm": 3.4689764976501465, + "learning_rate": 2.163990543865375e-06, + "loss": 1.0939, + "step": 12648 + }, + { + "epoch": 0.7930904758919054, + "grad_norm": 3.129150629043579, + "learning_rate": 2.1627290636064346e-06, + "loss": 1.207, + "step": 12649 + }, + { + "epoch": 0.7931531757476957, + "grad_norm": 3.284501075744629, + "learning_rate": 2.161467906556889e-06, + "loss": 1.2297, + "step": 12650 + }, + { + "epoch": 0.7932158756034862, + "grad_norm": 3.318100929260254, + "learning_rate": 2.1602070727687463e-06, + "loss": 1.1551, + "step": 12651 + }, + { + "epoch": 0.7932785754592765, + "grad_norm": 3.3950448036193848, + "learning_rate": 2.1589465622940097e-06, + "loss": 0.9688, + "step": 12652 + }, + { + "epoch": 0.7933412753150668, + "grad_norm": 3.333115339279175, + "learning_rate": 2.157686375184659e-06, + "loss": 1.0199, + "step": 12653 + }, + { + "epoch": 0.7934039751708571, + "grad_norm": 3.8388235569000244, + "learning_rate": 2.156426511492664e-06, + "loss": 1.0855, + "step": 12654 + }, + { + "epoch": 0.7934666750266475, + "grad_norm": 3.5642802715301514, + "learning_rate": 2.1551669712699798e-06, + "loss": 1.0215, + "step": 12655 + }, + { + "epoch": 0.7935293748824378, + "grad_norm": 3.2072227001190186, + "learning_rate": 2.1539077545685563e-06, + "loss": 0.9179, + "step": 12656 + }, + { + "epoch": 0.7935920747382281, + "grad_norm": 3.426171064376831, + "learning_rate": 2.1526488614403174e-06, + "loss": 1.1, + "step": 12657 + }, + { + "epoch": 0.7936547745940185, + "grad_norm": 3.5620768070220947, + "learning_rate": 2.151390291937183e-06, + "loss": 0.957, + "step": 12658 + }, + { + "epoch": 0.7937174744498088, + "grad_norm": 3.3897974491119385, + "learning_rate": 2.150132046111054e-06, + "loss": 1.0887, + "step": 12659 + }, + { + "epoch": 0.7937801743055991, + "grad_norm": 3.6330881118774414, + "learning_rate": 2.1488741240138245e-06, + "loss": 1.1749, + "step": 12660 + }, + { + "epoch": 0.7938428741613894, + "grad_norm": 3.1844944953918457, + "learning_rate": 2.147616525697368e-06, + "loss": 1.2646, + "step": 12661 + }, + { + "epoch": 0.7939055740171798, + "grad_norm": 3.6822986602783203, + "learning_rate": 2.146359251213548e-06, + "loss": 1.0989, + "step": 12662 + }, + { + "epoch": 0.7939682738729701, + "grad_norm": 3.5362236499786377, + "learning_rate": 2.145102300614217e-06, + "loss": 0.977, + "step": 12663 + }, + { + "epoch": 0.7940309737287604, + "grad_norm": 3.2165632247924805, + "learning_rate": 2.143845673951209e-06, + "loss": 1.0742, + "step": 12664 + }, + { + "epoch": 0.7940936735845507, + "grad_norm": 3.398003101348877, + "learning_rate": 2.14258937127635e-06, + "loss": 1.041, + "step": 12665 + }, + { + "epoch": 0.7941563734403411, + "grad_norm": 3.3710968494415283, + "learning_rate": 2.141333392641446e-06, + "loss": 1.2052, + "step": 12666 + }, + { + "epoch": 0.7942190732961314, + "grad_norm": 3.8112733364105225, + "learning_rate": 2.1400777380982986e-06, + "loss": 1.1141, + "step": 12667 + }, + { + "epoch": 0.7942817731519217, + "grad_norm": 3.1552939414978027, + "learning_rate": 2.1388224076986872e-06, + "loss": 1.1232, + "step": 12668 + }, + { + "epoch": 0.794344473007712, + "grad_norm": 3.1353001594543457, + "learning_rate": 2.1375674014943846e-06, + "loss": 1.2275, + "step": 12669 + }, + { + "epoch": 0.7944071728635024, + "grad_norm": 3.5050675868988037, + "learning_rate": 2.1363127195371424e-06, + "loss": 1.0327, + "step": 12670 + }, + { + "epoch": 0.7944698727192927, + "grad_norm": 3.3407602310180664, + "learning_rate": 
2.135058361878709e-06, + "loss": 0.9994, + "step": 12671 + }, + { + "epoch": 0.794532572575083, + "grad_norm": 3.1725730895996094, + "learning_rate": 2.1338043285708132e-06, + "loss": 1.0028, + "step": 12672 + }, + { + "epoch": 0.7945952724308734, + "grad_norm": 3.5279107093811035, + "learning_rate": 2.132550619665168e-06, + "loss": 1.1109, + "step": 12673 + }, + { + "epoch": 0.7946579722866638, + "grad_norm": 3.298954725265503, + "learning_rate": 2.1312972352134807e-06, + "loss": 1.0463, + "step": 12674 + }, + { + "epoch": 0.7947206721424541, + "grad_norm": 3.170818328857422, + "learning_rate": 2.1300441752674394e-06, + "loss": 1.1285, + "step": 12675 + }, + { + "epoch": 0.7947833719982444, + "grad_norm": 3.523745536804199, + "learning_rate": 2.1287914398787203e-06, + "loss": 1.0678, + "step": 12676 + }, + { + "epoch": 0.7948460718540348, + "grad_norm": 3.312591552734375, + "learning_rate": 2.127539029098983e-06, + "loss": 1.1031, + "step": 12677 + }, + { + "epoch": 0.7949087717098251, + "grad_norm": 3.3081676959991455, + "learning_rate": 2.1262869429798817e-06, + "loss": 1.0091, + "step": 12678 + }, + { + "epoch": 0.7949714715656154, + "grad_norm": 3.653827667236328, + "learning_rate": 2.1250351815730517e-06, + "loss": 1.0184, + "step": 12679 + }, + { + "epoch": 0.7950341714214058, + "grad_norm": 3.555053234100342, + "learning_rate": 2.1237837449301114e-06, + "loss": 1.0038, + "step": 12680 + }, + { + "epoch": 0.7950968712771961, + "grad_norm": 3.2215893268585205, + "learning_rate": 2.1225326331026775e-06, + "loss": 1.1342, + "step": 12681 + }, + { + "epoch": 0.7951595711329864, + "grad_norm": 3.5325369834899902, + "learning_rate": 2.12128184614234e-06, + "loss": 1.1442, + "step": 12682 + }, + { + "epoch": 0.7952222709887767, + "grad_norm": 3.9185001850128174, + "learning_rate": 2.1200313841006846e-06, + "loss": 0.9495, + "step": 12683 + }, + { + "epoch": 0.7952849708445671, + "grad_norm": 3.2805423736572266, + "learning_rate": 2.1187812470292758e-06, + "loss": 1.1149, + "step": 12684 + }, + { + "epoch": 0.7953476707003574, + "grad_norm": 3.4187021255493164, + "learning_rate": 2.117531434979675e-06, + "loss": 1.0323, + "step": 12685 + }, + { + "epoch": 0.7954103705561477, + "grad_norm": 3.4578206539154053, + "learning_rate": 2.1162819480034226e-06, + "loss": 1.0785, + "step": 12686 + }, + { + "epoch": 0.795473070411938, + "grad_norm": 3.55684232711792, + "learning_rate": 2.115032786152047e-06, + "loss": 1.1975, + "step": 12687 + }, + { + "epoch": 0.7955357702677284, + "grad_norm": 3.50514817237854, + "learning_rate": 2.1137839494770605e-06, + "loss": 1.1513, + "step": 12688 + }, + { + "epoch": 0.7955984701235187, + "grad_norm": 3.3604273796081543, + "learning_rate": 2.112535438029971e-06, + "loss": 1.0724, + "step": 12689 + }, + { + "epoch": 0.795661169979309, + "grad_norm": 3.3288309574127197, + "learning_rate": 2.111287251862264e-06, + "loss": 1.0049, + "step": 12690 + }, + { + "epoch": 0.7957238698350994, + "grad_norm": 3.816274642944336, + "learning_rate": 2.1100393910254135e-06, + "loss": 1.0876, + "step": 12691 + }, + { + "epoch": 0.7957865696908897, + "grad_norm": 3.4259567260742188, + "learning_rate": 2.108791855570885e-06, + "loss": 1.2393, + "step": 12692 + }, + { + "epoch": 0.79584926954668, + "grad_norm": 3.1776742935180664, + "learning_rate": 2.1075446455501257e-06, + "loss": 1.0062, + "step": 12693 + }, + { + "epoch": 0.7959119694024703, + "grad_norm": 3.0684022903442383, + "learning_rate": 2.1062977610145697e-06, + "loss": 1.0518, + "step": 12694 + }, + { + "epoch": 
0.7959746692582607, + "grad_norm": 3.5090301036834717, + "learning_rate": 2.1050512020156364e-06, + "loss": 0.9444, + "step": 12695 + }, + { + "epoch": 0.796037369114051, + "grad_norm": 3.7510628700256348, + "learning_rate": 2.103804968604738e-06, + "loss": 1.0535, + "step": 12696 + }, + { + "epoch": 0.7961000689698414, + "grad_norm": 3.434354305267334, + "learning_rate": 2.102559060833268e-06, + "loss": 1.0859, + "step": 12697 + }, + { + "epoch": 0.7961627688256318, + "grad_norm": 3.5696587562561035, + "learning_rate": 2.101313478752608e-06, + "loss": 1.1747, + "step": 12698 + }, + { + "epoch": 0.7962254686814221, + "grad_norm": 3.7333362102508545, + "learning_rate": 2.100068222414121e-06, + "loss": 1.0182, + "step": 12699 + }, + { + "epoch": 0.7962881685372124, + "grad_norm": 3.448909044265747, + "learning_rate": 2.0988232918691685e-06, + "loss": 1.0871, + "step": 12700 + }, + { + "epoch": 0.7963508683930027, + "grad_norm": 3.2617783546447754, + "learning_rate": 2.0975786871690897e-06, + "loss": 1.0328, + "step": 12701 + }, + { + "epoch": 0.7964135682487931, + "grad_norm": 3.7041473388671875, + "learning_rate": 2.096334408365207e-06, + "loss": 0.9827, + "step": 12702 + }, + { + "epoch": 0.7964762681045834, + "grad_norm": 3.2784626483917236, + "learning_rate": 2.0950904555088413e-06, + "loss": 0.993, + "step": 12703 + }, + { + "epoch": 0.7965389679603737, + "grad_norm": 3.2239575386047363, + "learning_rate": 2.0938468286512915e-06, + "loss": 1.1132, + "step": 12704 + }, + { + "epoch": 0.796601667816164, + "grad_norm": 3.401015520095825, + "learning_rate": 2.092603527843843e-06, + "loss": 1.0285, + "step": 12705 + }, + { + "epoch": 0.7966643676719544, + "grad_norm": 2.941573143005371, + "learning_rate": 2.091360553137767e-06, + "loss": 1.0702, + "step": 12706 + }, + { + "epoch": 0.7967270675277447, + "grad_norm": 3.6524624824523926, + "learning_rate": 2.09011790458433e-06, + "loss": 1.0852, + "step": 12707 + }, + { + "epoch": 0.796789767383535, + "grad_norm": 4.055318355560303, + "learning_rate": 2.0888755822347774e-06, + "loss": 1.1775, + "step": 12708 + }, + { + "epoch": 0.7968524672393253, + "grad_norm": 3.772991418838501, + "learning_rate": 2.0876335861403396e-06, + "loss": 0.9477, + "step": 12709 + }, + { + "epoch": 0.7969151670951157, + "grad_norm": 3.1540231704711914, + "learning_rate": 2.086391916352236e-06, + "loss": 1.0404, + "step": 12710 + }, + { + "epoch": 0.796977866950906, + "grad_norm": 3.5812137126922607, + "learning_rate": 2.085150572921678e-06, + "loss": 1.136, + "step": 12711 + }, + { + "epoch": 0.7970405668066963, + "grad_norm": 3.7325611114501953, + "learning_rate": 2.0839095558998566e-06, + "loss": 1.1946, + "step": 12712 + }, + { + "epoch": 0.7971032666624867, + "grad_norm": 2.9891574382781982, + "learning_rate": 2.082668865337948e-06, + "loss": 1.1607, + "step": 12713 + }, + { + "epoch": 0.797165966518277, + "grad_norm": 3.001417398452759, + "learning_rate": 2.081428501287124e-06, + "loss": 1.2782, + "step": 12714 + }, + { + "epoch": 0.7972286663740673, + "grad_norm": 3.570380926132202, + "learning_rate": 2.0801884637985337e-06, + "loss": 1.1729, + "step": 12715 + }, + { + "epoch": 0.7972913662298576, + "grad_norm": 3.3756539821624756, + "learning_rate": 2.0789487529233175e-06, + "loss": 1.1036, + "step": 12716 + }, + { + "epoch": 0.797354066085648, + "grad_norm": 3.276151418685913, + "learning_rate": 2.0777093687125984e-06, + "loss": 0.9343, + "step": 12717 + }, + { + "epoch": 0.7974167659414383, + "grad_norm": 3.2652902603149414, + "learning_rate": 
2.076470311217492e-06, + "loss": 1.1983, + "step": 12718 + }, + { + "epoch": 0.7974794657972286, + "grad_norm": 3.2704105377197266, + "learning_rate": 2.075231580489098e-06, + "loss": 1.0978, + "step": 12719 + }, + { + "epoch": 0.797542165653019, + "grad_norm": 3.5460610389709473, + "learning_rate": 2.0739931765784983e-06, + "loss": 1.1981, + "step": 12720 + }, + { + "epoch": 0.7976048655088094, + "grad_norm": 3.4828622341156006, + "learning_rate": 2.072755099536764e-06, + "loss": 0.9926, + "step": 12721 + }, + { + "epoch": 0.7976675653645997, + "grad_norm": 3.396667242050171, + "learning_rate": 2.0715173494149578e-06, + "loss": 0.9855, + "step": 12722 + }, + { + "epoch": 0.79773026522039, + "grad_norm": 3.0462825298309326, + "learning_rate": 2.070279926264123e-06, + "loss": 1.1527, + "step": 12723 + }, + { + "epoch": 0.7977929650761804, + "grad_norm": 3.4410481452941895, + "learning_rate": 2.069042830135287e-06, + "loss": 1.174, + "step": 12724 + }, + { + "epoch": 0.7978556649319707, + "grad_norm": 3.665919542312622, + "learning_rate": 2.0678060610794748e-06, + "loss": 1.0444, + "step": 12725 + }, + { + "epoch": 0.797918364787761, + "grad_norm": 3.326843023300171, + "learning_rate": 2.066569619147686e-06, + "loss": 1.0793, + "step": 12726 + }, + { + "epoch": 0.7979810646435513, + "grad_norm": 2.9749081134796143, + "learning_rate": 2.065333504390913e-06, + "loss": 1.1214, + "step": 12727 + }, + { + "epoch": 0.7980437644993417, + "grad_norm": 3.457540273666382, + "learning_rate": 2.0640977168601294e-06, + "loss": 1.1398, + "step": 12728 + }, + { + "epoch": 0.798106464355132, + "grad_norm": 4.320376873016357, + "learning_rate": 2.0628622566063063e-06, + "loss": 1.2202, + "step": 12729 + }, + { + "epoch": 0.7981691642109223, + "grad_norm": 3.4554829597473145, + "learning_rate": 2.06162712368039e-06, + "loss": 1.225, + "step": 12730 + }, + { + "epoch": 0.7982318640667126, + "grad_norm": 3.2001919746398926, + "learning_rate": 2.060392318133314e-06, + "loss": 1.1279, + "step": 12731 + }, + { + "epoch": 0.798294563922503, + "grad_norm": 3.2024431228637695, + "learning_rate": 2.0591578400160094e-06, + "loss": 1.1099, + "step": 12732 + }, + { + "epoch": 0.7983572637782933, + "grad_norm": 3.5636003017425537, + "learning_rate": 2.057923689379382e-06, + "loss": 1.1339, + "step": 12733 + }, + { + "epoch": 0.7984199636340836, + "grad_norm": 3.2309508323669434, + "learning_rate": 2.0566898662743286e-06, + "loss": 1.0571, + "step": 12734 + }, + { + "epoch": 0.798482663489874, + "grad_norm": 3.6251978874206543, + "learning_rate": 2.0554563707517294e-06, + "loss": 0.982, + "step": 12735 + }, + { + "epoch": 0.7985453633456643, + "grad_norm": 2.903458833694458, + "learning_rate": 2.0542232028624585e-06, + "loss": 1.0684, + "step": 12736 + }, + { + "epoch": 0.7986080632014546, + "grad_norm": 3.2990758419036865, + "learning_rate": 2.05299036265737e-06, + "loss": 1.1765, + "step": 12737 + }, + { + "epoch": 0.7986707630572449, + "grad_norm": 3.5503084659576416, + "learning_rate": 2.051757850187306e-06, + "loss": 1.0945, + "step": 12738 + }, + { + "epoch": 0.7987334629130353, + "grad_norm": 3.460590124130249, + "learning_rate": 2.0505256655030926e-06, + "loss": 1.0624, + "step": 12739 + }, + { + "epoch": 0.7987961627688256, + "grad_norm": 3.504059314727783, + "learning_rate": 2.0492938086555514e-06, + "loss": 1.2441, + "step": 12740 + }, + { + "epoch": 0.7988588626246159, + "grad_norm": 3.365985870361328, + "learning_rate": 2.0480622796954797e-06, + "loss": 1.2178, + "step": 12741 + }, + { + "epoch": 
0.7989215624804062, + "grad_norm": 3.354139566421509, + "learning_rate": 2.0468310786736647e-06, + "loss": 1.0643, + "step": 12742 + }, + { + "epoch": 0.7989842623361967, + "grad_norm": 3.1848294734954834, + "learning_rate": 2.045600205640885e-06, + "loss": 1.0326, + "step": 12743 + }, + { + "epoch": 0.799046962191987, + "grad_norm": 3.088977098464966, + "learning_rate": 2.0443696606479013e-06, + "loss": 1.1868, + "step": 12744 + }, + { + "epoch": 0.7991096620477773, + "grad_norm": 3.4986886978149414, + "learning_rate": 2.0431394437454597e-06, + "loss": 1.0856, + "step": 12745 + }, + { + "epoch": 0.7991723619035677, + "grad_norm": 3.620026111602783, + "learning_rate": 2.0419095549842925e-06, + "loss": 1.1506, + "step": 12746 + }, + { + "epoch": 0.799235061759358, + "grad_norm": 3.363621950149536, + "learning_rate": 2.0406799944151256e-06, + "loss": 1.04, + "step": 12747 + }, + { + "epoch": 0.7992977616151483, + "grad_norm": 3.791473388671875, + "learning_rate": 2.039450762088664e-06, + "loss": 1.0261, + "step": 12748 + }, + { + "epoch": 0.7993604614709386, + "grad_norm": 3.0486044883728027, + "learning_rate": 2.038221858055599e-06, + "loss": 1.0573, + "step": 12749 + }, + { + "epoch": 0.799423161326729, + "grad_norm": 3.496159553527832, + "learning_rate": 2.0369932823666104e-06, + "loss": 0.9872, + "step": 12750 + }, + { + "epoch": 0.7994858611825193, + "grad_norm": 3.3698465824127197, + "learning_rate": 2.035765035072369e-06, + "loss": 0.9798, + "step": 12751 + }, + { + "epoch": 0.7995485610383096, + "grad_norm": 3.5116803646087646, + "learning_rate": 2.034537116223526e-06, + "loss": 0.912, + "step": 12752 + }, + { + "epoch": 0.7996112608941, + "grad_norm": 3.1952924728393555, + "learning_rate": 2.033309525870717e-06, + "loss": 1.0208, + "step": 12753 + }, + { + "epoch": 0.7996739607498903, + "grad_norm": 3.2819900512695312, + "learning_rate": 2.0320822640645744e-06, + "loss": 1.0331, + "step": 12754 + }, + { + "epoch": 0.7997366606056806, + "grad_norm": 3.106137990951538, + "learning_rate": 2.0308553308557064e-06, + "loss": 1.1896, + "step": 12755 + }, + { + "epoch": 0.7997993604614709, + "grad_norm": 3.8193018436431885, + "learning_rate": 2.0296287262947122e-06, + "loss": 1.1443, + "step": 12756 + }, + { + "epoch": 0.7998620603172613, + "grad_norm": 3.333775520324707, + "learning_rate": 2.0284024504321754e-06, + "loss": 0.9674, + "step": 12757 + }, + { + "epoch": 0.7999247601730516, + "grad_norm": 3.562987804412842, + "learning_rate": 2.027176503318672e-06, + "loss": 0.9948, + "step": 12758 + }, + { + "epoch": 0.7999874600288419, + "grad_norm": 3.8920722007751465, + "learning_rate": 2.0259508850047584e-06, + "loss": 0.8964, + "step": 12759 + }, + { + "epoch": 0.8000501598846322, + "grad_norm": 3.1747522354125977, + "learning_rate": 2.0247255955409784e-06, + "loss": 1.2229, + "step": 12760 + }, + { + "epoch": 0.8001128597404226, + "grad_norm": 3.3417282104492188, + "learning_rate": 2.0235006349778595e-06, + "loss": 1.1122, + "step": 12761 + }, + { + "epoch": 0.8001755595962129, + "grad_norm": 3.2885425090789795, + "learning_rate": 2.0222760033659263e-06, + "loss": 1.0766, + "step": 12762 + }, + { + "epoch": 0.8002382594520032, + "grad_norm": 3.5934224128723145, + "learning_rate": 2.0210517007556797e-06, + "loss": 1.1292, + "step": 12763 + }, + { + "epoch": 0.8003009593077935, + "grad_norm": 3.32668137550354, + "learning_rate": 2.019827727197605e-06, + "loss": 1.0333, + "step": 12764 + }, + { + "epoch": 0.8003636591635839, + "grad_norm": 3.5769994258880615, + "learning_rate": 
2.018604082742187e-06, + "loss": 1.0565, + "step": 12765 + }, + { + "epoch": 0.8004263590193742, + "grad_norm": 3.3674564361572266, + "learning_rate": 2.017380767439886e-06, + "loss": 1.0157, + "step": 12766 + }, + { + "epoch": 0.8004890588751646, + "grad_norm": 3.2960164546966553, + "learning_rate": 2.016157781341149e-06, + "loss": 0.9668, + "step": 12767 + }, + { + "epoch": 0.800551758730955, + "grad_norm": 3.149121046066284, + "learning_rate": 2.014935124496412e-06, + "loss": 1.1879, + "step": 12768 + }, + { + "epoch": 0.8006144585867453, + "grad_norm": 3.4996542930603027, + "learning_rate": 2.013712796956102e-06, + "loss": 1.1514, + "step": 12769 + }, + { + "epoch": 0.8006771584425356, + "grad_norm": 3.3134586811065674, + "learning_rate": 2.0124907987706243e-06, + "loss": 1.2739, + "step": 12770 + }, + { + "epoch": 0.800739858298326, + "grad_norm": 3.1925487518310547, + "learning_rate": 2.011269129990376e-06, + "loss": 0.9097, + "step": 12771 + }, + { + "epoch": 0.8008025581541163, + "grad_norm": 3.1825757026672363, + "learning_rate": 2.0100477906657346e-06, + "loss": 0.9882, + "step": 12772 + }, + { + "epoch": 0.8008652580099066, + "grad_norm": 3.5054280757904053, + "learning_rate": 2.008826780847073e-06, + "loss": 1.0219, + "step": 12773 + }, + { + "epoch": 0.8009279578656969, + "grad_norm": 3.6701459884643555, + "learning_rate": 2.0076061005847447e-06, + "loss": 1.0149, + "step": 12774 + }, + { + "epoch": 0.8009906577214873, + "grad_norm": 3.160034418106079, + "learning_rate": 2.0063857499290874e-06, + "loss": 1.0029, + "step": 12775 + }, + { + "epoch": 0.8010533575772776, + "grad_norm": 3.3504598140716553, + "learning_rate": 2.0051657289304327e-06, + "loss": 1.1371, + "step": 12776 + }, + { + "epoch": 0.8011160574330679, + "grad_norm": 3.105532169342041, + "learning_rate": 2.0039460376390927e-06, + "loss": 1.0861, + "step": 12777 + }, + { + "epoch": 0.8011787572888582, + "grad_norm": 3.808367967605591, + "learning_rate": 2.002726676105369e-06, + "loss": 1.0455, + "step": 12778 + }, + { + "epoch": 0.8012414571446486, + "grad_norm": 3.582277774810791, + "learning_rate": 2.001507644379541e-06, + "loss": 1.0744, + "step": 12779 + }, + { + "epoch": 0.8013041570004389, + "grad_norm": 3.1970579624176025, + "learning_rate": 2.000288942511892e-06, + "loss": 1.0704, + "step": 12780 + }, + { + "epoch": 0.8013668568562292, + "grad_norm": 3.710817813873291, + "learning_rate": 1.999070570552675e-06, + "loss": 1.0657, + "step": 12781 + }, + { + "epoch": 0.8014295567120195, + "grad_norm": 3.989793062210083, + "learning_rate": 1.9978525285521377e-06, + "loss": 0.9627, + "step": 12782 + }, + { + "epoch": 0.8014922565678099, + "grad_norm": 3.445819854736328, + "learning_rate": 1.9966348165605085e-06, + "loss": 1.1756, + "step": 12783 + }, + { + "epoch": 0.8015549564236002, + "grad_norm": 3.2607812881469727, + "learning_rate": 1.995417434628012e-06, + "loss": 1.121, + "step": 12784 + }, + { + "epoch": 0.8016176562793905, + "grad_norm": 3.349365711212158, + "learning_rate": 1.9942003828048497e-06, + "loss": 1.0537, + "step": 12785 + }, + { + "epoch": 0.8016803561351808, + "grad_norm": 3.359870672225952, + "learning_rate": 1.9929836611412103e-06, + "loss": 1.059, + "step": 12786 + }, + { + "epoch": 0.8017430559909712, + "grad_norm": 2.86848521232605, + "learning_rate": 1.991767269687278e-06, + "loss": 1.2877, + "step": 12787 + }, + { + "epoch": 0.8018057558467615, + "grad_norm": 3.7475552558898926, + "learning_rate": 1.990551208493212e-06, + "loss": 1.0611, + "step": 12788 + }, + { + "epoch": 
0.8018684557025518, + "grad_norm": 3.3402528762817383, + "learning_rate": 1.9893354776091658e-06, + "loss": 1.0241, + "step": 12789 + }, + { + "epoch": 0.8019311555583423, + "grad_norm": 3.1460933685302734, + "learning_rate": 1.9881200770852695e-06, + "loss": 1.0018, + "step": 12790 + }, + { + "epoch": 0.8019938554141326, + "grad_norm": 3.0197081565856934, + "learning_rate": 1.9869050069716554e-06, + "loss": 1.2164, + "step": 12791 + }, + { + "epoch": 0.8020565552699229, + "grad_norm": 3.2183837890625, + "learning_rate": 1.9856902673184275e-06, + "loss": 1.0038, + "step": 12792 + }, + { + "epoch": 0.8021192551257132, + "grad_norm": 3.3778927326202393, + "learning_rate": 1.9844758581756807e-06, + "loss": 1.2058, + "step": 12793 + }, + { + "epoch": 0.8021819549815036, + "grad_norm": 3.362839937210083, + "learning_rate": 1.983261779593503e-06, + "loss": 0.9848, + "step": 12794 + }, + { + "epoch": 0.8022446548372939, + "grad_norm": 3.8500123023986816, + "learning_rate": 1.9820480316219595e-06, + "loss": 1.1223, + "step": 12795 + }, + { + "epoch": 0.8023073546930842, + "grad_norm": 3.1623761653900146, + "learning_rate": 1.9808346143111048e-06, + "loss": 1.1049, + "step": 12796 + }, + { + "epoch": 0.8023700545488746, + "grad_norm": 3.1639418601989746, + "learning_rate": 1.9796215277109778e-06, + "loss": 0.998, + "step": 12797 + }, + { + "epoch": 0.8024327544046649, + "grad_norm": 3.330770969390869, + "learning_rate": 1.9784087718716114e-06, + "loss": 1.2115, + "step": 12798 + }, + { + "epoch": 0.8024954542604552, + "grad_norm": 3.532283067703247, + "learning_rate": 1.977196346843019e-06, + "loss": 1.1088, + "step": 12799 + }, + { + "epoch": 0.8025581541162455, + "grad_norm": 3.185903787612915, + "learning_rate": 1.9759842526751995e-06, + "loss": 1.1435, + "step": 12800 + }, + { + "epoch": 0.8026208539720359, + "grad_norm": 3.3338165283203125, + "learning_rate": 1.9747724894181363e-06, + "loss": 1.1534, + "step": 12801 + }, + { + "epoch": 0.8026835538278262, + "grad_norm": 3.4766299724578857, + "learning_rate": 1.973561057121809e-06, + "loss": 1.0371, + "step": 12802 + }, + { + "epoch": 0.8027462536836165, + "grad_norm": 3.7502310276031494, + "learning_rate": 1.9723499558361746e-06, + "loss": 1.147, + "step": 12803 + }, + { + "epoch": 0.8028089535394068, + "grad_norm": 3.3657383918762207, + "learning_rate": 1.971139185611176e-06, + "loss": 0.8781, + "step": 12804 + }, + { + "epoch": 0.8028716533951972, + "grad_norm": 3.3638980388641357, + "learning_rate": 1.96992874649675e-06, + "loss": 1.1404, + "step": 12805 + }, + { + "epoch": 0.8029343532509875, + "grad_norm": 3.1027283668518066, + "learning_rate": 1.968718638542814e-06, + "loss": 1.0467, + "step": 12806 + }, + { + "epoch": 0.8029970531067778, + "grad_norm": 3.608293294906616, + "learning_rate": 1.967508861799269e-06, + "loss": 1.0779, + "step": 12807 + }, + { + "epoch": 0.8030597529625682, + "grad_norm": 3.5838727951049805, + "learning_rate": 1.966299416316012e-06, + "loss": 1.1007, + "step": 12808 + }, + { + "epoch": 0.8031224528183585, + "grad_norm": 3.0511181354522705, + "learning_rate": 1.9650903021429146e-06, + "loss": 1.0234, + "step": 12809 + }, + { + "epoch": 0.8031851526741488, + "grad_norm": 3.387596607208252, + "learning_rate": 1.9638815193298477e-06, + "loss": 1.0559, + "step": 12810 + }, + { + "epoch": 0.8032478525299391, + "grad_norm": 3.8447494506835938, + "learning_rate": 1.962673067926657e-06, + "loss": 1.1724, + "step": 12811 + }, + { + "epoch": 0.8033105523857295, + "grad_norm": 3.1438920497894287, + "learning_rate": 
1.961464947983178e-06, + "loss": 0.9558, + "step": 12812 + }, + { + "epoch": 0.8033732522415199, + "grad_norm": 3.259533643722534, + "learning_rate": 1.960257159549238e-06, + "loss": 1.2356, + "step": 12813 + }, + { + "epoch": 0.8034359520973102, + "grad_norm": 3.3161721229553223, + "learning_rate": 1.9590497026746434e-06, + "loss": 1.0738, + "step": 12814 + }, + { + "epoch": 0.8034986519531006, + "grad_norm": 3.348886013031006, + "learning_rate": 1.9578425774091913e-06, + "loss": 1.0871, + "step": 12815 + }, + { + "epoch": 0.8035613518088909, + "grad_norm": 3.321712017059326, + "learning_rate": 1.9566357838026594e-06, + "loss": 1.0622, + "step": 12816 + }, + { + "epoch": 0.8036240516646812, + "grad_norm": 3.832604169845581, + "learning_rate": 1.9554293219048217e-06, + "loss": 1.0486, + "step": 12817 + }, + { + "epoch": 0.8036867515204715, + "grad_norm": 3.563387155532837, + "learning_rate": 1.9542231917654296e-06, + "loss": 0.9184, + "step": 12818 + }, + { + "epoch": 0.8037494513762619, + "grad_norm": 3.521941900253296, + "learning_rate": 1.9530173934342243e-06, + "loss": 1.0529, + "step": 12819 + }, + { + "epoch": 0.8038121512320522, + "grad_norm": 3.4098215103149414, + "learning_rate": 1.9518119269609313e-06, + "loss": 0.9188, + "step": 12820 + }, + { + "epoch": 0.8038748510878425, + "grad_norm": 3.6006181240081787, + "learning_rate": 1.9506067923952676e-06, + "loss": 1.0192, + "step": 12821 + }, + { + "epoch": 0.8039375509436328, + "grad_norm": 3.7725865840911865, + "learning_rate": 1.949401989786932e-06, + "loss": 1.108, + "step": 12822 + }, + { + "epoch": 0.8040002507994232, + "grad_norm": 3.2041962146759033, + "learning_rate": 1.948197519185607e-06, + "loss": 1.0694, + "step": 12823 + }, + { + "epoch": 0.8040629506552135, + "grad_norm": 3.3186042308807373, + "learning_rate": 1.9469933806409713e-06, + "loss": 1.1003, + "step": 12824 + }, + { + "epoch": 0.8041256505110038, + "grad_norm": 3.540020227432251, + "learning_rate": 1.945789574202679e-06, + "loss": 1.1125, + "step": 12825 + }, + { + "epoch": 0.8041883503667941, + "grad_norm": 3.268240213394165, + "learning_rate": 1.9445860999203767e-06, + "loss": 1.1812, + "step": 12826 + }, + { + "epoch": 0.8042510502225845, + "grad_norm": 3.2066810131073, + "learning_rate": 1.9433829578436937e-06, + "loss": 1.0438, + "step": 12827 + }, + { + "epoch": 0.8043137500783748, + "grad_norm": 3.3140029907226562, + "learning_rate": 1.9421801480222513e-06, + "loss": 1.0383, + "step": 12828 + }, + { + "epoch": 0.8043764499341651, + "grad_norm": 3.380624771118164, + "learning_rate": 1.9409776705056514e-06, + "loss": 1.1607, + "step": 12829 + }, + { + "epoch": 0.8044391497899555, + "grad_norm": 3.1099350452423096, + "learning_rate": 1.9397755253434847e-06, + "loss": 1.1384, + "step": 12830 + }, + { + "epoch": 0.8045018496457458, + "grad_norm": 3.3750598430633545, + "learning_rate": 1.9385737125853242e-06, + "loss": 1.105, + "step": 12831 + }, + { + "epoch": 0.8045645495015361, + "grad_norm": 3.662747621536255, + "learning_rate": 1.937372232280739e-06, + "loss": 1.0842, + "step": 12832 + }, + { + "epoch": 0.8046272493573264, + "grad_norm": 3.206063747406006, + "learning_rate": 1.936171084479275e-06, + "loss": 1.3578, + "step": 12833 + }, + { + "epoch": 0.8046899492131168, + "grad_norm": 4.0552849769592285, + "learning_rate": 1.934970269230464e-06, + "loss": 0.9763, + "step": 12834 + }, + { + "epoch": 0.8047526490689071, + "grad_norm": 3.3905105590820312, + "learning_rate": 1.9337697865838356e-06, + "loss": 1.2091, + "step": 12835 + }, + { + "epoch": 
0.8048153489246975, + "grad_norm": 3.6624391078948975, + "learning_rate": 1.9325696365888924e-06, + "loss": 0.9101, + "step": 12836 + }, + { + "epoch": 0.8048780487804879, + "grad_norm": 3.248145580291748, + "learning_rate": 1.9313698192951293e-06, + "loss": 1.0482, + "step": 12837 + }, + { + "epoch": 0.8049407486362782, + "grad_norm": 3.722891330718994, + "learning_rate": 1.930170334752025e-06, + "loss": 1.0434, + "step": 12838 + }, + { + "epoch": 0.8050034484920685, + "grad_norm": 3.526975154876709, + "learning_rate": 1.9289711830090517e-06, + "loss": 1.0894, + "step": 12839 + }, + { + "epoch": 0.8050661483478588, + "grad_norm": 3.606513500213623, + "learning_rate": 1.9277723641156587e-06, + "loss": 1.0785, + "step": 12840 + }, + { + "epoch": 0.8051288482036492, + "grad_norm": 3.2340574264526367, + "learning_rate": 1.926573878121286e-06, + "loss": 1.0399, + "step": 12841 + }, + { + "epoch": 0.8051915480594395, + "grad_norm": 3.3434829711914062, + "learning_rate": 1.925375725075357e-06, + "loss": 1.3294, + "step": 12842 + }, + { + "epoch": 0.8052542479152298, + "grad_norm": 3.51098895072937, + "learning_rate": 1.924177905027288e-06, + "loss": 1.1183, + "step": 12843 + }, + { + "epoch": 0.8053169477710201, + "grad_norm": 3.2605276107788086, + "learning_rate": 1.9229804180264744e-06, + "loss": 1.0919, + "step": 12844 + }, + { + "epoch": 0.8053796476268105, + "grad_norm": 3.4361488819122314, + "learning_rate": 1.9217832641222987e-06, + "loss": 1.1729, + "step": 12845 + }, + { + "epoch": 0.8054423474826008, + "grad_norm": 3.273052930831909, + "learning_rate": 1.9205864433641364e-06, + "loss": 1.0704, + "step": 12846 + }, + { + "epoch": 0.8055050473383911, + "grad_norm": 3.274925708770752, + "learning_rate": 1.919389955801343e-06, + "loss": 1.1392, + "step": 12847 + }, + { + "epoch": 0.8055677471941814, + "grad_norm": 3.0739638805389404, + "learning_rate": 1.918193801483259e-06, + "loss": 1.1621, + "step": 12848 + }, + { + "epoch": 0.8056304470499718, + "grad_norm": 3.5724875926971436, + "learning_rate": 1.916997980459214e-06, + "loss": 1.0169, + "step": 12849 + }, + { + "epoch": 0.8056931469057621, + "grad_norm": 3.2216286659240723, + "learning_rate": 1.915802492778528e-06, + "loss": 1.0068, + "step": 12850 + }, + { + "epoch": 0.8057558467615524, + "grad_norm": 3.7237930297851562, + "learning_rate": 1.9146073384905007e-06, + "loss": 1.0404, + "step": 12851 + }, + { + "epoch": 0.8058185466173428, + "grad_norm": 3.6903469562530518, + "learning_rate": 1.9134125176444186e-06, + "loss": 1.1207, + "step": 12852 + }, + { + "epoch": 0.8058812464731331, + "grad_norm": 3.440626859664917, + "learning_rate": 1.9122180302895553e-06, + "loss": 0.973, + "step": 12853 + }, + { + "epoch": 0.8059439463289234, + "grad_norm": 3.482598304748535, + "learning_rate": 1.9110238764751764e-06, + "loss": 1.0524, + "step": 12854 + }, + { + "epoch": 0.8060066461847137, + "grad_norm": 4.059083461761475, + "learning_rate": 1.9098300562505266e-06, + "loss": 1.0013, + "step": 12855 + }, + { + "epoch": 0.8060693460405041, + "grad_norm": 3.012890100479126, + "learning_rate": 1.9086365696648355e-06, + "loss": 1.1121, + "step": 12856 + }, + { + "epoch": 0.8061320458962944, + "grad_norm": 2.9250125885009766, + "learning_rate": 1.9074434167673285e-06, + "loss": 1.1657, + "step": 12857 + }, + { + "epoch": 0.8061947457520847, + "grad_norm": 3.0841500759124756, + "learning_rate": 1.9062505976072078e-06, + "loss": 1.0705, + "step": 12858 + }, + { + "epoch": 0.8062574456078752, + "grad_norm": 3.6014387607574463, + "learning_rate": 
1.9050581122336653e-06, + "loss": 1.129, + "step": 12859 + }, + { + "epoch": 0.8063201454636655, + "grad_norm": 3.278120756149292, + "learning_rate": 1.9038659606958787e-06, + "loss": 1.0675, + "step": 12860 + }, + { + "epoch": 0.8063828453194558, + "grad_norm": 3.615180253982544, + "learning_rate": 1.9026741430430152e-06, + "loss": 1.1726, + "step": 12861 + }, + { + "epoch": 0.8064455451752461, + "grad_norm": 3.414422035217285, + "learning_rate": 1.9014826593242241e-06, + "loss": 1.1397, + "step": 12862 + }, + { + "epoch": 0.8065082450310365, + "grad_norm": 3.3629589080810547, + "learning_rate": 1.9002915095886388e-06, + "loss": 1.0573, + "step": 12863 + }, + { + "epoch": 0.8065709448868268, + "grad_norm": 3.5155704021453857, + "learning_rate": 1.8991006938853873e-06, + "loss": 1.101, + "step": 12864 + }, + { + "epoch": 0.8066336447426171, + "grad_norm": 3.642765998840332, + "learning_rate": 1.8979102122635785e-06, + "loss": 1.1354, + "step": 12865 + }, + { + "epoch": 0.8066963445984074, + "grad_norm": 3.320021152496338, + "learning_rate": 1.8967200647723049e-06, + "loss": 1.0672, + "step": 12866 + }, + { + "epoch": 0.8067590444541978, + "grad_norm": 3.3731162548065186, + "learning_rate": 1.8955302514606477e-06, + "loss": 1.0006, + "step": 12867 + }, + { + "epoch": 0.8068217443099881, + "grad_norm": 3.404474973678589, + "learning_rate": 1.8943407723776796e-06, + "loss": 1.1225, + "step": 12868 + }, + { + "epoch": 0.8068844441657784, + "grad_norm": 3.1547634601593018, + "learning_rate": 1.8931516275724527e-06, + "loss": 1.0977, + "step": 12869 + }, + { + "epoch": 0.8069471440215688, + "grad_norm": 3.480928421020508, + "learning_rate": 1.8919628170940052e-06, + "loss": 1.0686, + "step": 12870 + }, + { + "epoch": 0.8070098438773591, + "grad_norm": 3.455615997314453, + "learning_rate": 1.8907743409913636e-06, + "loss": 1.0334, + "step": 12871 + }, + { + "epoch": 0.8070725437331494, + "grad_norm": 2.969463348388672, + "learning_rate": 1.8895861993135444e-06, + "loss": 1.1781, + "step": 12872 + }, + { + "epoch": 0.8071352435889397, + "grad_norm": 2.8854856491088867, + "learning_rate": 1.888398392109545e-06, + "loss": 1.0846, + "step": 12873 + }, + { + "epoch": 0.8071979434447301, + "grad_norm": 3.1448240280151367, + "learning_rate": 1.8872109194283471e-06, + "loss": 1.2276, + "step": 12874 + }, + { + "epoch": 0.8072606433005204, + "grad_norm": 3.5846433639526367, + "learning_rate": 1.8860237813189287e-06, + "loss": 0.903, + "step": 12875 + }, + { + "epoch": 0.8073233431563107, + "grad_norm": 3.3519389629364014, + "learning_rate": 1.8848369778302434e-06, + "loss": 1.2268, + "step": 12876 + }, + { + "epoch": 0.807386043012101, + "grad_norm": 3.154684066772461, + "learning_rate": 1.8836505090112344e-06, + "loss": 1.115, + "step": 12877 + }, + { + "epoch": 0.8074487428678914, + "grad_norm": 3.23319673538208, + "learning_rate": 1.8824643749108318e-06, + "loss": 1.2507, + "step": 12878 + }, + { + "epoch": 0.8075114427236817, + "grad_norm": 3.0801918506622314, + "learning_rate": 1.881278575577955e-06, + "loss": 1.0495, + "step": 12879 + }, + { + "epoch": 0.807574142579472, + "grad_norm": 3.8046982288360596, + "learning_rate": 1.880093111061504e-06, + "loss": 1.0683, + "step": 12880 + }, + { + "epoch": 0.8076368424352623, + "grad_norm": 3.4046669006347656, + "learning_rate": 1.8789079814103673e-06, + "loss": 1.0711, + "step": 12881 + }, + { + "epoch": 0.8076995422910528, + "grad_norm": 3.411184310913086, + "learning_rate": 1.877723186673418e-06, + "loss": 1.1571, + "step": 12882 + }, + { + "epoch": 
0.8077622421468431, + "grad_norm": 3.4476568698883057, + "learning_rate": 1.8765387268995217e-06, + "loss": 1.0334, + "step": 12883 + }, + { + "epoch": 0.8078249420026334, + "grad_norm": 3.552946090698242, + "learning_rate": 1.8753546021375236e-06, + "loss": 1.0297, + "step": 12884 + }, + { + "epoch": 0.8078876418584238, + "grad_norm": 3.2730984687805176, + "learning_rate": 1.8741708124362535e-06, + "loss": 1.1066, + "step": 12885 + }, + { + "epoch": 0.8079503417142141, + "grad_norm": 3.2125115394592285, + "learning_rate": 1.8729873578445368e-06, + "loss": 1.1999, + "step": 12886 + }, + { + "epoch": 0.8080130415700044, + "grad_norm": 3.5332694053649902, + "learning_rate": 1.8718042384111756e-06, + "loss": 1.0209, + "step": 12887 + }, + { + "epoch": 0.8080757414257947, + "grad_norm": 3.092392683029175, + "learning_rate": 1.8706214541849632e-06, + "loss": 1.0144, + "step": 12888 + }, + { + "epoch": 0.8081384412815851, + "grad_norm": 3.2848119735717773, + "learning_rate": 1.8694390052146737e-06, + "loss": 1.0604, + "step": 12889 + }, + { + "epoch": 0.8082011411373754, + "grad_norm": 3.6139297485351562, + "learning_rate": 1.8682568915490785e-06, + "loss": 0.9794, + "step": 12890 + }, + { + "epoch": 0.8082638409931657, + "grad_norm": 3.5685391426086426, + "learning_rate": 1.8670751132369235e-06, + "loss": 1.0089, + "step": 12891 + }, + { + "epoch": 0.808326540848956, + "grad_norm": 3.78778338432312, + "learning_rate": 1.865893670326946e-06, + "loss": 0.9233, + "step": 12892 + }, + { + "epoch": 0.8083892407047464, + "grad_norm": 3.571040630340576, + "learning_rate": 1.8647125628678663e-06, + "loss": 1.1101, + "step": 12893 + }, + { + "epoch": 0.8084519405605367, + "grad_norm": 3.451847553253174, + "learning_rate": 1.8635317909083983e-06, + "loss": 1.0829, + "step": 12894 + }, + { + "epoch": 0.808514640416327, + "grad_norm": 3.484205961227417, + "learning_rate": 1.862351354497235e-06, + "loss": 1.1345, + "step": 12895 + }, + { + "epoch": 0.8085773402721174, + "grad_norm": 3.6636152267456055, + "learning_rate": 1.861171253683054e-06, + "loss": 1.0588, + "step": 12896 + }, + { + "epoch": 0.8086400401279077, + "grad_norm": 3.6009278297424316, + "learning_rate": 1.8599914885145287e-06, + "loss": 1.1062, + "step": 12897 + }, + { + "epoch": 0.808702739983698, + "grad_norm": 3.3380722999572754, + "learning_rate": 1.8588120590403102e-06, + "loss": 0.9491, + "step": 12898 + }, + { + "epoch": 0.8087654398394883, + "grad_norm": 3.25246000289917, + "learning_rate": 1.8576329653090375e-06, + "loss": 1.0577, + "step": 12899 + }, + { + "epoch": 0.8088281396952787, + "grad_norm": 3.202944278717041, + "learning_rate": 1.8564542073693336e-06, + "loss": 0.9894, + "step": 12900 + }, + { + "epoch": 0.808890839551069, + "grad_norm": 3.5427794456481934, + "learning_rate": 1.8552757852698177e-06, + "loss": 1.0629, + "step": 12901 + }, + { + "epoch": 0.8089535394068593, + "grad_norm": 3.450678586959839, + "learning_rate": 1.8540976990590841e-06, + "loss": 1.0691, + "step": 12902 + }, + { + "epoch": 0.8090162392626497, + "grad_norm": 3.91247820854187, + "learning_rate": 1.8529199487857163e-06, + "loss": 0.9883, + "step": 12903 + }, + { + "epoch": 0.80907893911844, + "grad_norm": 3.2910375595092773, + "learning_rate": 1.8517425344982831e-06, + "loss": 1.041, + "step": 12904 + }, + { + "epoch": 0.8091416389742303, + "grad_norm": 3.304962158203125, + "learning_rate": 1.8505654562453469e-06, + "loss": 1.1428, + "step": 12905 + }, + { + "epoch": 0.8092043388300207, + "grad_norm": 3.458916664123535, + "learning_rate": 
1.8493887140754462e-06, + "loss": 1.0482, + "step": 12906 + }, + { + "epoch": 0.8092670386858111, + "grad_norm": 3.7260754108428955, + "learning_rate": 1.8482123080371095e-06, + "loss": 1.1081, + "step": 12907 + }, + { + "epoch": 0.8093297385416014, + "grad_norm": 3.5033047199249268, + "learning_rate": 1.8470362381788542e-06, + "loss": 1.1881, + "step": 12908 + }, + { + "epoch": 0.8093924383973917, + "grad_norm": 2.9929542541503906, + "learning_rate": 1.8458605045491817e-06, + "loss": 1.1643, + "step": 12909 + }, + { + "epoch": 0.809455138253182, + "grad_norm": 3.5893635749816895, + "learning_rate": 1.8446851071965777e-06, + "loss": 1.1172, + "step": 12910 + }, + { + "epoch": 0.8095178381089724, + "grad_norm": 2.9134867191314697, + "learning_rate": 1.8435100461695131e-06, + "loss": 1.2496, + "step": 12911 + }, + { + "epoch": 0.8095805379647627, + "grad_norm": 3.335437536239624, + "learning_rate": 1.842335321516454e-06, + "loss": 1.035, + "step": 12912 + }, + { + "epoch": 0.809643237820553, + "grad_norm": 3.0357537269592285, + "learning_rate": 1.8411609332858415e-06, + "loss": 1.193, + "step": 12913 + }, + { + "epoch": 0.8097059376763434, + "grad_norm": 3.6243560314178467, + "learning_rate": 1.839986881526107e-06, + "loss": 1.0403, + "step": 12914 + }, + { + "epoch": 0.8097686375321337, + "grad_norm": 3.2932357788085938, + "learning_rate": 1.8388131662856712e-06, + "loss": 1.0454, + "step": 12915 + }, + { + "epoch": 0.809831337387924, + "grad_norm": 3.5176548957824707, + "learning_rate": 1.837639787612937e-06, + "loss": 1.1257, + "step": 12916 + }, + { + "epoch": 0.8098940372437143, + "grad_norm": 3.4453938007354736, + "learning_rate": 1.8364667455562945e-06, + "loss": 1.188, + "step": 12917 + }, + { + "epoch": 0.8099567370995047, + "grad_norm": 3.6991686820983887, + "learning_rate": 1.8352940401641173e-06, + "loss": 1.0289, + "step": 12918 + }, + { + "epoch": 0.810019436955295, + "grad_norm": 4.031849384307861, + "learning_rate": 1.8341216714847731e-06, + "loss": 1.0667, + "step": 12919 + }, + { + "epoch": 0.8100821368110853, + "grad_norm": 3.363525867462158, + "learning_rate": 1.8329496395666079e-06, + "loss": 1.1697, + "step": 12920 + }, + { + "epoch": 0.8101448366668756, + "grad_norm": 3.345963478088379, + "learning_rate": 1.8317779444579564e-06, + "loss": 1.0117, + "step": 12921 + }, + { + "epoch": 0.810207536522666, + "grad_norm": 3.5695104598999023, + "learning_rate": 1.8306065862071365e-06, + "loss": 1.0075, + "step": 12922 + }, + { + "epoch": 0.8102702363784563, + "grad_norm": 3.9239885807037354, + "learning_rate": 1.8294355648624607e-06, + "loss": 1.153, + "step": 12923 + }, + { + "epoch": 0.8103329362342466, + "grad_norm": 3.06628155708313, + "learning_rate": 1.8282648804722181e-06, + "loss": 1.0635, + "step": 12924 + }, + { + "epoch": 0.810395636090037, + "grad_norm": 3.584963083267212, + "learning_rate": 1.8270945330846868e-06, + "loss": 1.1318, + "step": 12925 + }, + { + "epoch": 0.8104583359458273, + "grad_norm": 3.429731607437134, + "learning_rate": 1.825924522748136e-06, + "loss": 1.0714, + "step": 12926 + }, + { + "epoch": 0.8105210358016176, + "grad_norm": 3.232433795928955, + "learning_rate": 1.8247548495108148e-06, + "loss": 1.197, + "step": 12927 + }, + { + "epoch": 0.8105837356574079, + "grad_norm": 3.238725185394287, + "learning_rate": 1.8235855134209613e-06, + "loss": 0.9726, + "step": 12928 + }, + { + "epoch": 0.8106464355131984, + "grad_norm": 3.028350591659546, + "learning_rate": 1.8224165145267947e-06, + "loss": 1.2307, + "step": 12929 + }, + { + "epoch": 
0.8107091353689887, + "grad_norm": 2.798006534576416, + "learning_rate": 1.8212478528765321e-06, + "loss": 1.1902, + "step": 12930 + }, + { + "epoch": 0.810771835224779, + "grad_norm": 3.391188144683838, + "learning_rate": 1.8200795285183636e-06, + "loss": 1.0409, + "step": 12931 + }, + { + "epoch": 0.8108345350805694, + "grad_norm": 3.3222291469573975, + "learning_rate": 1.8189115415004744e-06, + "loss": 1.1598, + "step": 12932 + }, + { + "epoch": 0.8108972349363597, + "grad_norm": 3.73832631111145, + "learning_rate": 1.8177438918710267e-06, + "loss": 0.9257, + "step": 12933 + }, + { + "epoch": 0.81095993479215, + "grad_norm": 3.374786138534546, + "learning_rate": 1.816576579678181e-06, + "loss": 1.0024, + "step": 12934 + }, + { + "epoch": 0.8110226346479403, + "grad_norm": 3.5439488887786865, + "learning_rate": 1.8154096049700741e-06, + "loss": 1.2789, + "step": 12935 + }, + { + "epoch": 0.8110853345037307, + "grad_norm": 3.2782559394836426, + "learning_rate": 1.8142429677948303e-06, + "loss": 1.1341, + "step": 12936 + }, + { + "epoch": 0.811148034359521, + "grad_norm": 3.460148572921753, + "learning_rate": 1.8130766682005663e-06, + "loss": 1.0924, + "step": 12937 + }, + { + "epoch": 0.8112107342153113, + "grad_norm": 3.630263566970825, + "learning_rate": 1.811910706235379e-06, + "loss": 1.213, + "step": 12938 + }, + { + "epoch": 0.8112734340711016, + "grad_norm": 2.9460678100585938, + "learning_rate": 1.8107450819473505e-06, + "loss": 1.2165, + "step": 12939 + }, + { + "epoch": 0.811336133926892, + "grad_norm": 3.7052841186523438, + "learning_rate": 1.8095797953845507e-06, + "loss": 1.1333, + "step": 12940 + }, + { + "epoch": 0.8113988337826823, + "grad_norm": 2.9495997428894043, + "learning_rate": 1.8084148465950402e-06, + "loss": 1.1961, + "step": 12941 + }, + { + "epoch": 0.8114615336384726, + "grad_norm": 3.5156612396240234, + "learning_rate": 1.8072502356268595e-06, + "loss": 1.0406, + "step": 12942 + }, + { + "epoch": 0.811524233494263, + "grad_norm": 3.5451855659484863, + "learning_rate": 1.8060859625280357e-06, + "loss": 0.9941, + "step": 12943 + }, + { + "epoch": 0.8115869333500533, + "grad_norm": 3.416264295578003, + "learning_rate": 1.8049220273465828e-06, + "loss": 1.1859, + "step": 12944 + }, + { + "epoch": 0.8116496332058436, + "grad_norm": 3.2337229251861572, + "learning_rate": 1.8037584301305066e-06, + "loss": 1.0809, + "step": 12945 + }, + { + "epoch": 0.8117123330616339, + "grad_norm": 3.2587878704071045, + "learning_rate": 1.80259517092779e-06, + "loss": 1.0561, + "step": 12946 + }, + { + "epoch": 0.8117750329174243, + "grad_norm": 3.4225351810455322, + "learning_rate": 1.8014322497864034e-06, + "loss": 0.9102, + "step": 12947 + }, + { + "epoch": 0.8118377327732146, + "grad_norm": 3.3653204441070557, + "learning_rate": 1.800269666754313e-06, + "loss": 1.1533, + "step": 12948 + }, + { + "epoch": 0.8119004326290049, + "grad_norm": 3.235507011413574, + "learning_rate": 1.799107421879458e-06, + "loss": 1.1361, + "step": 12949 + }, + { + "epoch": 0.8119631324847952, + "grad_norm": 3.381256580352783, + "learning_rate": 1.7979455152097713e-06, + "loss": 1.1901, + "step": 12950 + }, + { + "epoch": 0.8120258323405856, + "grad_norm": 3.408621311187744, + "learning_rate": 1.796783946793168e-06, + "loss": 1.2029, + "step": 12951 + }, + { + "epoch": 0.812088532196376, + "grad_norm": 3.976806163787842, + "learning_rate": 1.7956227166775552e-06, + "loss": 1.0551, + "step": 12952 + }, + { + "epoch": 0.8121512320521663, + "grad_norm": 3.166090726852417, + "learning_rate": 
1.7944618249108204e-06, + "loss": 0.9419, + "step": 12953 + }, + { + "epoch": 0.8122139319079567, + "grad_norm": 3.3251075744628906, + "learning_rate": 1.7933012715408372e-06, + "loss": 1.0699, + "step": 12954 + }, + { + "epoch": 0.812276631763747, + "grad_norm": 3.9005885124206543, + "learning_rate": 1.7921410566154662e-06, + "loss": 1.2023, + "step": 12955 + }, + { + "epoch": 0.8123393316195373, + "grad_norm": 2.9621448516845703, + "learning_rate": 1.7909811801825583e-06, + "loss": 1.1283, + "step": 12956 + }, + { + "epoch": 0.8124020314753276, + "grad_norm": 3.4533607959747314, + "learning_rate": 1.789821642289945e-06, + "loss": 0.9706, + "step": 12957 + }, + { + "epoch": 0.812464731331118, + "grad_norm": 3.2630774974823, + "learning_rate": 1.7886624429854437e-06, + "loss": 1.196, + "step": 12958 + }, + { + "epoch": 0.8125274311869083, + "grad_norm": 3.5871617794036865, + "learning_rate": 1.7875035823168641e-06, + "loss": 0.8972, + "step": 12959 + }, + { + "epoch": 0.8125901310426986, + "grad_norm": 3.1846816539764404, + "learning_rate": 1.7863450603319953e-06, + "loss": 1.1072, + "step": 12960 + }, + { + "epoch": 0.8126528308984889, + "grad_norm": 3.263777732849121, + "learning_rate": 1.7851868770786153e-06, + "loss": 1.1895, + "step": 12961 + }, + { + "epoch": 0.8127155307542793, + "grad_norm": 3.3395016193389893, + "learning_rate": 1.7840290326044851e-06, + "loss": 1.1021, + "step": 12962 + }, + { + "epoch": 0.8127782306100696, + "grad_norm": 2.9642724990844727, + "learning_rate": 1.7828715269573583e-06, + "loss": 1.1728, + "step": 12963 + }, + { + "epoch": 0.8128409304658599, + "grad_norm": 3.36433744430542, + "learning_rate": 1.7817143601849685e-06, + "loss": 1.0565, + "step": 12964 + }, + { + "epoch": 0.8129036303216503, + "grad_norm": 3.220212697982788, + "learning_rate": 1.7805575323350378e-06, + "loss": 1.1682, + "step": 12965 + }, + { + "epoch": 0.8129663301774406, + "grad_norm": 3.2977287769317627, + "learning_rate": 1.77940104345527e-06, + "loss": 1.1098, + "step": 12966 + }, + { + "epoch": 0.8130290300332309, + "grad_norm": 3.3882973194122314, + "learning_rate": 1.7782448935933649e-06, + "loss": 1.2066, + "step": 12967 + }, + { + "epoch": 0.8130917298890212, + "grad_norm": 3.1002209186553955, + "learning_rate": 1.777089082797e-06, + "loss": 1.0818, + "step": 12968 + }, + { + "epoch": 0.8131544297448116, + "grad_norm": 3.0386674404144287, + "learning_rate": 1.7759336111138381e-06, + "loss": 1.1083, + "step": 12969 + }, + { + "epoch": 0.8132171296006019, + "grad_norm": 3.365410327911377, + "learning_rate": 1.7747784785915356e-06, + "loss": 1.1114, + "step": 12970 + }, + { + "epoch": 0.8132798294563922, + "grad_norm": 3.6718060970306396, + "learning_rate": 1.7736236852777267e-06, + "loss": 1.1452, + "step": 12971 + }, + { + "epoch": 0.8133425293121825, + "grad_norm": 3.210641860961914, + "learning_rate": 1.7724692312200376e-06, + "loss": 1.085, + "step": 12972 + }, + { + "epoch": 0.8134052291679729, + "grad_norm": 3.5906481742858887, + "learning_rate": 1.7713151164660736e-06, + "loss": 1.0374, + "step": 12973 + }, + { + "epoch": 0.8134679290237632, + "grad_norm": 3.529632568359375, + "learning_rate": 1.7701613410634367e-06, + "loss": 1.161, + "step": 12974 + }, + { + "epoch": 0.8135306288795536, + "grad_norm": 3.3199100494384766, + "learning_rate": 1.7690079050597043e-06, + "loss": 1.0543, + "step": 12975 + }, + { + "epoch": 0.813593328735344, + "grad_norm": 3.519507646560669, + "learning_rate": 1.7678548085024439e-06, + "loss": 1.0641, + "step": 12976 + }, + { + "epoch": 
0.8136560285911343, + "grad_norm": 3.869516611099243, + "learning_rate": 1.766702051439213e-06, + "loss": 1.0284, + "step": 12977 + }, + { + "epoch": 0.8137187284469246, + "grad_norm": 3.4026007652282715, + "learning_rate": 1.7655496339175483e-06, + "loss": 1.0412, + "step": 12978 + }, + { + "epoch": 0.8137814283027149, + "grad_norm": 3.614166021347046, + "learning_rate": 1.7643975559849768e-06, + "loss": 0.9012, + "step": 12979 + }, + { + "epoch": 0.8138441281585053, + "grad_norm": 2.917928457260132, + "learning_rate": 1.7632458176890077e-06, + "loss": 0.9929, + "step": 12980 + }, + { + "epoch": 0.8139068280142956, + "grad_norm": 2.9367165565490723, + "learning_rate": 1.7620944190771427e-06, + "loss": 0.9205, + "step": 12981 + }, + { + "epoch": 0.8139695278700859, + "grad_norm": 3.3738930225372314, + "learning_rate": 1.760943360196864e-06, + "loss": 1.0928, + "step": 12982 + }, + { + "epoch": 0.8140322277258762, + "grad_norm": 4.24379825592041, + "learning_rate": 1.7597926410956412e-06, + "loss": 0.9621, + "step": 12983 + }, + { + "epoch": 0.8140949275816666, + "grad_norm": 3.018653154373169, + "learning_rate": 1.7586422618209264e-06, + "loss": 1.0452, + "step": 12984 + }, + { + "epoch": 0.8141576274374569, + "grad_norm": 3.4470865726470947, + "learning_rate": 1.7574922224201674e-06, + "loss": 1.0661, + "step": 12985 + }, + { + "epoch": 0.8142203272932472, + "grad_norm": 3.2006635665893555, + "learning_rate": 1.7563425229407892e-06, + "loss": 1.1887, + "step": 12986 + }, + { + "epoch": 0.8142830271490376, + "grad_norm": 3.379730224609375, + "learning_rate": 1.7551931634302034e-06, + "loss": 1.005, + "step": 12987 + }, + { + "epoch": 0.8143457270048279, + "grad_norm": 3.6526052951812744, + "learning_rate": 1.7540441439358124e-06, + "loss": 1.0936, + "step": 12988 + }, + { + "epoch": 0.8144084268606182, + "grad_norm": 3.411067485809326, + "learning_rate": 1.7528954645050024e-06, + "loss": 1.1388, + "step": 12989 + }, + { + "epoch": 0.8144711267164085, + "grad_norm": 3.5664405822753906, + "learning_rate": 1.7517471251851437e-06, + "loss": 1.1028, + "step": 12990 + }, + { + "epoch": 0.8145338265721989, + "grad_norm": 3.484246015548706, + "learning_rate": 1.750599126023591e-06, + "loss": 1.0965, + "step": 12991 + }, + { + "epoch": 0.8145965264279892, + "grad_norm": 3.183166265487671, + "learning_rate": 1.7494514670676931e-06, + "loss": 1.1175, + "step": 12992 + }, + { + "epoch": 0.8146592262837795, + "grad_norm": 3.3708348274230957, + "learning_rate": 1.7483041483647767e-06, + "loss": 1.0912, + "step": 12993 + }, + { + "epoch": 0.8147219261395698, + "grad_norm": 3.3052022457122803, + "learning_rate": 1.747157169962157e-06, + "loss": 0.9957, + "step": 12994 + }, + { + "epoch": 0.8147846259953602, + "grad_norm": 3.337641716003418, + "learning_rate": 1.7460105319071353e-06, + "loss": 1.0594, + "step": 12995 + }, + { + "epoch": 0.8148473258511505, + "grad_norm": 3.1040077209472656, + "learning_rate": 1.7448642342470002e-06, + "loss": 1.054, + "step": 12996 + }, + { + "epoch": 0.8149100257069408, + "grad_norm": 3.3507471084594727, + "learning_rate": 1.7437182770290262e-06, + "loss": 1.2028, + "step": 12997 + }, + { + "epoch": 0.8149727255627313, + "grad_norm": 3.37233829498291, + "learning_rate": 1.7425726603004678e-06, + "loss": 0.9793, + "step": 12998 + }, + { + "epoch": 0.8150354254185216, + "grad_norm": 3.5362958908081055, + "learning_rate": 1.741427384108576e-06, + "loss": 0.9945, + "step": 12999 + }, + { + "epoch": 0.8150981252743119, + "grad_norm": 3.6466407775878906, + "learning_rate": 
1.74028244850058e-06, + "loss": 1.1236, + "step": 13000 + }, + { + "epoch": 0.8150981252743119, + "eval_loss": 1.0969406366348267, + "eval_runtime": 144.0213, + "eval_samples_per_second": 4.374, + "eval_steps_per_second": 1.097, + "step": 13000 + }, + { + "epoch": 0.8151608251301022, + "grad_norm": 2.9916889667510986, + "learning_rate": 1.739137853523696e-06, + "loss": 1.1457, + "step": 13001 + }, + { + "epoch": 0.8152235249858926, + "grad_norm": 3.584688425064087, + "learning_rate": 1.7379935992251252e-06, + "loss": 1.1005, + "step": 13002 + }, + { + "epoch": 0.8152862248416829, + "grad_norm": 3.656390428543091, + "learning_rate": 1.7368496856520612e-06, + "loss": 1.0788, + "step": 13003 + }, + { + "epoch": 0.8153489246974732, + "grad_norm": 3.142181396484375, + "learning_rate": 1.7357061128516784e-06, + "loss": 1.2124, + "step": 13004 + }, + { + "epoch": 0.8154116245532635, + "grad_norm": 3.1818974018096924, + "learning_rate": 1.7345628808711345e-06, + "loss": 1.0707, + "step": 13005 + }, + { + "epoch": 0.8154743244090539, + "grad_norm": 3.5113532543182373, + "learning_rate": 1.7334199897575776e-06, + "loss": 1.1659, + "step": 13006 + }, + { + "epoch": 0.8155370242648442, + "grad_norm": 3.7669200897216797, + "learning_rate": 1.7322774395581432e-06, + "loss": 1.1179, + "step": 13007 + }, + { + "epoch": 0.8155997241206345, + "grad_norm": 3.254647970199585, + "learning_rate": 1.731135230319948e-06, + "loss": 1.1318, + "step": 13008 + }, + { + "epoch": 0.8156624239764249, + "grad_norm": 3.3975908756256104, + "learning_rate": 1.7299933620900945e-06, + "loss": 0.977, + "step": 13009 + }, + { + "epoch": 0.8157251238322152, + "grad_norm": 3.3809847831726074, + "learning_rate": 1.728851834915679e-06, + "loss": 1.1977, + "step": 13010 + }, + { + "epoch": 0.8157878236880055, + "grad_norm": 3.3594088554382324, + "learning_rate": 1.7277106488437745e-06, + "loss": 1.0815, + "step": 13011 + }, + { + "epoch": 0.8158505235437958, + "grad_norm": 3.609550952911377, + "learning_rate": 1.7265698039214441e-06, + "loss": 1.0628, + "step": 13012 + }, + { + "epoch": 0.8159132233995862, + "grad_norm": 3.363213539123535, + "learning_rate": 1.7254293001957345e-06, + "loss": 1.156, + "step": 13013 + }, + { + "epoch": 0.8159759232553765, + "grad_norm": 3.332603693008423, + "learning_rate": 1.724289137713684e-06, + "loss": 0.9116, + "step": 13014 + }, + { + "epoch": 0.8160386231111668, + "grad_norm": 3.7589519023895264, + "learning_rate": 1.7231493165223102e-06, + "loss": 1.0711, + "step": 13015 + }, + { + "epoch": 0.8161013229669571, + "grad_norm": 3.268049716949463, + "learning_rate": 1.72200983666862e-06, + "loss": 1.2411, + "step": 13016 + }, + { + "epoch": 0.8161640228227475, + "grad_norm": 3.6766693592071533, + "learning_rate": 1.7208706981996037e-06, + "loss": 1.2813, + "step": 13017 + }, + { + "epoch": 0.8162267226785378, + "grad_norm": 3.1166317462921143, + "learning_rate": 1.7197319011622437e-06, + "loss": 1.0318, + "step": 13018 + }, + { + "epoch": 0.8162894225343281, + "grad_norm": 3.4735300540924072, + "learning_rate": 1.7185934456035025e-06, + "loss": 1.0522, + "step": 13019 + }, + { + "epoch": 0.8163521223901185, + "grad_norm": 2.954758644104004, + "learning_rate": 1.7174553315703256e-06, + "loss": 1.2017, + "step": 13020 + }, + { + "epoch": 0.8164148222459089, + "grad_norm": 3.1535451412200928, + "learning_rate": 1.7163175591096559e-06, + "loss": 1.1485, + "step": 13021 + }, + { + "epoch": 0.8164775221016992, + "grad_norm": 3.357151985168457, + "learning_rate": 1.715180128268411e-06, + "loss": 1.0122, 
+ "step": 13022 + }, + { + "epoch": 0.8165402219574895, + "grad_norm": 3.1264026165008545, + "learning_rate": 1.7140430390935003e-06, + "loss": 1.1105, + "step": 13023 + }, + { + "epoch": 0.8166029218132799, + "grad_norm": 3.821465492248535, + "learning_rate": 1.712906291631814e-06, + "loss": 1.0693, + "step": 13024 + }, + { + "epoch": 0.8166656216690702, + "grad_norm": 3.272348403930664, + "learning_rate": 1.7117698859302357e-06, + "loss": 1.0459, + "step": 13025 + }, + { + "epoch": 0.8167283215248605, + "grad_norm": 3.1786327362060547, + "learning_rate": 1.71063382203563e-06, + "loss": 1.218, + "step": 13026 + }, + { + "epoch": 0.8167910213806509, + "grad_norm": 3.3707656860351562, + "learning_rate": 1.709498099994844e-06, + "loss": 1.0711, + "step": 13027 + }, + { + "epoch": 0.8168537212364412, + "grad_norm": 3.800795078277588, + "learning_rate": 1.7083627198547215e-06, + "loss": 1.0307, + "step": 13028 + }, + { + "epoch": 0.8169164210922315, + "grad_norm": 3.269986867904663, + "learning_rate": 1.7072276816620825e-06, + "loss": 1.127, + "step": 13029 + }, + { + "epoch": 0.8169791209480218, + "grad_norm": 3.4575512409210205, + "learning_rate": 1.7060929854637365e-06, + "loss": 1.0111, + "step": 13030 + }, + { + "epoch": 0.8170418208038122, + "grad_norm": 3.3519527912139893, + "learning_rate": 1.704958631306476e-06, + "loss": 1.1294, + "step": 13031 + }, + { + "epoch": 0.8171045206596025, + "grad_norm": 3.501396417617798, + "learning_rate": 1.7038246192370845e-06, + "loss": 0.957, + "step": 13032 + }, + { + "epoch": 0.8171672205153928, + "grad_norm": 3.4196622371673584, + "learning_rate": 1.702690949302329e-06, + "loss": 1.0612, + "step": 13033 + }, + { + "epoch": 0.8172299203711831, + "grad_norm": 3.854229211807251, + "learning_rate": 1.7015576215489615e-06, + "loss": 1.0033, + "step": 13034 + }, + { + "epoch": 0.8172926202269735, + "grad_norm": 3.4749794006347656, + "learning_rate": 1.7004246360237176e-06, + "loss": 1.0674, + "step": 13035 + }, + { + "epoch": 0.8173553200827638, + "grad_norm": 3.4521636962890625, + "learning_rate": 1.6992919927733253e-06, + "loss": 1.0471, + "step": 13036 + }, + { + "epoch": 0.8174180199385541, + "grad_norm": 3.1905829906463623, + "learning_rate": 1.6981596918444953e-06, + "loss": 1.0834, + "step": 13037 + }, + { + "epoch": 0.8174807197943444, + "grad_norm": 3.3424553871154785, + "learning_rate": 1.697027733283918e-06, + "loss": 1.1657, + "step": 13038 + }, + { + "epoch": 0.8175434196501348, + "grad_norm": 3.314169406890869, + "learning_rate": 1.6958961171382838e-06, + "loss": 1.1819, + "step": 13039 + }, + { + "epoch": 0.8176061195059251, + "grad_norm": 3.430607557296753, + "learning_rate": 1.6947648434542552e-06, + "loss": 1.0477, + "step": 13040 + }, + { + "epoch": 0.8176688193617154, + "grad_norm": 3.0549862384796143, + "learning_rate": 1.6936339122784872e-06, + "loss": 1.1605, + "step": 13041 + }, + { + "epoch": 0.8177315192175058, + "grad_norm": 3.3352103233337402, + "learning_rate": 1.692503323657617e-06, + "loss": 0.9425, + "step": 13042 + }, + { + "epoch": 0.8177942190732961, + "grad_norm": 3.134397506713867, + "learning_rate": 1.6913730776382765e-06, + "loss": 1.1466, + "step": 13043 + }, + { + "epoch": 0.8178569189290865, + "grad_norm": 4.035140514373779, + "learning_rate": 1.690243174267071e-06, + "loss": 0.9304, + "step": 13044 + }, + { + "epoch": 0.8179196187848768, + "grad_norm": 3.3682525157928467, + "learning_rate": 1.6891136135906005e-06, + "loss": 1.1782, + "step": 13045 + }, + { + "epoch": 0.8179823186406672, + "grad_norm": 
3.230006456375122, + "learning_rate": 1.6879843956554453e-06, + "loss": 1.0476, + "step": 13046 + }, + { + "epoch": 0.8180450184964575, + "grad_norm": 3.2239551544189453, + "learning_rate": 1.6868555205081794e-06, + "loss": 1.3063, + "step": 13047 + }, + { + "epoch": 0.8181077183522478, + "grad_norm": 3.262982130050659, + "learning_rate": 1.6857269881953543e-06, + "loss": 1.0225, + "step": 13048 + }, + { + "epoch": 0.8181704182080382, + "grad_norm": 3.345033884048462, + "learning_rate": 1.6845987987635094e-06, + "loss": 1.1083, + "step": 13049 + }, + { + "epoch": 0.8182331180638285, + "grad_norm": 3.349043846130371, + "learning_rate": 1.6834709522591753e-06, + "loss": 1.1181, + "step": 13050 + }, + { + "epoch": 0.8182958179196188, + "grad_norm": 3.6747689247131348, + "learning_rate": 1.6823434487288626e-06, + "loss": 1.0854, + "step": 13051 + }, + { + "epoch": 0.8183585177754091, + "grad_norm": 3.276010513305664, + "learning_rate": 1.6812162882190687e-06, + "loss": 1.2322, + "step": 13052 + }, + { + "epoch": 0.8184212176311995, + "grad_norm": 3.587217330932617, + "learning_rate": 1.680089470776276e-06, + "loss": 1.213, + "step": 13053 + }, + { + "epoch": 0.8184839174869898, + "grad_norm": 3.129791259765625, + "learning_rate": 1.67896299644696e-06, + "loss": 1.003, + "step": 13054 + }, + { + "epoch": 0.8185466173427801, + "grad_norm": 3.198582172393799, + "learning_rate": 1.6778368652775723e-06, + "loss": 1.1073, + "step": 13055 + }, + { + "epoch": 0.8186093171985704, + "grad_norm": 2.914480686187744, + "learning_rate": 1.6767110773145567e-06, + "loss": 1.0721, + "step": 13056 + }, + { + "epoch": 0.8186720170543608, + "grad_norm": 3.604888439178467, + "learning_rate": 1.6755856326043373e-06, + "loss": 1.1806, + "step": 13057 + }, + { + "epoch": 0.8187347169101511, + "grad_norm": 3.546494245529175, + "learning_rate": 1.6744605311933316e-06, + "loss": 0.991, + "step": 13058 + }, + { + "epoch": 0.8187974167659414, + "grad_norm": 3.351048231124878, + "learning_rate": 1.6733357731279375e-06, + "loss": 1.0847, + "step": 13059 + }, + { + "epoch": 0.8188601166217317, + "grad_norm": 3.35669207572937, + "learning_rate": 1.6722113584545373e-06, + "loss": 1.0886, + "step": 13060 + }, + { + "epoch": 0.8189228164775221, + "grad_norm": 3.2415478229522705, + "learning_rate": 1.6710872872195072e-06, + "loss": 1.0685, + "step": 13061 + }, + { + "epoch": 0.8189855163333124, + "grad_norm": 3.3132429122924805, + "learning_rate": 1.669963559469201e-06, + "loss": 0.9759, + "step": 13062 + }, + { + "epoch": 0.8190482161891027, + "grad_norm": 3.4493021965026855, + "learning_rate": 1.6688401752499606e-06, + "loss": 1.1269, + "step": 13063 + }, + { + "epoch": 0.8191109160448931, + "grad_norm": 3.3445444107055664, + "learning_rate": 1.6677171346081134e-06, + "loss": 1.2016, + "step": 13064 + }, + { + "epoch": 0.8191736159006834, + "grad_norm": 2.999696731567383, + "learning_rate": 1.6665944375899778e-06, + "loss": 1.1093, + "step": 13065 + }, + { + "epoch": 0.8192363157564737, + "grad_norm": 3.5054445266723633, + "learning_rate": 1.6654720842418516e-06, + "loss": 1.1813, + "step": 13066 + }, + { + "epoch": 0.819299015612264, + "grad_norm": 3.4508609771728516, + "learning_rate": 1.6643500746100205e-06, + "loss": 1.0794, + "step": 13067 + }, + { + "epoch": 0.8193617154680545, + "grad_norm": 3.4665462970733643, + "learning_rate": 1.6632284087407546e-06, + "loss": 1.049, + "step": 13068 + }, + { + "epoch": 0.8194244153238448, + "grad_norm": 3.3611092567443848, + "learning_rate": 1.6621070866803146e-06, + "loss": 1.2742, + 
"step": 13069 + }, + { + "epoch": 0.8194871151796351, + "grad_norm": 3.4079720973968506, + "learning_rate": 1.6609861084749425e-06, + "loss": 1.1672, + "step": 13070 + }, + { + "epoch": 0.8195498150354255, + "grad_norm": 3.164388656616211, + "learning_rate": 1.6598654741708653e-06, + "loss": 1.2947, + "step": 13071 + }, + { + "epoch": 0.8196125148912158, + "grad_norm": 3.4728493690490723, + "learning_rate": 1.6587451838143032e-06, + "loss": 1.2102, + "step": 13072 + }, + { + "epoch": 0.8196752147470061, + "grad_norm": 3.147494077682495, + "learning_rate": 1.6576252374514535e-06, + "loss": 1.0409, + "step": 13073 + }, + { + "epoch": 0.8197379146027964, + "grad_norm": 3.2863552570343018, + "learning_rate": 1.6565056351285025e-06, + "loss": 1.1565, + "step": 13074 + }, + { + "epoch": 0.8198006144585868, + "grad_norm": 3.8420941829681396, + "learning_rate": 1.6553863768916222e-06, + "loss": 1.1585, + "step": 13075 + }, + { + "epoch": 0.8198633143143771, + "grad_norm": 3.3836309909820557, + "learning_rate": 1.6542674627869738e-06, + "loss": 1.0231, + "step": 13076 + }, + { + "epoch": 0.8199260141701674, + "grad_norm": 3.684368371963501, + "learning_rate": 1.6531488928607009e-06, + "loss": 1.1359, + "step": 13077 + }, + { + "epoch": 0.8199887140259577, + "grad_norm": 3.618478298187256, + "learning_rate": 1.6520306671589281e-06, + "loss": 0.9227, + "step": 13078 + }, + { + "epoch": 0.8200514138817481, + "grad_norm": 3.3457746505737305, + "learning_rate": 1.6509127857277784e-06, + "loss": 1.1327, + "step": 13079 + }, + { + "epoch": 0.8201141137375384, + "grad_norm": 3.644399642944336, + "learning_rate": 1.6497952486133505e-06, + "loss": 1.2232, + "step": 13080 + }, + { + "epoch": 0.8201768135933287, + "grad_norm": 3.4860777854919434, + "learning_rate": 1.64867805586173e-06, + "loss": 0.9728, + "step": 13081 + }, + { + "epoch": 0.820239513449119, + "grad_norm": 3.4716527462005615, + "learning_rate": 1.6475612075189895e-06, + "loss": 1.0091, + "step": 13082 + }, + { + "epoch": 0.8203022133049094, + "grad_norm": 3.1658077239990234, + "learning_rate": 1.6464447036311925e-06, + "loss": 1.0226, + "step": 13083 + }, + { + "epoch": 0.8203649131606997, + "grad_norm": 3.4596495628356934, + "learning_rate": 1.64532854424438e-06, + "loss": 1.1208, + "step": 13084 + }, + { + "epoch": 0.82042761301649, + "grad_norm": 3.599207639694214, + "learning_rate": 1.644212729404583e-06, + "loss": 1.0452, + "step": 13085 + }, + { + "epoch": 0.8204903128722804, + "grad_norm": 3.2665178775787354, + "learning_rate": 1.6430972591578155e-06, + "loss": 1.0442, + "step": 13086 + }, + { + "epoch": 0.8205530127280707, + "grad_norm": 3.435330867767334, + "learning_rate": 1.6419821335500852e-06, + "loss": 1.1346, + "step": 13087 + }, + { + "epoch": 0.820615712583861, + "grad_norm": 3.4698715209960938, + "learning_rate": 1.6408673526273765e-06, + "loss": 1.0506, + "step": 13088 + }, + { + "epoch": 0.8206784124396513, + "grad_norm": 3.3976902961730957, + "learning_rate": 1.6397529164356606e-06, + "loss": 1.189, + "step": 13089 + }, + { + "epoch": 0.8207411122954417, + "grad_norm": 3.5828864574432373, + "learning_rate": 1.6386388250209018e-06, + "loss": 1.0847, + "step": 13090 + }, + { + "epoch": 0.8208038121512321, + "grad_norm": 3.843015193939209, + "learning_rate": 1.637525078429043e-06, + "loss": 1.027, + "step": 13091 + }, + { + "epoch": 0.8208665120070224, + "grad_norm": 3.408843517303467, + "learning_rate": 1.6364116767060146e-06, + "loss": 1.0544, + "step": 13092 + }, + { + "epoch": 0.8209292118628128, + "grad_norm": 
3.3531923294067383, + "learning_rate": 1.6352986198977327e-06, + "loss": 1.0633, + "step": 13093 + }, + { + "epoch": 0.8209919117186031, + "grad_norm": 3.331960678100586, + "learning_rate": 1.634185908050102e-06, + "loss": 1.212, + "step": 13094 + }, + { + "epoch": 0.8210546115743934, + "grad_norm": 3.500051259994507, + "learning_rate": 1.6330735412090114e-06, + "loss": 1.1946, + "step": 13095 + }, + { + "epoch": 0.8211173114301837, + "grad_norm": 3.4184305667877197, + "learning_rate": 1.631961519420332e-06, + "loss": 0.8796, + "step": 13096 + }, + { + "epoch": 0.8211800112859741, + "grad_norm": 3.293689489364624, + "learning_rate": 1.6308498427299235e-06, + "loss": 1.1682, + "step": 13097 + }, + { + "epoch": 0.8212427111417644, + "grad_norm": 3.679661989212036, + "learning_rate": 1.629738511183635e-06, + "loss": 1.075, + "step": 13098 + }, + { + "epoch": 0.8213054109975547, + "grad_norm": 3.0819904804229736, + "learning_rate": 1.6286275248272954e-06, + "loss": 1.1235, + "step": 13099 + }, + { + "epoch": 0.821368110853345, + "grad_norm": 3.123936653137207, + "learning_rate": 1.62751688370672e-06, + "loss": 0.8444, + "step": 13100 + }, + { + "epoch": 0.8214308107091354, + "grad_norm": 3.197782278060913, + "learning_rate": 1.6264065878677171e-06, + "loss": 1.179, + "step": 13101 + }, + { + "epoch": 0.8214935105649257, + "grad_norm": 3.2781522274017334, + "learning_rate": 1.625296637356072e-06, + "loss": 1.0422, + "step": 13102 + }, + { + "epoch": 0.821556210420716, + "grad_norm": 3.5766899585723877, + "learning_rate": 1.624187032217559e-06, + "loss": 1.1367, + "step": 13103 + }, + { + "epoch": 0.8216189102765064, + "grad_norm": 3.556516408920288, + "learning_rate": 1.6230777724979364e-06, + "loss": 1.0584, + "step": 13104 + }, + { + "epoch": 0.8216816101322967, + "grad_norm": 3.0781357288360596, + "learning_rate": 1.6219688582429538e-06, + "loss": 1.242, + "step": 13105 + }, + { + "epoch": 0.821744309988087, + "grad_norm": 3.3915750980377197, + "learning_rate": 1.6208602894983428e-06, + "loss": 1.2205, + "step": 13106 + }, + { + "epoch": 0.8218070098438773, + "grad_norm": 3.3724734783172607, + "learning_rate": 1.6197520663098177e-06, + "loss": 0.9556, + "step": 13107 + }, + { + "epoch": 0.8218697096996677, + "grad_norm": 3.4982268810272217, + "learning_rate": 1.618644188723082e-06, + "loss": 1.0307, + "step": 13108 + }, + { + "epoch": 0.821932409555458, + "grad_norm": 3.254556894302368, + "learning_rate": 1.617536656783828e-06, + "loss": 1.2167, + "step": 13109 + }, + { + "epoch": 0.8219951094112483, + "grad_norm": 3.5627729892730713, + "learning_rate": 1.6164294705377292e-06, + "loss": 1.154, + "step": 13110 + }, + { + "epoch": 0.8220578092670386, + "grad_norm": 3.857178211212158, + "learning_rate": 1.6153226300304415e-06, + "loss": 1.2097, + "step": 13111 + }, + { + "epoch": 0.822120509122829, + "grad_norm": 3.215930700302124, + "learning_rate": 1.614216135307618e-06, + "loss": 1.2058, + "step": 13112 + }, + { + "epoch": 0.8221832089786193, + "grad_norm": 3.5861589908599854, + "learning_rate": 1.613109986414887e-06, + "loss": 1.0865, + "step": 13113 + }, + { + "epoch": 0.8222459088344097, + "grad_norm": 3.380044937133789, + "learning_rate": 1.6120041833978662e-06, + "loss": 1.0527, + "step": 13114 + }, + { + "epoch": 0.8223086086902001, + "grad_norm": 3.3723227977752686, + "learning_rate": 1.610898726302157e-06, + "loss": 1.151, + "step": 13115 + }, + { + "epoch": 0.8223713085459904, + "grad_norm": 3.2330172061920166, + "learning_rate": 1.6097936151733528e-06, + "loss": 1.073, + "step": 
13116 + }, + { + "epoch": 0.8224340084017807, + "grad_norm": 3.4016306400299072, + "learning_rate": 1.6086888500570275e-06, + "loss": 1.0001, + "step": 13117 + }, + { + "epoch": 0.822496708257571, + "grad_norm": 3.6230599880218506, + "learning_rate": 1.6075844309987388e-06, + "loss": 1.0868, + "step": 13118 + }, + { + "epoch": 0.8225594081133614, + "grad_norm": 3.8287477493286133, + "learning_rate": 1.6064803580440335e-06, + "loss": 1.0446, + "step": 13119 + }, + { + "epoch": 0.8226221079691517, + "grad_norm": 3.7967631816864014, + "learning_rate": 1.6053766312384467e-06, + "loss": 1.0714, + "step": 13120 + }, + { + "epoch": 0.822684807824942, + "grad_norm": 3.0097665786743164, + "learning_rate": 1.6042732506274939e-06, + "loss": 1.2157, + "step": 13121 + }, + { + "epoch": 0.8227475076807323, + "grad_norm": 3.507483720779419, + "learning_rate": 1.6031702162566765e-06, + "loss": 1.0028, + "step": 13122 + }, + { + "epoch": 0.8228102075365227, + "grad_norm": 3.3294639587402344, + "learning_rate": 1.6020675281714893e-06, + "loss": 1.2105, + "step": 13123 + }, + { + "epoch": 0.822872907392313, + "grad_norm": 3.7015979290008545, + "learning_rate": 1.6009651864174036e-06, + "loss": 0.9137, + "step": 13124 + }, + { + "epoch": 0.8229356072481033, + "grad_norm": 3.147740125656128, + "learning_rate": 1.59986319103988e-06, + "loss": 1.0982, + "step": 13125 + }, + { + "epoch": 0.8229983071038937, + "grad_norm": 3.7584457397460938, + "learning_rate": 1.5987615420843627e-06, + "loss": 1.1331, + "step": 13126 + }, + { + "epoch": 0.823061006959684, + "grad_norm": 3.5054781436920166, + "learning_rate": 1.5976602395962892e-06, + "loss": 1.1352, + "step": 13127 + }, + { + "epoch": 0.8231237068154743, + "grad_norm": 3.353750228881836, + "learning_rate": 1.596559283621074e-06, + "loss": 1.1356, + "step": 13128 + }, + { + "epoch": 0.8231864066712646, + "grad_norm": 3.649473190307617, + "learning_rate": 1.5954586742041212e-06, + "loss": 0.9802, + "step": 13129 + }, + { + "epoch": 0.823249106527055, + "grad_norm": 3.5623912811279297, + "learning_rate": 1.5943584113908173e-06, + "loss": 1.1149, + "step": 13130 + }, + { + "epoch": 0.8233118063828453, + "grad_norm": 3.2819671630859375, + "learning_rate": 1.5932584952265417e-06, + "loss": 0.974, + "step": 13131 + }, + { + "epoch": 0.8233745062386356, + "grad_norm": 3.6119906902313232, + "learning_rate": 1.5921589257566538e-06, + "loss": 0.9788, + "step": 13132 + }, + { + "epoch": 0.8234372060944259, + "grad_norm": 3.3420214653015137, + "learning_rate": 1.5910597030264962e-06, + "loss": 1.2517, + "step": 13133 + }, + { + "epoch": 0.8234999059502163, + "grad_norm": 3.627927780151367, + "learning_rate": 1.5899608270814049e-06, + "loss": 1.036, + "step": 13134 + }, + { + "epoch": 0.8235626058060066, + "grad_norm": 3.3754830360412598, + "learning_rate": 1.5888622979666978e-06, + "loss": 1.0603, + "step": 13135 + }, + { + "epoch": 0.8236253056617969, + "grad_norm": 3.192678689956665, + "learning_rate": 1.5877641157276757e-06, + "loss": 1.178, + "step": 13136 + }, + { + "epoch": 0.8236880055175874, + "grad_norm": 3.3584253787994385, + "learning_rate": 1.5866662804096277e-06, + "loss": 0.9278, + "step": 13137 + }, + { + "epoch": 0.8237507053733777, + "grad_norm": 3.5735111236572266, + "learning_rate": 1.5855687920578312e-06, + "loss": 1.0494, + "step": 13138 + }, + { + "epoch": 0.823813405229168, + "grad_norm": 3.395270347595215, + "learning_rate": 1.584471650717545e-06, + "loss": 1.1473, + "step": 13139 + }, + { + "epoch": 0.8238761050849583, + "grad_norm": 
3.811739921569824, + "learning_rate": 1.5833748564340136e-06, + "loss": 0.9984, + "step": 13140 + }, + { + "epoch": 0.8239388049407487, + "grad_norm": 3.4726970195770264, + "learning_rate": 1.5822784092524734e-06, + "loss": 1.0741, + "step": 13141 + }, + { + "epoch": 0.824001504796539, + "grad_norm": 3.4439756870269775, + "learning_rate": 1.5811823092181378e-06, + "loss": 1.1285, + "step": 13142 + }, + { + "epoch": 0.8240642046523293, + "grad_norm": 3.3269214630126953, + "learning_rate": 1.5800865563762125e-06, + "loss": 1.1617, + "step": 13143 + }, + { + "epoch": 0.8241269045081197, + "grad_norm": 3.637434482574463, + "learning_rate": 1.5789911507718824e-06, + "loss": 0.8878, + "step": 13144 + }, + { + "epoch": 0.82418960436391, + "grad_norm": 3.4160144329071045, + "learning_rate": 1.5778960924503284e-06, + "loss": 1.2458, + "step": 13145 + }, + { + "epoch": 0.8242523042197003, + "grad_norm": 3.321747064590454, + "learning_rate": 1.5768013814567063e-06, + "loss": 1.0698, + "step": 13146 + }, + { + "epoch": 0.8243150040754906, + "grad_norm": 3.8917956352233887, + "learning_rate": 1.5757070178361633e-06, + "loss": 1.1624, + "step": 13147 + }, + { + "epoch": 0.824377703931281, + "grad_norm": 3.235285758972168, + "learning_rate": 1.574613001633829e-06, + "loss": 1.0692, + "step": 13148 + }, + { + "epoch": 0.8244404037870713, + "grad_norm": 3.415121078491211, + "learning_rate": 1.573519332894824e-06, + "loss": 1.1474, + "step": 13149 + }, + { + "epoch": 0.8245031036428616, + "grad_norm": 3.3554792404174805, + "learning_rate": 1.5724260116642498e-06, + "loss": 1.0284, + "step": 13150 + }, + { + "epoch": 0.8245658034986519, + "grad_norm": 3.2709665298461914, + "learning_rate": 1.5713330379871927e-06, + "loss": 1.0503, + "step": 13151 + }, + { + "epoch": 0.8246285033544423, + "grad_norm": 3.1339285373687744, + "learning_rate": 1.5702404119087323e-06, + "loss": 1.171, + "step": 13152 + }, + { + "epoch": 0.8246912032102326, + "grad_norm": 3.4672138690948486, + "learning_rate": 1.5691481334739244e-06, + "loss": 1.2377, + "step": 13153 + }, + { + "epoch": 0.8247539030660229, + "grad_norm": 3.807060718536377, + "learning_rate": 1.5680562027278156e-06, + "loss": 1.0776, + "step": 13154 + }, + { + "epoch": 0.8248166029218132, + "grad_norm": 3.2245254516601562, + "learning_rate": 1.5669646197154354e-06, + "loss": 1.0757, + "step": 13155 + }, + { + "epoch": 0.8248793027776036, + "grad_norm": 3.459207057952881, + "learning_rate": 1.5658733844818052e-06, + "loss": 1.0781, + "step": 13156 + }, + { + "epoch": 0.8249420026333939, + "grad_norm": 3.3112566471099854, + "learning_rate": 1.564782497071925e-06, + "loss": 1.0885, + "step": 13157 + }, + { + "epoch": 0.8250047024891842, + "grad_norm": 3.2563962936401367, + "learning_rate": 1.5636919575307819e-06, + "loss": 1.1236, + "step": 13158 + }, + { + "epoch": 0.8250674023449746, + "grad_norm": 3.1938655376434326, + "learning_rate": 1.5626017659033499e-06, + "loss": 1.1339, + "step": 13159 + }, + { + "epoch": 0.825130102200765, + "grad_norm": 3.2843568325042725, + "learning_rate": 1.561511922234591e-06, + "loss": 1.0412, + "step": 13160 + }, + { + "epoch": 0.8251928020565553, + "grad_norm": 3.8520216941833496, + "learning_rate": 1.560422426569449e-06, + "loss": 0.9355, + "step": 13161 + }, + { + "epoch": 0.8252555019123456, + "grad_norm": 3.5679078102111816, + "learning_rate": 1.5593332789528526e-06, + "loss": 1.1905, + "step": 13162 + }, + { + "epoch": 0.825318201768136, + "grad_norm": 3.6584932804107666, + "learning_rate": 1.5582444794297225e-06, + "loss": 
1.1579, + "step": 13163 + }, + { + "epoch": 0.8253809016239263, + "grad_norm": 3.0737571716308594, + "learning_rate": 1.5571560280449582e-06, + "loss": 1.1075, + "step": 13164 + }, + { + "epoch": 0.8254436014797166, + "grad_norm": 3.474762201309204, + "learning_rate": 1.5560679248434484e-06, + "loss": 1.1337, + "step": 13165 + }, + { + "epoch": 0.825506301335507, + "grad_norm": 3.2974367141723633, + "learning_rate": 1.5549801698700639e-06, + "loss": 1.1138, + "step": 13166 + }, + { + "epoch": 0.8255690011912973, + "grad_norm": 3.4530868530273438, + "learning_rate": 1.5538927631696687e-06, + "loss": 1.0124, + "step": 13167 + }, + { + "epoch": 0.8256317010470876, + "grad_norm": 3.168592691421509, + "learning_rate": 1.5528057047871049e-06, + "loss": 1.0063, + "step": 13168 + }, + { + "epoch": 0.8256944009028779, + "grad_norm": 3.3730950355529785, + "learning_rate": 1.5517189947672018e-06, + "loss": 1.1099, + "step": 13169 + }, + { + "epoch": 0.8257571007586683, + "grad_norm": 3.5333633422851562, + "learning_rate": 1.5506326331547749e-06, + "loss": 1.0085, + "step": 13170 + }, + { + "epoch": 0.8258198006144586, + "grad_norm": 3.477710247039795, + "learning_rate": 1.5495466199946286e-06, + "loss": 1.115, + "step": 13171 + }, + { + "epoch": 0.8258825004702489, + "grad_norm": 3.2292096614837646, + "learning_rate": 1.5484609553315499e-06, + "loss": 1.1242, + "step": 13172 + }, + { + "epoch": 0.8259452003260392, + "grad_norm": 3.8007423877716064, + "learning_rate": 1.5473756392103067e-06, + "loss": 1.2032, + "step": 13173 + }, + { + "epoch": 0.8260079001818296, + "grad_norm": 3.1256401538848877, + "learning_rate": 1.5462906716756643e-06, + "loss": 1.0127, + "step": 13174 + }, + { + "epoch": 0.8260706000376199, + "grad_norm": 3.114753484725952, + "learning_rate": 1.5452060527723644e-06, + "loss": 1.1127, + "step": 13175 + }, + { + "epoch": 0.8261332998934102, + "grad_norm": 3.1156959533691406, + "learning_rate": 1.544121782545135e-06, + "loss": 1.1247, + "step": 13176 + }, + { + "epoch": 0.8261959997492005, + "grad_norm": 3.4377293586730957, + "learning_rate": 1.5430378610386909e-06, + "loss": 1.1165, + "step": 13177 + }, + { + "epoch": 0.8262586996049909, + "grad_norm": 3.6204020977020264, + "learning_rate": 1.5419542882977367e-06, + "loss": 1.0404, + "step": 13178 + }, + { + "epoch": 0.8263213994607812, + "grad_norm": 3.3984344005584717, + "learning_rate": 1.5408710643669578e-06, + "loss": 0.9909, + "step": 13179 + }, + { + "epoch": 0.8263840993165715, + "grad_norm": 2.8421688079833984, + "learning_rate": 1.539788189291025e-06, + "loss": 1.1468, + "step": 13180 + }, + { + "epoch": 0.8264467991723619, + "grad_norm": 3.5525271892547607, + "learning_rate": 1.5387056631145948e-06, + "loss": 1.1515, + "step": 13181 + }, + { + "epoch": 0.8265094990281522, + "grad_norm": 3.506235361099243, + "learning_rate": 1.5376234858823147e-06, + "loss": 1.1088, + "step": 13182 + }, + { + "epoch": 0.8265721988839426, + "grad_norm": 3.765092134475708, + "learning_rate": 1.5365416576388115e-06, + "loss": 0.9726, + "step": 13183 + }, + { + "epoch": 0.826634898739733, + "grad_norm": 3.386643886566162, + "learning_rate": 1.535460178428697e-06, + "loss": 1.0648, + "step": 13184 + }, + { + "epoch": 0.8266975985955233, + "grad_norm": 3.423523187637329, + "learning_rate": 1.534379048296577e-06, + "loss": 1.0577, + "step": 13185 + }, + { + "epoch": 0.8267602984513136, + "grad_norm": 3.4734091758728027, + "learning_rate": 1.5332982672870355e-06, + "loss": 1.008, + "step": 13186 + }, + { + "epoch": 0.8268229983071039, + 
"grad_norm": 3.4613218307495117, + "learning_rate": 1.5322178354446427e-06, + "loss": 0.904, + "step": 13187 + }, + { + "epoch": 0.8268856981628943, + "grad_norm": 3.1671767234802246, + "learning_rate": 1.5311377528139538e-06, + "loss": 0.9718, + "step": 13188 + }, + { + "epoch": 0.8269483980186846, + "grad_norm": 3.971161127090454, + "learning_rate": 1.5300580194395165e-06, + "loss": 1.1122, + "step": 13189 + }, + { + "epoch": 0.8270110978744749, + "grad_norm": 3.382145643234253, + "learning_rate": 1.5289786353658553e-06, + "loss": 1.0985, + "step": 13190 + }, + { + "epoch": 0.8270737977302652, + "grad_norm": 3.2984814643859863, + "learning_rate": 1.5278996006374836e-06, + "loss": 1.0394, + "step": 13191 + }, + { + "epoch": 0.8271364975860556, + "grad_norm": 3.2976887226104736, + "learning_rate": 1.5268209152989045e-06, + "loss": 1.13, + "step": 13192 + }, + { + "epoch": 0.8271991974418459, + "grad_norm": 3.3100903034210205, + "learning_rate": 1.5257425793946011e-06, + "loss": 1.0467, + "step": 13193 + }, + { + "epoch": 0.8272618972976362, + "grad_norm": 3.151348829269409, + "learning_rate": 1.524664592969044e-06, + "loss": 1.0319, + "step": 13194 + }, + { + "epoch": 0.8273245971534265, + "grad_norm": 3.3881876468658447, + "learning_rate": 1.523586956066686e-06, + "loss": 1.0602, + "step": 13195 + }, + { + "epoch": 0.8273872970092169, + "grad_norm": 3.700200080871582, + "learning_rate": 1.5225096687319751e-06, + "loss": 1.0426, + "step": 13196 + }, + { + "epoch": 0.8274499968650072, + "grad_norm": 3.389251947402954, + "learning_rate": 1.5214327310093358e-06, + "loss": 0.9347, + "step": 13197 + }, + { + "epoch": 0.8275126967207975, + "grad_norm": 3.4646618366241455, + "learning_rate": 1.5203561429431812e-06, + "loss": 1.0703, + "step": 13198 + }, + { + "epoch": 0.8275753965765879, + "grad_norm": 3.3283700942993164, + "learning_rate": 1.5192799045779062e-06, + "loss": 0.9905, + "step": 13199 + }, + { + "epoch": 0.8276380964323782, + "grad_norm": 3.441819667816162, + "learning_rate": 1.5182040159579015e-06, + "loss": 0.9894, + "step": 13200 + }, + { + "epoch": 0.8277007962881685, + "grad_norm": 3.024655818939209, + "learning_rate": 1.517128477127533e-06, + "loss": 1.0502, + "step": 13201 + }, + { + "epoch": 0.8277634961439588, + "grad_norm": 3.059267282485962, + "learning_rate": 1.516053288131154e-06, + "loss": 1.2554, + "step": 13202 + }, + { + "epoch": 0.8278261959997492, + "grad_norm": 3.4086618423461914, + "learning_rate": 1.5149784490131114e-06, + "loss": 1.0154, + "step": 13203 + }, + { + "epoch": 0.8278888958555395, + "grad_norm": 3.46234130859375, + "learning_rate": 1.5139039598177274e-06, + "loss": 0.9199, + "step": 13204 + }, + { + "epoch": 0.8279515957113298, + "grad_norm": 3.461703062057495, + "learning_rate": 1.5128298205893144e-06, + "loss": 1.1026, + "step": 13205 + }, + { + "epoch": 0.8280142955671203, + "grad_norm": 3.037966728210449, + "learning_rate": 1.511756031372168e-06, + "loss": 1.0115, + "step": 13206 + }, + { + "epoch": 0.8280769954229106, + "grad_norm": 3.7809698581695557, + "learning_rate": 1.5106825922105762e-06, + "loss": 1.1029, + "step": 13207 + }, + { + "epoch": 0.8281396952787009, + "grad_norm": 3.489227771759033, + "learning_rate": 1.5096095031488046e-06, + "loss": 1.1893, + "step": 13208 + }, + { + "epoch": 0.8282023951344912, + "grad_norm": 3.626006603240967, + "learning_rate": 1.508536764231109e-06, + "loss": 1.1471, + "step": 13209 + }, + { + "epoch": 0.8282650949902816, + "grad_norm": 3.612736940383911, + "learning_rate": 1.5074643755017237e-06, + 
"loss": 1.1912, + "step": 13210 + }, + { + "epoch": 0.8283277948460719, + "grad_norm": 3.267141580581665, + "learning_rate": 1.5063923370048827e-06, + "loss": 1.1362, + "step": 13211 + }, + { + "epoch": 0.8283904947018622, + "grad_norm": 3.3489205837249756, + "learning_rate": 1.5053206487847916e-06, + "loss": 1.0985, + "step": 13212 + }, + { + "epoch": 0.8284531945576525, + "grad_norm": 3.4276697635650635, + "learning_rate": 1.5042493108856459e-06, + "loss": 1.1721, + "step": 13213 + }, + { + "epoch": 0.8285158944134429, + "grad_norm": 3.1809022426605225, + "learning_rate": 1.5031783233516316e-06, + "loss": 1.2133, + "step": 13214 + }, + { + "epoch": 0.8285785942692332, + "grad_norm": 3.7153689861297607, + "learning_rate": 1.5021076862269145e-06, + "loss": 1.101, + "step": 13215 + }, + { + "epoch": 0.8286412941250235, + "grad_norm": 3.548438787460327, + "learning_rate": 1.5010373995556483e-06, + "loss": 0.9735, + "step": 13216 + }, + { + "epoch": 0.8287039939808138, + "grad_norm": 3.478566884994507, + "learning_rate": 1.4999674633819672e-06, + "loss": 1.2275, + "step": 13217 + }, + { + "epoch": 0.8287666938366042, + "grad_norm": 3.756863594055176, + "learning_rate": 1.498897877750003e-06, + "loss": 1.1469, + "step": 13218 + }, + { + "epoch": 0.8288293936923945, + "grad_norm": 3.1598997116088867, + "learning_rate": 1.4978286427038602e-06, + "loss": 1.0992, + "step": 13219 + }, + { + "epoch": 0.8288920935481848, + "grad_norm": 3.58832049369812, + "learning_rate": 1.4967597582876358e-06, + "loss": 1.1532, + "step": 13220 + }, + { + "epoch": 0.8289547934039752, + "grad_norm": 3.5856645107269287, + "learning_rate": 1.495691224545408e-06, + "loss": 1.1558, + "step": 13221 + }, + { + "epoch": 0.8290174932597655, + "grad_norm": 3.4400768280029297, + "learning_rate": 1.494623041521248e-06, + "loss": 1.1102, + "step": 13222 + }, + { + "epoch": 0.8290801931155558, + "grad_norm": 3.5743753910064697, + "learning_rate": 1.4935552092592054e-06, + "loss": 1.1612, + "step": 13223 + }, + { + "epoch": 0.8291428929713461, + "grad_norm": 3.658715009689331, + "learning_rate": 1.4924877278033146e-06, + "loss": 1.1338, + "step": 13224 + }, + { + "epoch": 0.8292055928271365, + "grad_norm": 3.6272823810577393, + "learning_rate": 1.4914205971976048e-06, + "loss": 1.0399, + "step": 13225 + }, + { + "epoch": 0.8292682926829268, + "grad_norm": 3.2984418869018555, + "learning_rate": 1.4903538174860798e-06, + "loss": 1.0685, + "step": 13226 + }, + { + "epoch": 0.8293309925387171, + "grad_norm": 3.3494064807891846, + "learning_rate": 1.4892873887127357e-06, + "loss": 0.8543, + "step": 13227 + }, + { + "epoch": 0.8293936923945074, + "grad_norm": 3.750568389892578, + "learning_rate": 1.4882213109215494e-06, + "loss": 0.9686, + "step": 13228 + }, + { + "epoch": 0.8294563922502978, + "grad_norm": 3.8676347732543945, + "learning_rate": 1.4871555841564889e-06, + "loss": 1.1517, + "step": 13229 + }, + { + "epoch": 0.8295190921060882, + "grad_norm": 2.9775989055633545, + "learning_rate": 1.4860902084615047e-06, + "loss": 1.0632, + "step": 13230 + }, + { + "epoch": 0.8295817919618785, + "grad_norm": 3.287764310836792, + "learning_rate": 1.485025183880533e-06, + "loss": 1.1224, + "step": 13231 + }, + { + "epoch": 0.8296444918176689, + "grad_norm": 3.325394868850708, + "learning_rate": 1.4839605104574905e-06, + "loss": 1.0034, + "step": 13232 + }, + { + "epoch": 0.8297071916734592, + "grad_norm": 3.347726821899414, + "learning_rate": 1.4828961882362925e-06, + "loss": 0.9482, + "step": 13233 + }, + { + "epoch": 0.8297698915292495, + 
"grad_norm": 3.8212451934814453, + "learning_rate": 1.4818322172608256e-06, + "loss": 0.9021, + "step": 13234 + }, + { + "epoch": 0.8298325913850398, + "grad_norm": 3.7394521236419678, + "learning_rate": 1.4807685975749698e-06, + "loss": 1.1967, + "step": 13235 + }, + { + "epoch": 0.8298952912408302, + "grad_norm": 3.5916831493377686, + "learning_rate": 1.4797053292225905e-06, + "loss": 1.1523, + "step": 13236 + }, + { + "epoch": 0.8299579910966205, + "grad_norm": 3.28432559967041, + "learning_rate": 1.478642412247535e-06, + "loss": 1.0399, + "step": 13237 + }, + { + "epoch": 0.8300206909524108, + "grad_norm": 3.715627670288086, + "learning_rate": 1.4775798466936397e-06, + "loss": 1.132, + "step": 13238 + }, + { + "epoch": 0.8300833908082011, + "grad_norm": 3.338768482208252, + "learning_rate": 1.4765176326047203e-06, + "loss": 1.015, + "step": 13239 + }, + { + "epoch": 0.8301460906639915, + "grad_norm": 3.13273024559021, + "learning_rate": 1.4754557700245887e-06, + "loss": 1.1838, + "step": 13240 + }, + { + "epoch": 0.8302087905197818, + "grad_norm": 3.102557420730591, + "learning_rate": 1.474394258997034e-06, + "loss": 1.0514, + "step": 13241 + }, + { + "epoch": 0.8302714903755721, + "grad_norm": 3.578232526779175, + "learning_rate": 1.473333099565829e-06, + "loss": 0.9902, + "step": 13242 + }, + { + "epoch": 0.8303341902313625, + "grad_norm": 3.9609768390655518, + "learning_rate": 1.4722722917747423e-06, + "loss": 1.0849, + "step": 13243 + }, + { + "epoch": 0.8303968900871528, + "grad_norm": 3.1471810340881348, + "learning_rate": 1.4712118356675188e-06, + "loss": 0.9136, + "step": 13244 + }, + { + "epoch": 0.8304595899429431, + "grad_norm": 3.464489221572876, + "learning_rate": 1.470151731287891e-06, + "loss": 1.0517, + "step": 13245 + }, + { + "epoch": 0.8305222897987334, + "grad_norm": 3.694369316101074, + "learning_rate": 1.4690919786795766e-06, + "loss": 1.1225, + "step": 13246 + }, + { + "epoch": 0.8305849896545238, + "grad_norm": 3.160922050476074, + "learning_rate": 1.4680325778862837e-06, + "loss": 1.0307, + "step": 13247 + }, + { + "epoch": 0.8306476895103141, + "grad_norm": 3.4182376861572266, + "learning_rate": 1.4669735289516995e-06, + "loss": 1.1984, + "step": 13248 + }, + { + "epoch": 0.8307103893661044, + "grad_norm": 3.390019178390503, + "learning_rate": 1.4659148319194993e-06, + "loss": 1.215, + "step": 13249 + }, + { + "epoch": 0.8307730892218947, + "grad_norm": 3.4255261421203613, + "learning_rate": 1.4648564868333426e-06, + "loss": 1.0837, + "step": 13250 + }, + { + "epoch": 0.8308357890776851, + "grad_norm": 4.020082473754883, + "learning_rate": 1.46379849373688e-06, + "loss": 1.0082, + "step": 13251 + }, + { + "epoch": 0.8308984889334754, + "grad_norm": 3.4605088233947754, + "learning_rate": 1.4627408526737386e-06, + "loss": 1.1418, + "step": 13252 + }, + { + "epoch": 0.8309611887892658, + "grad_norm": 3.244476318359375, + "learning_rate": 1.461683563687536e-06, + "loss": 1.204, + "step": 13253 + }, + { + "epoch": 0.8310238886450562, + "grad_norm": 3.3699796199798584, + "learning_rate": 1.4606266268218783e-06, + "loss": 0.9993, + "step": 13254 + }, + { + "epoch": 0.8310865885008465, + "grad_norm": 2.6715800762176514, + "learning_rate": 1.4595700421203519e-06, + "loss": 1.2132, + "step": 13255 + }, + { + "epoch": 0.8311492883566368, + "grad_norm": 3.2901816368103027, + "learning_rate": 1.4585138096265294e-06, + "loss": 1.0381, + "step": 13256 + }, + { + "epoch": 0.8312119882124271, + "grad_norm": 3.2014002799987793, + "learning_rate": 1.4574579293839674e-06, + 
"loss": 0.9764, + "step": 13257 + }, + { + "epoch": 0.8312746880682175, + "grad_norm": 3.6837940216064453, + "learning_rate": 1.4564024014362165e-06, + "loss": 1.1319, + "step": 13258 + }, + { + "epoch": 0.8313373879240078, + "grad_norm": 3.2971274852752686, + "learning_rate": 1.4553472258268043e-06, + "loss": 1.1758, + "step": 13259 + }, + { + "epoch": 0.8314000877797981, + "grad_norm": 3.4983651638031006, + "learning_rate": 1.4542924025992444e-06, + "loss": 1.0164, + "step": 13260 + }, + { + "epoch": 0.8314627876355885, + "grad_norm": 3.188810110092163, + "learning_rate": 1.453237931797038e-06, + "loss": 1.0031, + "step": 13261 + }, + { + "epoch": 0.8315254874913788, + "grad_norm": 3.6027185916900635, + "learning_rate": 1.4521838134636733e-06, + "loss": 0.9639, + "step": 13262 + }, + { + "epoch": 0.8315881873471691, + "grad_norm": 3.5605580806732178, + "learning_rate": 1.4511300476426227e-06, + "loss": 0.9221, + "step": 13263 + }, + { + "epoch": 0.8316508872029594, + "grad_norm": 3.4330058097839355, + "learning_rate": 1.4500766343773399e-06, + "loss": 1.0794, + "step": 13264 + }, + { + "epoch": 0.8317135870587498, + "grad_norm": 3.178156614303589, + "learning_rate": 1.4490235737112712e-06, + "loss": 0.9995, + "step": 13265 + }, + { + "epoch": 0.8317762869145401, + "grad_norm": 3.5538928508758545, + "learning_rate": 1.4479708656878444e-06, + "loss": 1.0638, + "step": 13266 + }, + { + "epoch": 0.8318389867703304, + "grad_norm": 3.5506136417388916, + "learning_rate": 1.4469185103504735e-06, + "loss": 1.0317, + "step": 13267 + }, + { + "epoch": 0.8319016866261207, + "grad_norm": 3.494495153427124, + "learning_rate": 1.4458665077425537e-06, + "loss": 1.2465, + "step": 13268 + }, + { + "epoch": 0.8319643864819111, + "grad_norm": 3.054853677749634, + "learning_rate": 1.444814857907475e-06, + "loss": 1.1775, + "step": 13269 + }, + { + "epoch": 0.8320270863377014, + "grad_norm": 3.391587734222412, + "learning_rate": 1.4437635608886047e-06, + "loss": 1.156, + "step": 13270 + }, + { + "epoch": 0.8320897861934917, + "grad_norm": 3.30039644241333, + "learning_rate": 1.4427126167292994e-06, + "loss": 1.0002, + "step": 13271 + }, + { + "epoch": 0.832152486049282, + "grad_norm": 3.2092833518981934, + "learning_rate": 1.4416620254728963e-06, + "loss": 1.0372, + "step": 13272 + }, + { + "epoch": 0.8322151859050724, + "grad_norm": 3.339460611343384, + "learning_rate": 1.4406117871627278e-06, + "loss": 1.0522, + "step": 13273 + }, + { + "epoch": 0.8322778857608627, + "grad_norm": 3.4423718452453613, + "learning_rate": 1.4395619018421036e-06, + "loss": 1.1905, + "step": 13274 + }, + { + "epoch": 0.832340585616653, + "grad_norm": 3.1336190700531006, + "learning_rate": 1.438512369554318e-06, + "loss": 1.2405, + "step": 13275 + }, + { + "epoch": 0.8324032854724435, + "grad_norm": 3.314316987991333, + "learning_rate": 1.4374631903426584e-06, + "loss": 1.1154, + "step": 13276 + }, + { + "epoch": 0.8324659853282338, + "grad_norm": 3.3230628967285156, + "learning_rate": 1.4364143642503903e-06, + "loss": 0.9717, + "step": 13277 + }, + { + "epoch": 0.8325286851840241, + "grad_norm": 3.4058849811553955, + "learning_rate": 1.4353658913207679e-06, + "loss": 0.8959, + "step": 13278 + }, + { + "epoch": 0.8325913850398144, + "grad_norm": 3.3218250274658203, + "learning_rate": 1.434317771597028e-06, + "loss": 1.2027, + "step": 13279 + }, + { + "epoch": 0.8326540848956048, + "grad_norm": 3.19734263420105, + "learning_rate": 1.433270005122399e-06, + "loss": 1.1963, + "step": 13280 + }, + { + "epoch": 0.8327167847513951, + 
"grad_norm": 3.24920654296875, + "learning_rate": 1.43222259194009e-06, + "loss": 1.1767, + "step": 13281 + }, + { + "epoch": 0.8327794846071854, + "grad_norm": 3.6922447681427, + "learning_rate": 1.431175532093295e-06, + "loss": 1.1225, + "step": 13282 + }, + { + "epoch": 0.8328421844629758, + "grad_norm": 3.364988327026367, + "learning_rate": 1.4301288256251933e-06, + "loss": 1.0874, + "step": 13283 + }, + { + "epoch": 0.8329048843187661, + "grad_norm": 3.5378317832946777, + "learning_rate": 1.4290824725789542e-06, + "loss": 1.039, + "step": 13284 + }, + { + "epoch": 0.8329675841745564, + "grad_norm": 3.5388519763946533, + "learning_rate": 1.428036472997729e-06, + "loss": 1.0776, + "step": 13285 + }, + { + "epoch": 0.8330302840303467, + "grad_norm": 3.666046619415283, + "learning_rate": 1.4269908269246523e-06, + "loss": 1.1404, + "step": 13286 + }, + { + "epoch": 0.8330929838861371, + "grad_norm": 3.6017143726348877, + "learning_rate": 1.4259455344028505e-06, + "loss": 1.1669, + "step": 13287 + }, + { + "epoch": 0.8331556837419274, + "grad_norm": 3.0027990341186523, + "learning_rate": 1.424900595475429e-06, + "loss": 1.2097, + "step": 13288 + }, + { + "epoch": 0.8332183835977177, + "grad_norm": 3.6907529830932617, + "learning_rate": 1.4238560101854815e-06, + "loss": 1.1863, + "step": 13289 + }, + { + "epoch": 0.833281083453508, + "grad_norm": 3.483586072921753, + "learning_rate": 1.4228117785760842e-06, + "loss": 0.9486, + "step": 13290 + }, + { + "epoch": 0.8333437833092984, + "grad_norm": 3.374161720275879, + "learning_rate": 1.421767900690303e-06, + "loss": 1.0183, + "step": 13291 + }, + { + "epoch": 0.8334064831650887, + "grad_norm": 3.339064359664917, + "learning_rate": 1.4207243765711909e-06, + "loss": 0.9772, + "step": 13292 + }, + { + "epoch": 0.833469183020879, + "grad_norm": 3.508033275604248, + "learning_rate": 1.4196812062617816e-06, + "loss": 1.0732, + "step": 13293 + }, + { + "epoch": 0.8335318828766693, + "grad_norm": 3.2172417640686035, + "learning_rate": 1.4186383898050903e-06, + "loss": 1.1531, + "step": 13294 + }, + { + "epoch": 0.8335945827324597, + "grad_norm": 3.2704882621765137, + "learning_rate": 1.4175959272441286e-06, + "loss": 1.0288, + "step": 13295 + }, + { + "epoch": 0.83365728258825, + "grad_norm": 3.3950464725494385, + "learning_rate": 1.4165538186218864e-06, + "loss": 1.2047, + "step": 13296 + }, + { + "epoch": 0.8337199824440403, + "grad_norm": 3.354092597961426, + "learning_rate": 1.4155120639813392e-06, + "loss": 1.0068, + "step": 13297 + }, + { + "epoch": 0.8337826822998307, + "grad_norm": 3.187127113342285, + "learning_rate": 1.414470663365447e-06, + "loss": 1.127, + "step": 13298 + }, + { + "epoch": 0.8338453821556211, + "grad_norm": 3.299328565597534, + "learning_rate": 1.4134296168171625e-06, + "loss": 1.1791, + "step": 13299 + }, + { + "epoch": 0.8339080820114114, + "grad_norm": 3.685119152069092, + "learning_rate": 1.4123889243794154e-06, + "loss": 1.0832, + "step": 13300 + }, + { + "epoch": 0.8339707818672017, + "grad_norm": 3.185187816619873, + "learning_rate": 1.4113485860951237e-06, + "loss": 1.0992, + "step": 13301 + }, + { + "epoch": 0.8340334817229921, + "grad_norm": 3.3285562992095947, + "learning_rate": 1.41030860200719e-06, + "loss": 1.2299, + "step": 13302 + }, + { + "epoch": 0.8340961815787824, + "grad_norm": 3.349364757537842, + "learning_rate": 1.4092689721585052e-06, + "loss": 1.1693, + "step": 13303 + }, + { + "epoch": 0.8341588814345727, + "grad_norm": 3.281018018722534, + "learning_rate": 1.408229696591945e-06, + "loss": 
1.1415, + "step": 13304 + }, + { + "epoch": 0.8342215812903631, + "grad_norm": 3.6165809631347656, + "learning_rate": 1.4071907753503633e-06, + "loss": 1.0056, + "step": 13305 + }, + { + "epoch": 0.8342842811461534, + "grad_norm": 3.453853130340576, + "learning_rate": 1.4061522084766132e-06, + "loss": 1.048, + "step": 13306 + }, + { + "epoch": 0.8343469810019437, + "grad_norm": 3.0960471630096436, + "learning_rate": 1.4051139960135208e-06, + "loss": 1.0814, + "step": 13307 + }, + { + "epoch": 0.834409680857734, + "grad_norm": 3.4977471828460693, + "learning_rate": 1.4040761380039015e-06, + "loss": 1.1824, + "step": 13308 + }, + { + "epoch": 0.8344723807135244, + "grad_norm": 3.425010919570923, + "learning_rate": 1.4030386344905567e-06, + "loss": 1.134, + "step": 13309 + }, + { + "epoch": 0.8345350805693147, + "grad_norm": 4.11592435836792, + "learning_rate": 1.4020014855162755e-06, + "loss": 0.9704, + "step": 13310 + }, + { + "epoch": 0.834597780425105, + "grad_norm": 3.24222731590271, + "learning_rate": 1.4009646911238283e-06, + "loss": 1.1053, + "step": 13311 + }, + { + "epoch": 0.8346604802808953, + "grad_norm": 3.478083848953247, + "learning_rate": 1.3999282513559743e-06, + "loss": 1.0052, + "step": 13312 + }, + { + "epoch": 0.8347231801366857, + "grad_norm": 3.565648317337036, + "learning_rate": 1.3988921662554512e-06, + "loss": 0.9736, + "step": 13313 + }, + { + "epoch": 0.834785879992476, + "grad_norm": 3.7788901329040527, + "learning_rate": 1.3978564358649926e-06, + "loss": 1.2104, + "step": 13314 + }, + { + "epoch": 0.8348485798482663, + "grad_norm": 3.358550548553467, + "learning_rate": 1.3968210602273113e-06, + "loss": 1.1012, + "step": 13315 + }, + { + "epoch": 0.8349112797040567, + "grad_norm": 3.361748218536377, + "learning_rate": 1.395786039385103e-06, + "loss": 1.1919, + "step": 13316 + }, + { + "epoch": 0.834973979559847, + "grad_norm": 3.1729304790496826, + "learning_rate": 1.3947513733810558e-06, + "loss": 1.1583, + "step": 13317 + }, + { + "epoch": 0.8350366794156373, + "grad_norm": 3.0722241401672363, + "learning_rate": 1.3937170622578378e-06, + "loss": 1.1855, + "step": 13318 + }, + { + "epoch": 0.8350993792714276, + "grad_norm": 3.111236095428467, + "learning_rate": 1.3926831060581047e-06, + "loss": 1.1743, + "step": 13319 + }, + { + "epoch": 0.835162079127218, + "grad_norm": 3.376192569732666, + "learning_rate": 1.3916495048244927e-06, + "loss": 1.1428, + "step": 13320 + }, + { + "epoch": 0.8352247789830083, + "grad_norm": 3.3269155025482178, + "learning_rate": 1.390616258599634e-06, + "loss": 1.116, + "step": 13321 + }, + { + "epoch": 0.8352874788387987, + "grad_norm": 3.3368992805480957, + "learning_rate": 1.389583367426137e-06, + "loss": 1.2544, + "step": 13322 + }, + { + "epoch": 0.835350178694589, + "grad_norm": 3.5256292819976807, + "learning_rate": 1.3885508313465957e-06, + "loss": 1.12, + "step": 13323 + }, + { + "epoch": 0.8354128785503794, + "grad_norm": 3.4613661766052246, + "learning_rate": 1.3875186504035965e-06, + "loss": 1.1156, + "step": 13324 + }, + { + "epoch": 0.8354755784061697, + "grad_norm": 3.336374044418335, + "learning_rate": 1.3864868246397035e-06, + "loss": 1.1406, + "step": 13325 + }, + { + "epoch": 0.83553827826196, + "grad_norm": 3.549436330795288, + "learning_rate": 1.3854553540974701e-06, + "loss": 1.1018, + "step": 13326 + }, + { + "epoch": 0.8356009781177504, + "grad_norm": 3.371331214904785, + "learning_rate": 1.3844242388194329e-06, + "loss": 1.1147, + "step": 13327 + }, + { + "epoch": 0.8356636779735407, + "grad_norm": 
3.2154619693756104, + "learning_rate": 1.3833934788481174e-06, + "loss": 1.0962, + "step": 13328 + }, + { + "epoch": 0.835726377829331, + "grad_norm": 3.3603222370147705, + "learning_rate": 1.382363074226032e-06, + "loss": 1.2082, + "step": 13329 + }, + { + "epoch": 0.8357890776851213, + "grad_norm": 3.5608410835266113, + "learning_rate": 1.3813330249956692e-06, + "loss": 1.0276, + "step": 13330 + }, + { + "epoch": 0.8358517775409117, + "grad_norm": 3.5544002056121826, + "learning_rate": 1.3803033311995072e-06, + "loss": 1.201, + "step": 13331 + }, + { + "epoch": 0.835914477396702, + "grad_norm": 3.2654008865356445, + "learning_rate": 1.3792739928800136e-06, + "loss": 1.0271, + "step": 13332 + }, + { + "epoch": 0.8359771772524923, + "grad_norm": 3.274947166442871, + "learning_rate": 1.3782450100796374e-06, + "loss": 1.0668, + "step": 13333 + }, + { + "epoch": 0.8360398771082826, + "grad_norm": 3.430586814880371, + "learning_rate": 1.3772163828408113e-06, + "loss": 1.1516, + "step": 13334 + }, + { + "epoch": 0.836102576964073, + "grad_norm": 3.0465610027313232, + "learning_rate": 1.3761881112059605e-06, + "loss": 1.1684, + "step": 13335 + }, + { + "epoch": 0.8361652768198633, + "grad_norm": 3.3354620933532715, + "learning_rate": 1.3751601952174887e-06, + "loss": 1.0343, + "step": 13336 + }, + { + "epoch": 0.8362279766756536, + "grad_norm": 3.5504820346832275, + "learning_rate": 1.3741326349177864e-06, + "loss": 0.8917, + "step": 13337 + }, + { + "epoch": 0.836290676531444, + "grad_norm": 3.5485386848449707, + "learning_rate": 1.3731054303492298e-06, + "loss": 1.0621, + "step": 13338 + }, + { + "epoch": 0.8363533763872343, + "grad_norm": 3.267622709274292, + "learning_rate": 1.3720785815541837e-06, + "loss": 1.0813, + "step": 13339 + }, + { + "epoch": 0.8364160762430246, + "grad_norm": 3.4176855087280273, + "learning_rate": 1.3710520885749934e-06, + "loss": 0.9939, + "step": 13340 + }, + { + "epoch": 0.8364787760988149, + "grad_norm": 3.7172887325286865, + "learning_rate": 1.3700259514539915e-06, + "loss": 0.972, + "step": 13341 + }, + { + "epoch": 0.8365414759546053, + "grad_norm": 3.185119867324829, + "learning_rate": 1.3690001702334943e-06, + "loss": 1.0414, + "step": 13342 + }, + { + "epoch": 0.8366041758103956, + "grad_norm": 3.8443751335144043, + "learning_rate": 1.3679747449558101e-06, + "loss": 1.1228, + "step": 13343 + }, + { + "epoch": 0.8366668756661859, + "grad_norm": 3.2852535247802734, + "learning_rate": 1.3669496756632229e-06, + "loss": 1.1853, + "step": 13344 + }, + { + "epoch": 0.8367295755219764, + "grad_norm": 3.487417459487915, + "learning_rate": 1.3659249623980075e-06, + "loss": 1.1615, + "step": 13345 + }, + { + "epoch": 0.8367922753777667, + "grad_norm": 3.6777279376983643, + "learning_rate": 1.364900605202425e-06, + "loss": 0.9989, + "step": 13346 + }, + { + "epoch": 0.836854975233557, + "grad_norm": 2.9777071475982666, + "learning_rate": 1.3638766041187178e-06, + "loss": 1.3077, + "step": 13347 + }, + { + "epoch": 0.8369176750893473, + "grad_norm": 3.6630868911743164, + "learning_rate": 1.3628529591891181e-06, + "loss": 0.9681, + "step": 13348 + }, + { + "epoch": 0.8369803749451377, + "grad_norm": 2.999295473098755, + "learning_rate": 1.3618296704558364e-06, + "loss": 1.0749, + "step": 13349 + }, + { + "epoch": 0.837043074800928, + "grad_norm": 3.5587613582611084, + "learning_rate": 1.3608067379610789e-06, + "loss": 1.1559, + "step": 13350 + }, + { + "epoch": 0.8371057746567183, + "grad_norm": 3.3446288108825684, + "learning_rate": 1.3597841617470286e-06, + "loss": 
1.1119, + "step": 13351 + }, + { + "epoch": 0.8371684745125086, + "grad_norm": 3.7828643321990967, + "learning_rate": 1.3587619418558573e-06, + "loss": 1.1124, + "step": 13352 + }, + { + "epoch": 0.837231174368299, + "grad_norm": 3.454010248184204, + "learning_rate": 1.3577400783297178e-06, + "loss": 1.0152, + "step": 13353 + }, + { + "epoch": 0.8372938742240893, + "grad_norm": 3.2489211559295654, + "learning_rate": 1.3567185712107579e-06, + "loss": 1.2088, + "step": 13354 + }, + { + "epoch": 0.8373565740798796, + "grad_norm": 3.3869285583496094, + "learning_rate": 1.3556974205411011e-06, + "loss": 1.1492, + "step": 13355 + }, + { + "epoch": 0.83741927393567, + "grad_norm": 3.122048854827881, + "learning_rate": 1.3546766263628586e-06, + "loss": 1.0966, + "step": 13356 + }, + { + "epoch": 0.8374819737914603, + "grad_norm": 3.086275339126587, + "learning_rate": 1.353656188718131e-06, + "loss": 0.9787, + "step": 13357 + }, + { + "epoch": 0.8375446736472506, + "grad_norm": 3.545121669769287, + "learning_rate": 1.3526361076490002e-06, + "loss": 1.0545, + "step": 13358 + }, + { + "epoch": 0.8376073735030409, + "grad_norm": 3.2493128776550293, + "learning_rate": 1.3516163831975337e-06, + "loss": 1.1906, + "step": 13359 + }, + { + "epoch": 0.8376700733588313, + "grad_norm": 3.5591771602630615, + "learning_rate": 1.3505970154057835e-06, + "loss": 0.9662, + "step": 13360 + }, + { + "epoch": 0.8377327732146216, + "grad_norm": 3.1861987113952637, + "learning_rate": 1.3495780043157913e-06, + "loss": 0.9889, + "step": 13361 + }, + { + "epoch": 0.8377954730704119, + "grad_norm": 3.51159930229187, + "learning_rate": 1.3485593499695816e-06, + "loss": 1.0022, + "step": 13362 + }, + { + "epoch": 0.8378581729262022, + "grad_norm": 3.5876102447509766, + "learning_rate": 1.3475410524091604e-06, + "loss": 1.0521, + "step": 13363 + }, + { + "epoch": 0.8379208727819926, + "grad_norm": 3.5604140758514404, + "learning_rate": 1.3465231116765231e-06, + "loss": 1.0894, + "step": 13364 + }, + { + "epoch": 0.8379835726377829, + "grad_norm": 3.4790265560150146, + "learning_rate": 1.345505527813652e-06, + "loss": 1.1173, + "step": 13365 + }, + { + "epoch": 0.8380462724935732, + "grad_norm": 3.681748628616333, + "learning_rate": 1.344488300862511e-06, + "loss": 0.8905, + "step": 13366 + }, + { + "epoch": 0.8381089723493635, + "grad_norm": 3.4883408546447754, + "learning_rate": 1.3434714308650488e-06, + "loss": 0.8652, + "step": 13367 + }, + { + "epoch": 0.838171672205154, + "grad_norm": 3.3454396724700928, + "learning_rate": 1.342454917863204e-06, + "loss": 0.9431, + "step": 13368 + }, + { + "epoch": 0.8382343720609443, + "grad_norm": 3.7875139713287354, + "learning_rate": 1.3414387618988977e-06, + "loss": 1.0537, + "step": 13369 + }, + { + "epoch": 0.8382970719167346, + "grad_norm": 3.4188103675842285, + "learning_rate": 1.3404229630140342e-06, + "loss": 1.0754, + "step": 13370 + }, + { + "epoch": 0.838359771772525, + "grad_norm": 3.285977602005005, + "learning_rate": 1.3394075212505031e-06, + "loss": 1.0641, + "step": 13371 + }, + { + "epoch": 0.8384224716283153, + "grad_norm": 3.6920409202575684, + "learning_rate": 1.3383924366501876e-06, + "loss": 1.0281, + "step": 13372 + }, + { + "epoch": 0.8384851714841056, + "grad_norm": 3.623725652694702, + "learning_rate": 1.3373777092549455e-06, + "loss": 0.9887, + "step": 13373 + }, + { + "epoch": 0.838547871339896, + "grad_norm": 3.337749719619751, + "learning_rate": 1.3363633391066234e-06, + "loss": 1.0849, + "step": 13374 + }, + { + "epoch": 0.8386105711956863, + "grad_norm": 
3.5267088413238525, + "learning_rate": 1.3353493262470564e-06, + "loss": 1.2554, + "step": 13375 + }, + { + "epoch": 0.8386732710514766, + "grad_norm": 3.3837826251983643, + "learning_rate": 1.3343356707180632e-06, + "loss": 1.0675, + "step": 13376 + }, + { + "epoch": 0.8387359709072669, + "grad_norm": 3.4754045009613037, + "learning_rate": 1.3333223725614442e-06, + "loss": 1.1744, + "step": 13377 + }, + { + "epoch": 0.8387986707630573, + "grad_norm": 3.2801036834716797, + "learning_rate": 1.3323094318189866e-06, + "loss": 1.1525, + "step": 13378 + }, + { + "epoch": 0.8388613706188476, + "grad_norm": 3.428471803665161, + "learning_rate": 1.3312968485324695e-06, + "loss": 1.2029, + "step": 13379 + }, + { + "epoch": 0.8389240704746379, + "grad_norm": 3.158844470977783, + "learning_rate": 1.3302846227436495e-06, + "loss": 1.1694, + "step": 13380 + }, + { + "epoch": 0.8389867703304282, + "grad_norm": 2.8034353256225586, + "learning_rate": 1.3292727544942695e-06, + "loss": 1.192, + "step": 13381 + }, + { + "epoch": 0.8390494701862186, + "grad_norm": 3.3213822841644287, + "learning_rate": 1.3282612438260578e-06, + "loss": 1.0082, + "step": 13382 + }, + { + "epoch": 0.8391121700420089, + "grad_norm": 3.1151435375213623, + "learning_rate": 1.327250090780733e-06, + "loss": 1.2672, + "step": 13383 + }, + { + "epoch": 0.8391748698977992, + "grad_norm": 3.5526580810546875, + "learning_rate": 1.3262392953999925e-06, + "loss": 0.8096, + "step": 13384 + }, + { + "epoch": 0.8392375697535895, + "grad_norm": 3.3752377033233643, + "learning_rate": 1.3252288577255212e-06, + "loss": 1.05, + "step": 13385 + }, + { + "epoch": 0.8393002696093799, + "grad_norm": 3.872076988220215, + "learning_rate": 1.3242187777989924e-06, + "loss": 0.9901, + "step": 13386 + }, + { + "epoch": 0.8393629694651702, + "grad_norm": 3.5714383125305176, + "learning_rate": 1.3232090556620602e-06, + "loss": 0.9267, + "step": 13387 + }, + { + "epoch": 0.8394256693209605, + "grad_norm": 3.2974934577941895, + "learning_rate": 1.322199691356366e-06, + "loss": 1.1149, + "step": 13388 + }, + { + "epoch": 0.8394883691767508, + "grad_norm": 3.3398044109344482, + "learning_rate": 1.3211906849235323e-06, + "loss": 1.0542, + "step": 13389 + }, + { + "epoch": 0.8395510690325412, + "grad_norm": 3.1140787601470947, + "learning_rate": 1.3201820364051764e-06, + "loss": 1.0445, + "step": 13390 + }, + { + "epoch": 0.8396137688883315, + "grad_norm": 3.7032124996185303, + "learning_rate": 1.3191737458428922e-06, + "loss": 0.9937, + "step": 13391 + }, + { + "epoch": 0.8396764687441219, + "grad_norm": 3.368762493133545, + "learning_rate": 1.3181658132782627e-06, + "loss": 1.2069, + "step": 13392 + }, + { + "epoch": 0.8397391685999123, + "grad_norm": 4.019108295440674, + "learning_rate": 1.3171582387528503e-06, + "loss": 1.2137, + "step": 13393 + }, + { + "epoch": 0.8398018684557026, + "grad_norm": 3.145482063293457, + "learning_rate": 1.3161510223082152e-06, + "loss": 1.2162, + "step": 13394 + }, + { + "epoch": 0.8398645683114929, + "grad_norm": 3.24460768699646, + "learning_rate": 1.3151441639858897e-06, + "loss": 0.9011, + "step": 13395 + }, + { + "epoch": 0.8399272681672832, + "grad_norm": 3.0692970752716064, + "learning_rate": 1.3141376638273972e-06, + "loss": 1.0719, + "step": 13396 + }, + { + "epoch": 0.8399899680230736, + "grad_norm": 3.5026493072509766, + "learning_rate": 1.313131521874248e-06, + "loss": 1.1, + "step": 13397 + }, + { + "epoch": 0.8400526678788639, + "grad_norm": 3.2892165184020996, + "learning_rate": 1.3121257381679354e-06, + "loss": 
0.9488, + "step": 13398 + }, + { + "epoch": 0.8401153677346542, + "grad_norm": 3.2478995323181152, + "learning_rate": 1.311120312749935e-06, + "loss": 1.178, + "step": 13399 + }, + { + "epoch": 0.8401780675904446, + "grad_norm": 3.311065435409546, + "learning_rate": 1.3101152456617118e-06, + "loss": 1.177, + "step": 13400 + }, + { + "epoch": 0.8402407674462349, + "grad_norm": 3.2611217498779297, + "learning_rate": 1.3091105369447166e-06, + "loss": 1.11, + "step": 13401 + }, + { + "epoch": 0.8403034673020252, + "grad_norm": 3.4320926666259766, + "learning_rate": 1.3081061866403832e-06, + "loss": 1.0506, + "step": 13402 + }, + { + "epoch": 0.8403661671578155, + "grad_norm": 3.3357656002044678, + "learning_rate": 1.3071021947901298e-06, + "loss": 1.123, + "step": 13403 + }, + { + "epoch": 0.8404288670136059, + "grad_norm": 3.2510251998901367, + "learning_rate": 1.3060985614353583e-06, + "loss": 1.0636, + "step": 13404 + }, + { + "epoch": 0.8404915668693962, + "grad_norm": 3.4619998931884766, + "learning_rate": 1.305095286617466e-06, + "loss": 1.0451, + "step": 13405 + }, + { + "epoch": 0.8405542667251865, + "grad_norm": 3.6463911533355713, + "learning_rate": 1.3040923703778218e-06, + "loss": 1.1938, + "step": 13406 + }, + { + "epoch": 0.8406169665809768, + "grad_norm": 3.461421012878418, + "learning_rate": 1.3030898127577874e-06, + "loss": 1.1409, + "step": 13407 + }, + { + "epoch": 0.8406796664367672, + "grad_norm": 3.351900339126587, + "learning_rate": 1.3020876137987104e-06, + "loss": 1.2481, + "step": 13408 + }, + { + "epoch": 0.8407423662925575, + "grad_norm": 3.4947497844696045, + "learning_rate": 1.3010857735419203e-06, + "loss": 1.0262, + "step": 13409 + }, + { + "epoch": 0.8408050661483478, + "grad_norm": 3.490817070007324, + "learning_rate": 1.3000842920287316e-06, + "loss": 1.1188, + "step": 13410 + }, + { + "epoch": 0.8408677660041382, + "grad_norm": 3.6799676418304443, + "learning_rate": 1.2990831693004458e-06, + "loss": 1.0236, + "step": 13411 + }, + { + "epoch": 0.8409304658599285, + "grad_norm": 3.7858047485351562, + "learning_rate": 1.2980824053983521e-06, + "loss": 1.0795, + "step": 13412 + }, + { + "epoch": 0.8409931657157188, + "grad_norm": 3.4659602642059326, + "learning_rate": 1.2970820003637197e-06, + "loss": 1.0903, + "step": 13413 + }, + { + "epoch": 0.8410558655715091, + "grad_norm": 3.694929599761963, + "learning_rate": 1.2960819542378055e-06, + "loss": 1.0613, + "step": 13414 + }, + { + "epoch": 0.8411185654272996, + "grad_norm": 3.001122236251831, + "learning_rate": 1.2950822670618491e-06, + "loss": 1.1899, + "step": 13415 + }, + { + "epoch": 0.8411812652830899, + "grad_norm": 3.602160930633545, + "learning_rate": 1.2940829388770837e-06, + "loss": 1.1972, + "step": 13416 + }, + { + "epoch": 0.8412439651388802, + "grad_norm": 3.5221939086914062, + "learning_rate": 1.2930839697247167e-06, + "loss": 1.0134, + "step": 13417 + }, + { + "epoch": 0.8413066649946705, + "grad_norm": 3.495894432067871, + "learning_rate": 1.2920853596459459e-06, + "loss": 0.9909, + "step": 13418 + }, + { + "epoch": 0.8413693648504609, + "grad_norm": 3.843085289001465, + "learning_rate": 1.2910871086819564e-06, + "loss": 0.9931, + "step": 13419 + }, + { + "epoch": 0.8414320647062512, + "grad_norm": 3.1778430938720703, + "learning_rate": 1.2900892168739154e-06, + "loss": 1.0675, + "step": 13420 + }, + { + "epoch": 0.8414947645620415, + "grad_norm": 3.5328707695007324, + "learning_rate": 1.2890916842629753e-06, + "loss": 1.1742, + "step": 13421 + }, + { + "epoch": 0.8415574644178319, + 
"grad_norm": 3.5705113410949707, + "learning_rate": 1.288094510890272e-06, + "loss": 1.1186, + "step": 13422 + }, + { + "epoch": 0.8416201642736222, + "grad_norm": 3.5488264560699463, + "learning_rate": 1.287097696796934e-06, + "loss": 0.9644, + "step": 13423 + }, + { + "epoch": 0.8416828641294125, + "grad_norm": 3.2357840538024902, + "learning_rate": 1.2861012420240681e-06, + "loss": 1.1207, + "step": 13424 + }, + { + "epoch": 0.8417455639852028, + "grad_norm": 3.1969456672668457, + "learning_rate": 1.285105146612765e-06, + "loss": 1.1077, + "step": 13425 + }, + { + "epoch": 0.8418082638409932, + "grad_norm": 3.4500327110290527, + "learning_rate": 1.2841094106041085e-06, + "loss": 1.0951, + "step": 13426 + }, + { + "epoch": 0.8418709636967835, + "grad_norm": 3.6537821292877197, + "learning_rate": 1.2831140340391602e-06, + "loss": 0.8374, + "step": 13427 + }, + { + "epoch": 0.8419336635525738, + "grad_norm": 3.418958902359009, + "learning_rate": 1.2821190169589693e-06, + "loss": 1.0195, + "step": 13428 + }, + { + "epoch": 0.8419963634083641, + "grad_norm": 3.1777679920196533, + "learning_rate": 1.2811243594045697e-06, + "loss": 1.0616, + "step": 13429 + }, + { + "epoch": 0.8420590632641545, + "grad_norm": 2.8299999237060547, + "learning_rate": 1.2801300614169843e-06, + "loss": 1.2067, + "step": 13430 + }, + { + "epoch": 0.8421217631199448, + "grad_norm": 3.950122356414795, + "learning_rate": 1.2791361230372146e-06, + "loss": 1.0767, + "step": 13431 + }, + { + "epoch": 0.8421844629757351, + "grad_norm": 3.7406656742095947, + "learning_rate": 1.2781425443062535e-06, + "loss": 1.0971, + "step": 13432 + }, + { + "epoch": 0.8422471628315255, + "grad_norm": 3.219639301300049, + "learning_rate": 1.2771493252650723e-06, + "loss": 0.9736, + "step": 13433 + }, + { + "epoch": 0.8423098626873158, + "grad_norm": 3.5289244651794434, + "learning_rate": 1.2761564659546355e-06, + "loss": 1.1325, + "step": 13434 + }, + { + "epoch": 0.8423725625431061, + "grad_norm": 3.3447577953338623, + "learning_rate": 1.2751639664158877e-06, + "loss": 1.0733, + "step": 13435 + }, + { + "epoch": 0.8424352623988964, + "grad_norm": 3.554914712905884, + "learning_rate": 1.2741718266897562e-06, + "loss": 0.9335, + "step": 13436 + }, + { + "epoch": 0.8424979622546868, + "grad_norm": 3.6889119148254395, + "learning_rate": 1.273180046817163e-06, + "loss": 0.9906, + "step": 13437 + }, + { + "epoch": 0.8425606621104772, + "grad_norm": 3.6357011795043945, + "learning_rate": 1.2721886268390038e-06, + "loss": 1.0089, + "step": 13438 + }, + { + "epoch": 0.8426233619662675, + "grad_norm": 3.004053831100464, + "learning_rate": 1.2711975667961685e-06, + "loss": 1.0768, + "step": 13439 + }, + { + "epoch": 0.8426860618220579, + "grad_norm": 3.720989227294922, + "learning_rate": 1.270206866729523e-06, + "loss": 1.3416, + "step": 13440 + }, + { + "epoch": 0.8427487616778482, + "grad_norm": 3.144658327102661, + "learning_rate": 1.2692165266799316e-06, + "loss": 1.0531, + "step": 13441 + }, + { + "epoch": 0.8428114615336385, + "grad_norm": 3.4811766147613525, + "learning_rate": 1.2682265466882305e-06, + "loss": 1.1278, + "step": 13442 + }, + { + "epoch": 0.8428741613894288, + "grad_norm": 3.387202501296997, + "learning_rate": 1.2672369267952477e-06, + "loss": 1.0642, + "step": 13443 + }, + { + "epoch": 0.8429368612452192, + "grad_norm": 3.8163468837738037, + "learning_rate": 1.2662476670417946e-06, + "loss": 1.1634, + "step": 13444 + }, + { + "epoch": 0.8429995611010095, + "grad_norm": 3.424225091934204, + "learning_rate": 
1.2652587674686701e-06, + "loss": 1.1866, + "step": 13445 + }, + { + "epoch": 0.8430622609567998, + "grad_norm": 3.5353078842163086, + "learning_rate": 1.2642702281166562e-06, + "loss": 1.1136, + "step": 13446 + }, + { + "epoch": 0.8431249608125901, + "grad_norm": 3.560689926147461, + "learning_rate": 1.2632820490265175e-06, + "loss": 1.0658, + "step": 13447 + }, + { + "epoch": 0.8431876606683805, + "grad_norm": 3.750596523284912, + "learning_rate": 1.2622942302390107e-06, + "loss": 0.981, + "step": 13448 + }, + { + "epoch": 0.8432503605241708, + "grad_norm": 3.3359458446502686, + "learning_rate": 1.2613067717948701e-06, + "loss": 1.1312, + "step": 13449 + }, + { + "epoch": 0.8433130603799611, + "grad_norm": 3.486032485961914, + "learning_rate": 1.2603196737348211e-06, + "loss": 1.1331, + "step": 13450 + }, + { + "epoch": 0.8433757602357514, + "grad_norm": 3.552624464035034, + "learning_rate": 1.2593329360995687e-06, + "loss": 1.1175, + "step": 13451 + }, + { + "epoch": 0.8434384600915418, + "grad_norm": 3.795327663421631, + "learning_rate": 1.2583465589298095e-06, + "loss": 0.9476, + "step": 13452 + }, + { + "epoch": 0.8435011599473321, + "grad_norm": 3.729647159576416, + "learning_rate": 1.2573605422662193e-06, + "loss": 1.0841, + "step": 13453 + }, + { + "epoch": 0.8435638598031224, + "grad_norm": 3.3679254055023193, + "learning_rate": 1.2563748861494629e-06, + "loss": 1.0469, + "step": 13454 + }, + { + "epoch": 0.8436265596589128, + "grad_norm": 3.6916961669921875, + "learning_rate": 1.2553895906201853e-06, + "loss": 1.0066, + "step": 13455 + }, + { + "epoch": 0.8436892595147031, + "grad_norm": 3.3678061962127686, + "learning_rate": 1.2544046557190247e-06, + "loss": 1.1232, + "step": 13456 + }, + { + "epoch": 0.8437519593704934, + "grad_norm": 3.202120542526245, + "learning_rate": 1.2534200814865993e-06, + "loss": 1.1103, + "step": 13457 + }, + { + "epoch": 0.8438146592262837, + "grad_norm": 3.418064832687378, + "learning_rate": 1.2524358679635085e-06, + "loss": 1.0254, + "step": 13458 + }, + { + "epoch": 0.8438773590820741, + "grad_norm": 3.6095104217529297, + "learning_rate": 1.2514520151903464e-06, + "loss": 1.0461, + "step": 13459 + }, + { + "epoch": 0.8439400589378644, + "grad_norm": 3.1771671772003174, + "learning_rate": 1.2504685232076863e-06, + "loss": 1.2068, + "step": 13460 + }, + { + "epoch": 0.8440027587936548, + "grad_norm": 3.131002426147461, + "learning_rate": 1.2494853920560857e-06, + "loss": 1.0396, + "step": 13461 + }, + { + "epoch": 0.8440654586494452, + "grad_norm": 3.157188653945923, + "learning_rate": 1.2485026217760877e-06, + "loss": 1.1447, + "step": 13462 + }, + { + "epoch": 0.8441281585052355, + "grad_norm": 3.8364787101745605, + "learning_rate": 1.2475202124082264e-06, + "loss": 1.1506, + "step": 13463 + }, + { + "epoch": 0.8441908583610258, + "grad_norm": 3.3655123710632324, + "learning_rate": 1.246538163993013e-06, + "loss": 1.1916, + "step": 13464 + }, + { + "epoch": 0.8442535582168161, + "grad_norm": 3.887214183807373, + "learning_rate": 1.2455564765709483e-06, + "loss": 1.0323, + "step": 13465 + }, + { + "epoch": 0.8443162580726065, + "grad_norm": 3.797322988510132, + "learning_rate": 1.2445751501825142e-06, + "loss": 1.1518, + "step": 13466 + }, + { + "epoch": 0.8443789579283968, + "grad_norm": 3.4308369159698486, + "learning_rate": 1.2435941848681864e-06, + "loss": 1.0494, + "step": 13467 + }, + { + "epoch": 0.8444416577841871, + "grad_norm": 3.4449591636657715, + "learning_rate": 1.2426135806684158e-06, + "loss": 1.1129, + "step": 13468 + }, + { + 
"epoch": 0.8445043576399774, + "grad_norm": 3.245049238204956, + "learning_rate": 1.2416333376236422e-06, + "loss": 1.1181, + "step": 13469 + }, + { + "epoch": 0.8445670574957678, + "grad_norm": 3.6389358043670654, + "learning_rate": 1.2406534557742945e-06, + "loss": 0.9691, + "step": 13470 + }, + { + "epoch": 0.8446297573515581, + "grad_norm": 3.8738508224487305, + "learning_rate": 1.23967393516078e-06, + "loss": 1.0349, + "step": 13471 + }, + { + "epoch": 0.8446924572073484, + "grad_norm": 4.033622741699219, + "learning_rate": 1.2386947758234957e-06, + "loss": 1.1136, + "step": 13472 + }, + { + "epoch": 0.8447551570631388, + "grad_norm": 3.2247016429901123, + "learning_rate": 1.237715977802818e-06, + "loss": 1.1021, + "step": 13473 + }, + { + "epoch": 0.8448178569189291, + "grad_norm": 3.3017239570617676, + "learning_rate": 1.236737541139119e-06, + "loss": 0.9192, + "step": 13474 + }, + { + "epoch": 0.8448805567747194, + "grad_norm": 3.3942148685455322, + "learning_rate": 1.2357594658727467e-06, + "loss": 1.2334, + "step": 13475 + }, + { + "epoch": 0.8449432566305097, + "grad_norm": 3.084684371948242, + "learning_rate": 1.2347817520440376e-06, + "loss": 1.1223, + "step": 13476 + }, + { + "epoch": 0.8450059564863001, + "grad_norm": 3.3355166912078857, + "learning_rate": 1.233804399693308e-06, + "loss": 1.2577, + "step": 13477 + }, + { + "epoch": 0.8450686563420904, + "grad_norm": 3.0686938762664795, + "learning_rate": 1.2328274088608715e-06, + "loss": 1.0417, + "step": 13478 + }, + { + "epoch": 0.8451313561978807, + "grad_norm": 2.887687921524048, + "learning_rate": 1.2318507795870138e-06, + "loss": 1.0618, + "step": 13479 + }, + { + "epoch": 0.845194056053671, + "grad_norm": 3.4492321014404297, + "learning_rate": 1.2308745119120126e-06, + "loss": 1.0619, + "step": 13480 + }, + { + "epoch": 0.8452567559094614, + "grad_norm": 3.5162999629974365, + "learning_rate": 1.2298986058761298e-06, + "loss": 0.9872, + "step": 13481 + }, + { + "epoch": 0.8453194557652517, + "grad_norm": 3.2640509605407715, + "learning_rate": 1.2289230615196125e-06, + "loss": 1.0035, + "step": 13482 + }, + { + "epoch": 0.845382155621042, + "grad_norm": 3.418166399002075, + "learning_rate": 1.2279478788826915e-06, + "loss": 0.9205, + "step": 13483 + }, + { + "epoch": 0.8454448554768325, + "grad_norm": 3.6238138675689697, + "learning_rate": 1.2269730580055806e-06, + "loss": 1.0463, + "step": 13484 + }, + { + "epoch": 0.8455075553326228, + "grad_norm": 3.6774351596832275, + "learning_rate": 1.2259985989284851e-06, + "loss": 1.1699, + "step": 13485 + }, + { + "epoch": 0.8455702551884131, + "grad_norm": 3.4256439208984375, + "learning_rate": 1.2250245016915918e-06, + "loss": 1.0337, + "step": 13486 + }, + { + "epoch": 0.8456329550442034, + "grad_norm": 3.7605090141296387, + "learning_rate": 1.2240507663350686e-06, + "loss": 1.2285, + "step": 13487 + }, + { + "epoch": 0.8456956548999938, + "grad_norm": 3.3517277240753174, + "learning_rate": 1.223077392899077e-06, + "loss": 1.1357, + "step": 13488 + }, + { + "epoch": 0.8457583547557841, + "grad_norm": 3.4138948917388916, + "learning_rate": 1.2221043814237566e-06, + "loss": 1.0757, + "step": 13489 + }, + { + "epoch": 0.8458210546115744, + "grad_norm": 3.5317680835723877, + "learning_rate": 1.2211317319492356e-06, + "loss": 1.0648, + "step": 13490 + }, + { + "epoch": 0.8458837544673647, + "grad_norm": 3.3888251781463623, + "learning_rate": 1.2201594445156229e-06, + "loss": 1.0545, + "step": 13491 + }, + { + "epoch": 0.8459464543231551, + "grad_norm": 3.083268165588379, + 
"learning_rate": 1.2191875191630209e-06, + "loss": 1.1115, + "step": 13492 + }, + { + "epoch": 0.8460091541789454, + "grad_norm": 3.1716277599334717, + "learning_rate": 1.2182159559315088e-06, + "loss": 1.0927, + "step": 13493 + }, + { + "epoch": 0.8460718540347357, + "grad_norm": 3.1086997985839844, + "learning_rate": 1.2172447548611533e-06, + "loss": 1.0148, + "step": 13494 + }, + { + "epoch": 0.846134553890526, + "grad_norm": 3.2710347175598145, + "learning_rate": 1.2162739159920067e-06, + "loss": 1.1379, + "step": 13495 + }, + { + "epoch": 0.8461972537463164, + "grad_norm": 3.3502354621887207, + "learning_rate": 1.2153034393641095e-06, + "loss": 1.0735, + "step": 13496 + }, + { + "epoch": 0.8462599536021067, + "grad_norm": 3.3188838958740234, + "learning_rate": 1.2143333250174805e-06, + "loss": 1.0175, + "step": 13497 + }, + { + "epoch": 0.846322653457897, + "grad_norm": 3.1786999702453613, + "learning_rate": 1.2133635729921279e-06, + "loss": 1.2302, + "step": 13498 + }, + { + "epoch": 0.8463853533136874, + "grad_norm": 3.3311686515808105, + "learning_rate": 1.2123941833280472e-06, + "loss": 1.0624, + "step": 13499 + }, + { + "epoch": 0.8464480531694777, + "grad_norm": 3.4236879348754883, + "learning_rate": 1.2114251560652147e-06, + "loss": 1.0484, + "step": 13500 + }, + { + "epoch": 0.846510753025268, + "grad_norm": 3.361518383026123, + "learning_rate": 1.2104564912435924e-06, + "loss": 1.2181, + "step": 13501 + }, + { + "epoch": 0.8465734528810583, + "grad_norm": 3.258970022201538, + "learning_rate": 1.2094881889031251e-06, + "loss": 1.0087, + "step": 13502 + }, + { + "epoch": 0.8466361527368487, + "grad_norm": 3.205911159515381, + "learning_rate": 1.208520249083752e-06, + "loss": 1.1042, + "step": 13503 + }, + { + "epoch": 0.846698852592639, + "grad_norm": 3.9434213638305664, + "learning_rate": 1.207552671825387e-06, + "loss": 1.0997, + "step": 13504 + }, + { + "epoch": 0.8467615524484293, + "grad_norm": 3.439788579940796, + "learning_rate": 1.2065854571679348e-06, + "loss": 1.0845, + "step": 13505 + }, + { + "epoch": 0.8468242523042196, + "grad_norm": 3.7327585220336914, + "learning_rate": 1.2056186051512796e-06, + "loss": 1.0354, + "step": 13506 + }, + { + "epoch": 0.8468869521600101, + "grad_norm": 3.851933240890503, + "learning_rate": 1.204652115815299e-06, + "loss": 0.9699, + "step": 13507 + }, + { + "epoch": 0.8469496520158004, + "grad_norm": 3.509438991546631, + "learning_rate": 1.2036859891998497e-06, + "loss": 1.0602, + "step": 13508 + }, + { + "epoch": 0.8470123518715907, + "grad_norm": 3.381925344467163, + "learning_rate": 1.2027202253447722e-06, + "loss": 1.1397, + "step": 13509 + }, + { + "epoch": 0.8470750517273811, + "grad_norm": 3.4986088275909424, + "learning_rate": 1.201754824289898e-06, + "loss": 1.0376, + "step": 13510 + }, + { + "epoch": 0.8471377515831714, + "grad_norm": 3.5536692142486572, + "learning_rate": 1.2007897860750406e-06, + "loss": 1.0129, + "step": 13511 + }, + { + "epoch": 0.8472004514389617, + "grad_norm": 3.500058889389038, + "learning_rate": 1.1998251107399961e-06, + "loss": 0.937, + "step": 13512 + }, + { + "epoch": 0.847263151294752, + "grad_norm": 3.726627826690674, + "learning_rate": 1.198860798324546e-06, + "loss": 0.8937, + "step": 13513 + }, + { + "epoch": 0.8473258511505424, + "grad_norm": 3.6824560165405273, + "learning_rate": 1.1978968488684629e-06, + "loss": 1.132, + "step": 13514 + }, + { + "epoch": 0.8473885510063327, + "grad_norm": 3.3834521770477295, + "learning_rate": 1.1969332624114982e-06, + "loss": 0.977, + "step": 13515 + }, 
+ { + "epoch": 0.847451250862123, + "grad_norm": 3.2899413108825684, + "learning_rate": 1.1959700389933903e-06, + "loss": 1.1633, + "step": 13516 + }, + { + "epoch": 0.8475139507179134, + "grad_norm": 3.2308285236358643, + "learning_rate": 1.1950071786538597e-06, + "loss": 1.2131, + "step": 13517 + }, + { + "epoch": 0.8475766505737037, + "grad_norm": 3.5241799354553223, + "learning_rate": 1.19404468143262e-06, + "loss": 1.0961, + "step": 13518 + }, + { + "epoch": 0.847639350429494, + "grad_norm": 3.2915091514587402, + "learning_rate": 1.193082547369363e-06, + "loss": 1.0416, + "step": 13519 + }, + { + "epoch": 0.8477020502852843, + "grad_norm": 3.273554563522339, + "learning_rate": 1.1921207765037635e-06, + "loss": 1.1097, + "step": 13520 + }, + { + "epoch": 0.8477647501410747, + "grad_norm": 3.61418080329895, + "learning_rate": 1.1911593688754908e-06, + "loss": 0.9362, + "step": 13521 + }, + { + "epoch": 0.847827449996865, + "grad_norm": 3.528073310852051, + "learning_rate": 1.1901983245241898e-06, + "loss": 1.075, + "step": 13522 + }, + { + "epoch": 0.8478901498526553, + "grad_norm": 3.2003517150878906, + "learning_rate": 1.1892376434894947e-06, + "loss": 1.2905, + "step": 13523 + }, + { + "epoch": 0.8479528497084456, + "grad_norm": 3.8646275997161865, + "learning_rate": 1.1882773258110215e-06, + "loss": 1.1147, + "step": 13524 + }, + { + "epoch": 0.848015549564236, + "grad_norm": 3.031716823577881, + "learning_rate": 1.1873173715283782e-06, + "loss": 1.1636, + "step": 13525 + }, + { + "epoch": 0.8480782494200263, + "grad_norm": 3.3138980865478516, + "learning_rate": 1.186357780681152e-06, + "loss": 0.9662, + "step": 13526 + }, + { + "epoch": 0.8481409492758166, + "grad_norm": 3.6204802989959717, + "learning_rate": 1.185398553308915e-06, + "loss": 1.2336, + "step": 13527 + }, + { + "epoch": 0.848203649131607, + "grad_norm": 3.364980459213257, + "learning_rate": 1.1844396894512256e-06, + "loss": 0.9259, + "step": 13528 + }, + { + "epoch": 0.8482663489873973, + "grad_norm": 3.6717379093170166, + "learning_rate": 1.1834811891476294e-06, + "loss": 1.0956, + "step": 13529 + }, + { + "epoch": 0.8483290488431876, + "grad_norm": 3.5894439220428467, + "learning_rate": 1.1825230524376552e-06, + "loss": 1.0493, + "step": 13530 + }, + { + "epoch": 0.848391748698978, + "grad_norm": 3.545527219772339, + "learning_rate": 1.1815652793608123e-06, + "loss": 1.0783, + "step": 13531 + }, + { + "epoch": 0.8484544485547684, + "grad_norm": 3.4386026859283447, + "learning_rate": 1.1806078699566047e-06, + "loss": 1.0266, + "step": 13532 + }, + { + "epoch": 0.8485171484105587, + "grad_norm": 3.3115482330322266, + "learning_rate": 1.1796508242645132e-06, + "loss": 1.1398, + "step": 13533 + }, + { + "epoch": 0.848579848266349, + "grad_norm": 3.572204828262329, + "learning_rate": 1.1786941423240072e-06, + "loss": 0.9747, + "step": 13534 + }, + { + "epoch": 0.8486425481221394, + "grad_norm": 3.4648923873901367, + "learning_rate": 1.1777378241745385e-06, + "loss": 0.8731, + "step": 13535 + }, + { + "epoch": 0.8487052479779297, + "grad_norm": 3.4845685958862305, + "learning_rate": 1.176781869855549e-06, + "loss": 1.0702, + "step": 13536 + }, + { + "epoch": 0.84876794783372, + "grad_norm": 3.600534677505493, + "learning_rate": 1.1758262794064602e-06, + "loss": 0.9588, + "step": 13537 + }, + { + "epoch": 0.8488306476895103, + "grad_norm": 3.0702154636383057, + "learning_rate": 1.1748710528666797e-06, + "loss": 1.1891, + "step": 13538 + }, + { + "epoch": 0.8488933475453007, + "grad_norm": 3.2942216396331787, + 
"learning_rate": 1.1739161902756035e-06, + "loss": 1.1144, + "step": 13539 + }, + { + "epoch": 0.848956047401091, + "grad_norm": 3.301848888397217, + "learning_rate": 1.1729616916726095e-06, + "loss": 1.1296, + "step": 13540 + }, + { + "epoch": 0.8490187472568813, + "grad_norm": 3.731682777404785, + "learning_rate": 1.1720075570970602e-06, + "loss": 0.9977, + "step": 13541 + }, + { + "epoch": 0.8490814471126716, + "grad_norm": 3.1445934772491455, + "learning_rate": 1.171053786588303e-06, + "loss": 1.1146, + "step": 13542 + }, + { + "epoch": 0.849144146968462, + "grad_norm": 3.84891939163208, + "learning_rate": 1.170100380185676e-06, + "loss": 0.9724, + "step": 13543 + }, + { + "epoch": 0.8492068468242523, + "grad_norm": 3.0550239086151123, + "learning_rate": 1.1691473379284945e-06, + "loss": 1.0056, + "step": 13544 + }, + { + "epoch": 0.8492695466800426, + "grad_norm": 3.24735951423645, + "learning_rate": 1.1681946598560622e-06, + "loss": 1.0663, + "step": 13545 + }, + { + "epoch": 0.849332246535833, + "grad_norm": 3.351762056350708, + "learning_rate": 1.1672423460076666e-06, + "loss": 1.1366, + "step": 13546 + }, + { + "epoch": 0.8493949463916233, + "grad_norm": 3.395066261291504, + "learning_rate": 1.1662903964225835e-06, + "loss": 1.0404, + "step": 13547 + }, + { + "epoch": 0.8494576462474136, + "grad_norm": 3.1539456844329834, + "learning_rate": 1.1653388111400698e-06, + "loss": 1.0598, + "step": 13548 + }, + { + "epoch": 0.8495203461032039, + "grad_norm": 3.5213706493377686, + "learning_rate": 1.1643875901993684e-06, + "loss": 1.1569, + "step": 13549 + }, + { + "epoch": 0.8495830459589943, + "grad_norm": 3.4661550521850586, + "learning_rate": 1.16343673363971e-06, + "loss": 0.8953, + "step": 13550 + }, + { + "epoch": 0.8496457458147846, + "grad_norm": 3.2688944339752197, + "learning_rate": 1.1624862415003068e-06, + "loss": 1.067, + "step": 13551 + }, + { + "epoch": 0.8497084456705749, + "grad_norm": 3.344322681427002, + "learning_rate": 1.1615361138203574e-06, + "loss": 1.0283, + "step": 13552 + }, + { + "epoch": 0.8497711455263652, + "grad_norm": 3.6259334087371826, + "learning_rate": 1.1605863506390415e-06, + "loss": 0.9344, + "step": 13553 + }, + { + "epoch": 0.8498338453821557, + "grad_norm": 3.553664445877075, + "learning_rate": 1.159636951995533e-06, + "loss": 1.0229, + "step": 13554 + }, + { + "epoch": 0.849896545237946, + "grad_norm": 3.229273557662964, + "learning_rate": 1.1586879179289833e-06, + "loss": 1.0239, + "step": 13555 + }, + { + "epoch": 0.8499592450937363, + "grad_norm": 3.5853707790374756, + "learning_rate": 1.15773924847853e-06, + "loss": 1.1899, + "step": 13556 + }, + { + "epoch": 0.8500219449495267, + "grad_norm": 3.4443557262420654, + "learning_rate": 1.1567909436832936e-06, + "loss": 1.1634, + "step": 13557 + }, + { + "epoch": 0.850084644805317, + "grad_norm": 3.485764741897583, + "learning_rate": 1.1558430035823864e-06, + "loss": 1.0232, + "step": 13558 + }, + { + "epoch": 0.8501473446611073, + "grad_norm": 3.3867173194885254, + "learning_rate": 1.1548954282149006e-06, + "loss": 1.1982, + "step": 13559 + }, + { + "epoch": 0.8502100445168976, + "grad_norm": 3.2759459018707275, + "learning_rate": 1.1539482176199125e-06, + "loss": 0.9977, + "step": 13560 + }, + { + "epoch": 0.850272744372688, + "grad_norm": 3.1620047092437744, + "learning_rate": 1.1530013718364863e-06, + "loss": 1.0425, + "step": 13561 + }, + { + "epoch": 0.8503354442284783, + "grad_norm": 3.106139898300171, + "learning_rate": 1.152054890903671e-06, + "loss": 1.1975, + "step": 13562 + }, + { + 
"epoch": 0.8503981440842686, + "grad_norm": 3.243324041366577, + "learning_rate": 1.1511087748604988e-06, + "loss": 1.1879, + "step": 13563 + }, + { + "epoch": 0.8504608439400589, + "grad_norm": 3.3989481925964355, + "learning_rate": 1.150163023745985e-06, + "loss": 1.0602, + "step": 13564 + }, + { + "epoch": 0.8505235437958493, + "grad_norm": 3.448991298675537, + "learning_rate": 1.149217637599136e-06, + "loss": 0.9605, + "step": 13565 + }, + { + "epoch": 0.8505862436516396, + "grad_norm": 3.4032065868377686, + "learning_rate": 1.1482726164589386e-06, + "loss": 1.2023, + "step": 13566 + }, + { + "epoch": 0.8506489435074299, + "grad_norm": 3.8480136394500732, + "learning_rate": 1.147327960364364e-06, + "loss": 1.0753, + "step": 13567 + }, + { + "epoch": 0.8507116433632202, + "grad_norm": 3.5972464084625244, + "learning_rate": 1.1463836693543695e-06, + "loss": 1.0117, + "step": 13568 + }, + { + "epoch": 0.8507743432190106, + "grad_norm": 3.244304895401001, + "learning_rate": 1.1454397434679022e-06, + "loss": 1.2683, + "step": 13569 + }, + { + "epoch": 0.8508370430748009, + "grad_norm": 3.5429015159606934, + "learning_rate": 1.144496182743885e-06, + "loss": 1.1549, + "step": 13570 + }, + { + "epoch": 0.8508997429305912, + "grad_norm": 3.6003761291503906, + "learning_rate": 1.1435529872212304e-06, + "loss": 1.1495, + "step": 13571 + }, + { + "epoch": 0.8509624427863816, + "grad_norm": 3.145786762237549, + "learning_rate": 1.1426101569388404e-06, + "loss": 0.9048, + "step": 13572 + }, + { + "epoch": 0.8510251426421719, + "grad_norm": 3.016399621963501, + "learning_rate": 1.1416676919355928e-06, + "loss": 1.0946, + "step": 13573 + }, + { + "epoch": 0.8510878424979622, + "grad_norm": 3.0432496070861816, + "learning_rate": 1.140725592250358e-06, + "loss": 1.1039, + "step": 13574 + }, + { + "epoch": 0.8511505423537525, + "grad_norm": 3.51210618019104, + "learning_rate": 1.1397838579219834e-06, + "loss": 1.0917, + "step": 13575 + }, + { + "epoch": 0.8512132422095429, + "grad_norm": 3.181440830230713, + "learning_rate": 1.1388424889893114e-06, + "loss": 1.158, + "step": 13576 + }, + { + "epoch": 0.8512759420653333, + "grad_norm": 3.387627601623535, + "learning_rate": 1.137901485491163e-06, + "loss": 1.1804, + "step": 13577 + }, + { + "epoch": 0.8513386419211236, + "grad_norm": 3.520216464996338, + "learning_rate": 1.136960847466344e-06, + "loss": 1.1797, + "step": 13578 + }, + { + "epoch": 0.851401341776914, + "grad_norm": 3.1369664669036865, + "learning_rate": 1.1360205749536446e-06, + "loss": 1.0275, + "step": 13579 + }, + { + "epoch": 0.8514640416327043, + "grad_norm": 3.632263660430908, + "learning_rate": 1.1350806679918457e-06, + "loss": 0.9404, + "step": 13580 + }, + { + "epoch": 0.8515267414884946, + "grad_norm": 3.1416280269622803, + "learning_rate": 1.1341411266197077e-06, + "loss": 1.1901, + "step": 13581 + }, + { + "epoch": 0.8515894413442849, + "grad_norm": 3.19169545173645, + "learning_rate": 1.133201950875974e-06, + "loss": 1.1006, + "step": 13582 + }, + { + "epoch": 0.8516521412000753, + "grad_norm": 3.18283748626709, + "learning_rate": 1.132263140799381e-06, + "loss": 1.0776, + "step": 13583 + }, + { + "epoch": 0.8517148410558656, + "grad_norm": 3.5694870948791504, + "learning_rate": 1.1313246964286439e-06, + "loss": 1.1784, + "step": 13584 + }, + { + "epoch": 0.8517775409116559, + "grad_norm": 3.430736780166626, + "learning_rate": 1.1303866178024636e-06, + "loss": 1.0914, + "step": 13585 + }, + { + "epoch": 0.8518402407674462, + "grad_norm": 3.5326530933380127, + "learning_rate": 
1.1294489049595247e-06, + "loss": 0.8467, + "step": 13586 + }, + { + "epoch": 0.8519029406232366, + "grad_norm": 3.3532912731170654, + "learning_rate": 1.1285115579385019e-06, + "loss": 1.0925, + "step": 13587 + }, + { + "epoch": 0.8519656404790269, + "grad_norm": 3.7469217777252197, + "learning_rate": 1.1275745767780499e-06, + "loss": 1.1671, + "step": 13588 + }, + { + "epoch": 0.8520283403348172, + "grad_norm": 3.35113263130188, + "learning_rate": 1.1266379615168077e-06, + "loss": 1.0087, + "step": 13589 + }, + { + "epoch": 0.8520910401906076, + "grad_norm": 3.3649024963378906, + "learning_rate": 1.1257017121934055e-06, + "loss": 1.1349, + "step": 13590 + }, + { + "epoch": 0.8521537400463979, + "grad_norm": 3.3302204608917236, + "learning_rate": 1.1247658288464525e-06, + "loss": 1.1062, + "step": 13591 + }, + { + "epoch": 0.8522164399021882, + "grad_norm": 3.3517746925354004, + "learning_rate": 1.1238303115145444e-06, + "loss": 1.0169, + "step": 13592 + }, + { + "epoch": 0.8522791397579785, + "grad_norm": 3.2464964389801025, + "learning_rate": 1.1228951602362602e-06, + "loss": 1.0722, + "step": 13593 + }, + { + "epoch": 0.8523418396137689, + "grad_norm": 2.8282039165496826, + "learning_rate": 1.1219603750501684e-06, + "loss": 1.0162, + "step": 13594 + }, + { + "epoch": 0.8524045394695592, + "grad_norm": 3.13228178024292, + "learning_rate": 1.121025955994819e-06, + "loss": 1.0409, + "step": 13595 + }, + { + "epoch": 0.8524672393253495, + "grad_norm": 3.2355518341064453, + "learning_rate": 1.1200919031087466e-06, + "loss": 0.9864, + "step": 13596 + }, + { + "epoch": 0.8525299391811398, + "grad_norm": 3.6912288665771484, + "learning_rate": 1.1191582164304705e-06, + "loss": 1.0939, + "step": 13597 + }, + { + "epoch": 0.8525926390369302, + "grad_norm": 3.963895559310913, + "learning_rate": 1.1182248959984987e-06, + "loss": 1.0682, + "step": 13598 + }, + { + "epoch": 0.8526553388927205, + "grad_norm": 3.5924079418182373, + "learning_rate": 1.1172919418513195e-06, + "loss": 0.9767, + "step": 13599 + }, + { + "epoch": 0.8527180387485109, + "grad_norm": 3.4615471363067627, + "learning_rate": 1.1163593540274076e-06, + "loss": 1.0326, + "step": 13600 + }, + { + "epoch": 0.8527807386043013, + "grad_norm": 3.1490042209625244, + "learning_rate": 1.1154271325652254e-06, + "loss": 1.0735, + "step": 13601 + }, + { + "epoch": 0.8528434384600916, + "grad_norm": 3.3210906982421875, + "learning_rate": 1.114495277503217e-06, + "loss": 1.0508, + "step": 13602 + }, + { + "epoch": 0.8529061383158819, + "grad_norm": 3.1281938552856445, + "learning_rate": 1.1135637888798101e-06, + "loss": 1.0018, + "step": 13603 + }, + { + "epoch": 0.8529688381716722, + "grad_norm": 3.3324930667877197, + "learning_rate": 1.1126326667334196e-06, + "loss": 1.1365, + "step": 13604 + }, + { + "epoch": 0.8530315380274626, + "grad_norm": 3.568819999694824, + "learning_rate": 1.1117019111024475e-06, + "loss": 1.1881, + "step": 13605 + }, + { + "epoch": 0.8530942378832529, + "grad_norm": 3.460297107696533, + "learning_rate": 1.1107715220252778e-06, + "loss": 1.0661, + "step": 13606 + }, + { + "epoch": 0.8531569377390432, + "grad_norm": 3.2389543056488037, + "learning_rate": 1.1098414995402772e-06, + "loss": 1.0762, + "step": 13607 + }, + { + "epoch": 0.8532196375948335, + "grad_norm": 3.580777883529663, + "learning_rate": 1.1089118436858004e-06, + "loss": 1.0938, + "step": 13608 + }, + { + "epoch": 0.8532823374506239, + "grad_norm": 3.2722816467285156, + "learning_rate": 1.1079825545001887e-06, + "loss": 1.1373, + "step": 13609 + }, + { + 
"epoch": 0.8533450373064142, + "grad_norm": 3.4568471908569336, + "learning_rate": 1.107053632021766e-06, + "loss": 1.1722, + "step": 13610 + }, + { + "epoch": 0.8534077371622045, + "grad_norm": 3.367684841156006, + "learning_rate": 1.1061250762888365e-06, + "loss": 1.1119, + "step": 13611 + }, + { + "epoch": 0.8534704370179949, + "grad_norm": 3.24933123588562, + "learning_rate": 1.1051968873397e-06, + "loss": 1.115, + "step": 13612 + }, + { + "epoch": 0.8535331368737852, + "grad_norm": 3.456411600112915, + "learning_rate": 1.104269065212632e-06, + "loss": 1.0957, + "step": 13613 + }, + { + "epoch": 0.8535958367295755, + "grad_norm": 3.258037805557251, + "learning_rate": 1.103341609945897e-06, + "loss": 1.117, + "step": 13614 + }, + { + "epoch": 0.8536585365853658, + "grad_norm": 3.0551114082336426, + "learning_rate": 1.1024145215777392e-06, + "loss": 1.0579, + "step": 13615 + }, + { + "epoch": 0.8537212364411562, + "grad_norm": 3.546170711517334, + "learning_rate": 1.101487800146398e-06, + "loss": 0.9856, + "step": 13616 + }, + { + "epoch": 0.8537839362969465, + "grad_norm": 3.702685594558716, + "learning_rate": 1.1005614456900882e-06, + "loss": 1.0267, + "step": 13617 + }, + { + "epoch": 0.8538466361527368, + "grad_norm": 3.547483205795288, + "learning_rate": 1.0996354582470138e-06, + "loss": 1.2059, + "step": 13618 + }, + { + "epoch": 0.8539093360085271, + "grad_norm": 3.3782873153686523, + "learning_rate": 1.0987098378553584e-06, + "loss": 1.1207, + "step": 13619 + }, + { + "epoch": 0.8539720358643175, + "grad_norm": 3.4657797813415527, + "learning_rate": 1.0977845845533009e-06, + "loss": 1.1268, + "step": 13620 + }, + { + "epoch": 0.8540347357201078, + "grad_norm": 3.358482599258423, + "learning_rate": 1.096859698378996e-06, + "loss": 0.966, + "step": 13621 + }, + { + "epoch": 0.8540974355758981, + "grad_norm": 3.8153350353240967, + "learning_rate": 1.095935179370584e-06, + "loss": 0.9485, + "step": 13622 + }, + { + "epoch": 0.8541601354316886, + "grad_norm": 3.5573718547821045, + "learning_rate": 1.0950110275661951e-06, + "loss": 1.0789, + "step": 13623 + }, + { + "epoch": 0.8542228352874789, + "grad_norm": 2.8328044414520264, + "learning_rate": 1.0940872430039406e-06, + "loss": 1.2376, + "step": 13624 + }, + { + "epoch": 0.8542855351432692, + "grad_norm": 3.4081976413726807, + "learning_rate": 1.0931638257219179e-06, + "loss": 1.1145, + "step": 13625 + }, + { + "epoch": 0.8543482349990595, + "grad_norm": 3.68703293800354, + "learning_rate": 1.0922407757582053e-06, + "loss": 1.0678, + "step": 13626 + }, + { + "epoch": 0.8544109348548499, + "grad_norm": 3.5256240367889404, + "learning_rate": 1.091318093150875e-06, + "loss": 1.1846, + "step": 13627 + }, + { + "epoch": 0.8544736347106402, + "grad_norm": 3.438997983932495, + "learning_rate": 1.0903957779379749e-06, + "loss": 0.9804, + "step": 13628 + }, + { + "epoch": 0.8545363345664305, + "grad_norm": 2.925727128982544, + "learning_rate": 1.0894738301575414e-06, + "loss": 1.1868, + "step": 13629 + }, + { + "epoch": 0.8545990344222208, + "grad_norm": 3.081526041030884, + "learning_rate": 1.0885522498475953e-06, + "loss": 1.1882, + "step": 13630 + }, + { + "epoch": 0.8546617342780112, + "grad_norm": 3.2109627723693848, + "learning_rate": 1.087631037046145e-06, + "loss": 1.2117, + "step": 13631 + }, + { + "epoch": 0.8547244341338015, + "grad_norm": 3.435896158218384, + "learning_rate": 1.0867101917911804e-06, + "loss": 1.1206, + "step": 13632 + }, + { + "epoch": 0.8547871339895918, + "grad_norm": 3.5571744441986084, + "learning_rate": 
1.085789714120674e-06, + "loss": 1.0988, + "step": 13633 + }, + { + "epoch": 0.8548498338453822, + "grad_norm": 3.483684778213501, + "learning_rate": 1.0848696040725914e-06, + "loss": 0.9828, + "step": 13634 + }, + { + "epoch": 0.8549125337011725, + "grad_norm": 3.2450759410858154, + "learning_rate": 1.0839498616848753e-06, + "loss": 1.0466, + "step": 13635 + }, + { + "epoch": 0.8549752335569628, + "grad_norm": 3.4936370849609375, + "learning_rate": 1.0830304869954578e-06, + "loss": 1.1426, + "step": 13636 + }, + { + "epoch": 0.8550379334127531, + "grad_norm": 3.424017906188965, + "learning_rate": 1.0821114800422482e-06, + "loss": 0.9573, + "step": 13637 + }, + { + "epoch": 0.8551006332685435, + "grad_norm": 3.586397647857666, + "learning_rate": 1.0811928408631545e-06, + "loss": 1.0463, + "step": 13638 + }, + { + "epoch": 0.8551633331243338, + "grad_norm": 3.5472350120544434, + "learning_rate": 1.080274569496057e-06, + "loss": 0.9562, + "step": 13639 + }, + { + "epoch": 0.8552260329801241, + "grad_norm": 3.1613757610321045, + "learning_rate": 1.0793566659788258e-06, + "loss": 1.0644, + "step": 13640 + }, + { + "epoch": 0.8552887328359144, + "grad_norm": 3.984281063079834, + "learning_rate": 1.0784391303493126e-06, + "loss": 0.9296, + "step": 13641 + }, + { + "epoch": 0.8553514326917048, + "grad_norm": 3.451045036315918, + "learning_rate": 1.0775219626453615e-06, + "loss": 1.214, + "step": 13642 + }, + { + "epoch": 0.8554141325474951, + "grad_norm": 3.2754335403442383, + "learning_rate": 1.0766051629047947e-06, + "loss": 0.959, + "step": 13643 + }, + { + "epoch": 0.8554768324032854, + "grad_norm": 3.334829568862915, + "learning_rate": 1.0756887311654186e-06, + "loss": 0.9327, + "step": 13644 + }, + { + "epoch": 0.8555395322590758, + "grad_norm": 3.1537528038024902, + "learning_rate": 1.0747726674650305e-06, + "loss": 1.0407, + "step": 13645 + }, + { + "epoch": 0.8556022321148662, + "grad_norm": 3.2891738414764404, + "learning_rate": 1.0738569718414083e-06, + "loss": 1.0425, + "step": 13646 + }, + { + "epoch": 0.8556649319706565, + "grad_norm": 3.3994016647338867, + "learning_rate": 1.0729416443323126e-06, + "loss": 1.0956, + "step": 13647 + }, + { + "epoch": 0.8557276318264468, + "grad_norm": 3.0727477073669434, + "learning_rate": 1.0720266849754934e-06, + "loss": 1.1461, + "step": 13648 + }, + { + "epoch": 0.8557903316822372, + "grad_norm": 3.5789480209350586, + "learning_rate": 1.0711120938086839e-06, + "loss": 0.9519, + "step": 13649 + }, + { + "epoch": 0.8558530315380275, + "grad_norm": 3.4807188510894775, + "learning_rate": 1.0701978708696026e-06, + "loss": 1.008, + "step": 13650 + }, + { + "epoch": 0.8559157313938178, + "grad_norm": 3.4193496704101562, + "learning_rate": 1.0692840161959495e-06, + "loss": 1.0406, + "step": 13651 + }, + { + "epoch": 0.8559784312496082, + "grad_norm": 3.565535306930542, + "learning_rate": 1.0683705298254143e-06, + "loss": 1.1327, + "step": 13652 + }, + { + "epoch": 0.8560411311053985, + "grad_norm": 3.554755926132202, + "learning_rate": 1.0674574117956692e-06, + "loss": 1.1301, + "step": 13653 + }, + { + "epoch": 0.8561038309611888, + "grad_norm": 3.984966993331909, + "learning_rate": 1.066544662144371e-06, + "loss": 1.1786, + "step": 13654 + }, + { + "epoch": 0.8561665308169791, + "grad_norm": 3.0376553535461426, + "learning_rate": 1.065632280909158e-06, + "loss": 1.0799, + "step": 13655 + }, + { + "epoch": 0.8562292306727695, + "grad_norm": 3.458869218826294, + "learning_rate": 1.0647202681276625e-06, + "loss": 1.0271, + "step": 13656 + }, + { + 
"epoch": 0.8562919305285598, + "grad_norm": 3.572965621948242, + "learning_rate": 1.0638086238374934e-06, + "loss": 1.0517, + "step": 13657 + }, + { + "epoch": 0.8563546303843501, + "grad_norm": 3.352595806121826, + "learning_rate": 1.062897348076246e-06, + "loss": 1.091, + "step": 13658 + }, + { + "epoch": 0.8564173302401404, + "grad_norm": 3.4262402057647705, + "learning_rate": 1.0619864408815018e-06, + "loss": 1.2063, + "step": 13659 + }, + { + "epoch": 0.8564800300959308, + "grad_norm": 3.253185749053955, + "learning_rate": 1.0610759022908278e-06, + "loss": 1.2555, + "step": 13660 + }, + { + "epoch": 0.8565427299517211, + "grad_norm": 3.4020888805389404, + "learning_rate": 1.0601657323417746e-06, + "loss": 1.1185, + "step": 13661 + }, + { + "epoch": 0.8566054298075114, + "grad_norm": 3.5303783416748047, + "learning_rate": 1.0592559310718753e-06, + "loss": 0.9973, + "step": 13662 + }, + { + "epoch": 0.8566681296633017, + "grad_norm": 2.857412099838257, + "learning_rate": 1.058346498518653e-06, + "loss": 1.0741, + "step": 13663 + }, + { + "epoch": 0.8567308295190921, + "grad_norm": 3.6176669597625732, + "learning_rate": 1.0574374347196115e-06, + "loss": 1.1081, + "step": 13664 + }, + { + "epoch": 0.8567935293748824, + "grad_norm": 3.3424646854400635, + "learning_rate": 1.0565287397122416e-06, + "loss": 1.1833, + "step": 13665 + }, + { + "epoch": 0.8568562292306727, + "grad_norm": 3.0708115100860596, + "learning_rate": 1.0556204135340142e-06, + "loss": 1.1477, + "step": 13666 + }, + { + "epoch": 0.856918929086463, + "grad_norm": 3.4545109272003174, + "learning_rate": 1.0547124562223943e-06, + "loss": 0.9378, + "step": 13667 + }, + { + "epoch": 0.8569816289422534, + "grad_norm": 3.7205846309661865, + "learning_rate": 1.0538048678148228e-06, + "loss": 1.0042, + "step": 13668 + }, + { + "epoch": 0.8570443287980438, + "grad_norm": 3.227858066558838, + "learning_rate": 1.0528976483487297e-06, + "loss": 0.9775, + "step": 13669 + }, + { + "epoch": 0.8571070286538341, + "grad_norm": 3.639906167984009, + "learning_rate": 1.0519907978615263e-06, + "loss": 1.0216, + "step": 13670 + }, + { + "epoch": 0.8571697285096245, + "grad_norm": 3.9731640815734863, + "learning_rate": 1.0510843163906148e-06, + "loss": 1.0003, + "step": 13671 + }, + { + "epoch": 0.8572324283654148, + "grad_norm": 4.112612247467041, + "learning_rate": 1.0501782039733765e-06, + "loss": 1.2121, + "step": 13672 + }, + { + "epoch": 0.8572951282212051, + "grad_norm": 3.0581533908843994, + "learning_rate": 1.0492724606471794e-06, + "loss": 1.1314, + "step": 13673 + }, + { + "epoch": 0.8573578280769955, + "grad_norm": 3.2983858585357666, + "learning_rate": 1.0483670864493777e-06, + "loss": 1.0956, + "step": 13674 + }, + { + "epoch": 0.8574205279327858, + "grad_norm": 3.121201515197754, + "learning_rate": 1.0474620814173087e-06, + "loss": 1.1696, + "step": 13675 + }, + { + "epoch": 0.8574832277885761, + "grad_norm": 3.480806350708008, + "learning_rate": 1.0465574455882931e-06, + "loss": 1.0085, + "step": 13676 + }, + { + "epoch": 0.8575459276443664, + "grad_norm": 3.524195909500122, + "learning_rate": 1.0456531789996393e-06, + "loss": 0.9838, + "step": 13677 + }, + { + "epoch": 0.8576086275001568, + "grad_norm": 3.2770562171936035, + "learning_rate": 1.0447492816886406e-06, + "loss": 1.1079, + "step": 13678 + }, + { + "epoch": 0.8576713273559471, + "grad_norm": 3.5246493816375732, + "learning_rate": 1.0438457536925728e-06, + "loss": 1.126, + "step": 13679 + }, + { + "epoch": 0.8577340272117374, + "grad_norm": 3.4843883514404297, + 
"learning_rate": 1.042942595048697e-06, + "loss": 1.1246, + "step": 13680 + }, + { + "epoch": 0.8577967270675277, + "grad_norm": 3.472395896911621, + "learning_rate": 1.0420398057942581e-06, + "loss": 1.1602, + "step": 13681 + }, + { + "epoch": 0.8578594269233181, + "grad_norm": 3.6274876594543457, + "learning_rate": 1.041137385966491e-06, + "loss": 1.0493, + "step": 13682 + }, + { + "epoch": 0.8579221267791084, + "grad_norm": 3.080277919769287, + "learning_rate": 1.0402353356026084e-06, + "loss": 1.1549, + "step": 13683 + }, + { + "epoch": 0.8579848266348987, + "grad_norm": 3.56717848777771, + "learning_rate": 1.0393336547398103e-06, + "loss": 0.9625, + "step": 13684 + }, + { + "epoch": 0.858047526490689, + "grad_norm": 3.4591002464294434, + "learning_rate": 1.0384323434152854e-06, + "loss": 1.0757, + "step": 13685 + }, + { + "epoch": 0.8581102263464794, + "grad_norm": 3.3637776374816895, + "learning_rate": 1.037531401666202e-06, + "loss": 0.9269, + "step": 13686 + }, + { + "epoch": 0.8581729262022697, + "grad_norm": 3.6989033222198486, + "learning_rate": 1.0366308295297156e-06, + "loss": 1.0605, + "step": 13687 + }, + { + "epoch": 0.85823562605806, + "grad_norm": 3.2707602977752686, + "learning_rate": 1.0357306270429623e-06, + "loss": 0.9752, + "step": 13688 + }, + { + "epoch": 0.8582983259138504, + "grad_norm": 3.3956918716430664, + "learning_rate": 1.0348307942430723e-06, + "loss": 1.0921, + "step": 13689 + }, + { + "epoch": 0.8583610257696407, + "grad_norm": 3.7054197788238525, + "learning_rate": 1.0339313311671518e-06, + "loss": 0.9606, + "step": 13690 + }, + { + "epoch": 0.858423725625431, + "grad_norm": 3.9178574085235596, + "learning_rate": 1.033032237852294e-06, + "loss": 1.0357, + "step": 13691 + }, + { + "epoch": 0.8584864254812213, + "grad_norm": 2.980863332748413, + "learning_rate": 1.0321335143355771e-06, + "loss": 1.1233, + "step": 13692 + }, + { + "epoch": 0.8585491253370118, + "grad_norm": 3.167726755142212, + "learning_rate": 1.0312351606540672e-06, + "loss": 1.1583, + "step": 13693 + }, + { + "epoch": 0.8586118251928021, + "grad_norm": 3.857119560241699, + "learning_rate": 1.0303371768448113e-06, + "loss": 1.0792, + "step": 13694 + }, + { + "epoch": 0.8586745250485924, + "grad_norm": 3.3325934410095215, + "learning_rate": 1.0294395629448394e-06, + "loss": 1.0785, + "step": 13695 + }, + { + "epoch": 0.8587372249043828, + "grad_norm": 3.655564069747925, + "learning_rate": 1.0285423189911748e-06, + "loss": 1.1637, + "step": 13696 + }, + { + "epoch": 0.8587999247601731, + "grad_norm": 3.5149152278900146, + "learning_rate": 1.0276454450208151e-06, + "loss": 0.9997, + "step": 13697 + }, + { + "epoch": 0.8588626246159634, + "grad_norm": 3.622642993927002, + "learning_rate": 1.02674894107075e-06, + "loss": 0.9425, + "step": 13698 + }, + { + "epoch": 0.8589253244717537, + "grad_norm": 3.9269187450408936, + "learning_rate": 1.0258528071779483e-06, + "loss": 1.0853, + "step": 13699 + }, + { + "epoch": 0.8589880243275441, + "grad_norm": 3.537444829940796, + "learning_rate": 1.0249570433793709e-06, + "loss": 1.0645, + "step": 13700 + }, + { + "epoch": 0.8590507241833344, + "grad_norm": 3.9826178550720215, + "learning_rate": 1.024061649711957e-06, + "loss": 1.0466, + "step": 13701 + }, + { + "epoch": 0.8591134240391247, + "grad_norm": 3.668041229248047, + "learning_rate": 1.0231666262126305e-06, + "loss": 0.9648, + "step": 13702 + }, + { + "epoch": 0.859176123894915, + "grad_norm": 3.3677191734313965, + "learning_rate": 1.0222719729183062e-06, + "loss": 1.1658, + "step": 13703 + }, + 
{ + "epoch": 0.8592388237507054, + "grad_norm": 3.6524622440338135, + "learning_rate": 1.0213776898658779e-06, + "loss": 1.0781, + "step": 13704 + }, + { + "epoch": 0.8593015236064957, + "grad_norm": 3.521315097808838, + "learning_rate": 1.020483777092226e-06, + "loss": 1.1078, + "step": 13705 + }, + { + "epoch": 0.859364223462286, + "grad_norm": 3.1904373168945312, + "learning_rate": 1.0195902346342135e-06, + "loss": 0.9539, + "step": 13706 + }, + { + "epoch": 0.8594269233180764, + "grad_norm": 3.361760139465332, + "learning_rate": 1.018697062528693e-06, + "loss": 0.9604, + "step": 13707 + }, + { + "epoch": 0.8594896231738667, + "grad_norm": 3.393986940383911, + "learning_rate": 1.0178042608124984e-06, + "loss": 1.2103, + "step": 13708 + }, + { + "epoch": 0.859552323029657, + "grad_norm": 3.2524328231811523, + "learning_rate": 1.0169118295224488e-06, + "loss": 1.1203, + "step": 13709 + }, + { + "epoch": 0.8596150228854473, + "grad_norm": 3.5756444931030273, + "learning_rate": 1.0160197686953444e-06, + "loss": 0.9406, + "step": 13710 + }, + { + "epoch": 0.8596777227412377, + "grad_norm": 3.5740952491760254, + "learning_rate": 1.0151280783679806e-06, + "loss": 1.0152, + "step": 13711 + }, + { + "epoch": 0.859740422597028, + "grad_norm": 3.497485876083374, + "learning_rate": 1.0142367585771262e-06, + "loss": 1.1399, + "step": 13712 + }, + { + "epoch": 0.8598031224528183, + "grad_norm": 3.6912810802459717, + "learning_rate": 1.0133458093595383e-06, + "loss": 0.8953, + "step": 13713 + }, + { + "epoch": 0.8598658223086086, + "grad_norm": 3.036785840988159, + "learning_rate": 1.0124552307519641e-06, + "loss": 1.252, + "step": 13714 + }, + { + "epoch": 0.859928522164399, + "grad_norm": 3.3726413249969482, + "learning_rate": 1.0115650227911279e-06, + "loss": 1.05, + "step": 13715 + }, + { + "epoch": 0.8599912220201894, + "grad_norm": 3.1564064025878906, + "learning_rate": 1.010675185513743e-06, + "loss": 1.1744, + "step": 13716 + }, + { + "epoch": 0.8600539218759797, + "grad_norm": 3.2276217937469482, + "learning_rate": 1.0097857189565053e-06, + "loss": 1.1125, + "step": 13717 + }, + { + "epoch": 0.8601166217317701, + "grad_norm": 3.341817855834961, + "learning_rate": 1.0088966231560981e-06, + "loss": 1.1466, + "step": 13718 + }, + { + "epoch": 0.8601793215875604, + "grad_norm": 3.1644320487976074, + "learning_rate": 1.0080078981491858e-06, + "loss": 1.0881, + "step": 13719 + }, + { + "epoch": 0.8602420214433507, + "grad_norm": 3.9656362533569336, + "learning_rate": 1.0071195439724223e-06, + "loss": 1.0892, + "step": 13720 + }, + { + "epoch": 0.860304721299141, + "grad_norm": 3.5941085815429688, + "learning_rate": 1.0062315606624383e-06, + "loss": 0.9386, + "step": 13721 + }, + { + "epoch": 0.8603674211549314, + "grad_norm": 3.7753329277038574, + "learning_rate": 1.0053439482558602e-06, + "loss": 1.0636, + "step": 13722 + }, + { + "epoch": 0.8604301210107217, + "grad_norm": 3.4691402912139893, + "learning_rate": 1.0044567067892896e-06, + "loss": 1.1276, + "step": 13723 + }, + { + "epoch": 0.860492820866512, + "grad_norm": 3.640181303024292, + "learning_rate": 1.0035698362993163e-06, + "loss": 1.0406, + "step": 13724 + }, + { + "epoch": 0.8605555207223023, + "grad_norm": 3.3114113807678223, + "learning_rate": 1.0026833368225175e-06, + "loss": 1.0906, + "step": 13725 + }, + { + "epoch": 0.8606182205780927, + "grad_norm": 3.7023210525512695, + "learning_rate": 1.0017972083954508e-06, + "loss": 1.0727, + "step": 13726 + }, + { + "epoch": 0.860680920433883, + "grad_norm": 3.0176281929016113, + 
"learning_rate": 1.0009114510546592e-06, + "loss": 1.2068, + "step": 13727 + }, + { + "epoch": 0.8607436202896733, + "grad_norm": 3.4164257049560547, + "learning_rate": 1.000026064836671e-06, + "loss": 1.225, + "step": 13728 + }, + { + "epoch": 0.8608063201454637, + "grad_norm": 3.177858829498291, + "learning_rate": 9.99141049778004e-07, + "loss": 1.2249, + "step": 13729 + }, + { + "epoch": 0.860869020001254, + "grad_norm": 3.550830602645874, + "learning_rate": 9.982564059151524e-07, + "loss": 1.0324, + "step": 13730 + }, + { + "epoch": 0.8609317198570443, + "grad_norm": 3.780325174331665, + "learning_rate": 9.973721332845986e-07, + "loss": 1.0315, + "step": 13731 + }, + { + "epoch": 0.8609944197128346, + "grad_norm": 3.2969813346862793, + "learning_rate": 9.96488231922811e-07, + "loss": 1.0448, + "step": 13732 + }, + { + "epoch": 0.861057119568625, + "grad_norm": 3.3628785610198975, + "learning_rate": 9.956047018662429e-07, + "loss": 1.0835, + "step": 13733 + }, + { + "epoch": 0.8611198194244153, + "grad_norm": 3.454021692276001, + "learning_rate": 9.947215431513302e-07, + "loss": 0.992, + "step": 13734 + }, + { + "epoch": 0.8611825192802056, + "grad_norm": 3.3174421787261963, + "learning_rate": 9.938387558144925e-07, + "loss": 1.0118, + "step": 13735 + }, + { + "epoch": 0.8612452191359959, + "grad_norm": 3.2999017238616943, + "learning_rate": 9.929563398921393e-07, + "loss": 1.1543, + "step": 13736 + }, + { + "epoch": 0.8613079189917863, + "grad_norm": 3.705994129180908, + "learning_rate": 9.920742954206607e-07, + "loss": 0.8982, + "step": 13737 + }, + { + "epoch": 0.8613706188475766, + "grad_norm": 3.5675837993621826, + "learning_rate": 9.91192622436431e-07, + "loss": 1.1195, + "step": 13738 + }, + { + "epoch": 0.861433318703367, + "grad_norm": 3.479492664337158, + "learning_rate": 9.903113209758098e-07, + "loss": 1.2258, + "step": 13739 + }, + { + "epoch": 0.8614960185591574, + "grad_norm": 3.5986454486846924, + "learning_rate": 9.894303910751435e-07, + "loss": 1.064, + "step": 13740 + }, + { + "epoch": 0.8615587184149477, + "grad_norm": 3.2898123264312744, + "learning_rate": 9.885498327707632e-07, + "loss": 1.1507, + "step": 13741 + }, + { + "epoch": 0.861621418270738, + "grad_norm": 3.8004753589630127, + "learning_rate": 9.8766964609898e-07, + "loss": 1.0097, + "step": 13742 + }, + { + "epoch": 0.8616841181265283, + "grad_norm": 3.555692434310913, + "learning_rate": 9.867898310960932e-07, + "loss": 0.8834, + "step": 13743 + }, + { + "epoch": 0.8617468179823187, + "grad_norm": 3.4420671463012695, + "learning_rate": 9.85910387798389e-07, + "loss": 1.1462, + "step": 13744 + }, + { + "epoch": 0.861809517838109, + "grad_norm": 3.258749008178711, + "learning_rate": 9.850313162421332e-07, + "loss": 1.0983, + "step": 13745 + }, + { + "epoch": 0.8618722176938993, + "grad_norm": 3.3983185291290283, + "learning_rate": 9.841526164635785e-07, + "loss": 1.0428, + "step": 13746 + }, + { + "epoch": 0.8619349175496896, + "grad_norm": 3.5530011653900146, + "learning_rate": 9.832742884989654e-07, + "loss": 1.1246, + "step": 13747 + }, + { + "epoch": 0.86199761740548, + "grad_norm": 3.2534804344177246, + "learning_rate": 9.823963323845154e-07, + "loss": 1.0252, + "step": 13748 + }, + { + "epoch": 0.8620603172612703, + "grad_norm": 3.445887565612793, + "learning_rate": 9.815187481564325e-07, + "loss": 1.0012, + "step": 13749 + }, + { + "epoch": 0.8621230171170606, + "grad_norm": 3.103224039077759, + "learning_rate": 9.806415358509102e-07, + "loss": 1.0901, + "step": 13750 + }, + { + "epoch": 
0.862185716972851, + "grad_norm": 3.343268632888794, + "learning_rate": 9.79764695504125e-07, + "loss": 1.0988, + "step": 13751 + }, + { + "epoch": 0.8622484168286413, + "grad_norm": 3.311410903930664, + "learning_rate": 9.788882271522394e-07, + "loss": 1.0858, + "step": 13752 + }, + { + "epoch": 0.8623111166844316, + "grad_norm": 3.246339797973633, + "learning_rate": 9.78012130831394e-07, + "loss": 1.0306, + "step": 13753 + }, + { + "epoch": 0.8623738165402219, + "grad_norm": 3.3781440258026123, + "learning_rate": 9.771364065777244e-07, + "loss": 1.0045, + "step": 13754 + }, + { + "epoch": 0.8624365163960123, + "grad_norm": 4.0365424156188965, + "learning_rate": 9.762610544273443e-07, + "loss": 0.9513, + "step": 13755 + }, + { + "epoch": 0.8624992162518026, + "grad_norm": 3.619677782058716, + "learning_rate": 9.753860744163524e-07, + "loss": 1.0081, + "step": 13756 + }, + { + "epoch": 0.8625619161075929, + "grad_norm": 4.0181803703308105, + "learning_rate": 9.745114665808298e-07, + "loss": 1.182, + "step": 13757 + }, + { + "epoch": 0.8626246159633832, + "grad_norm": 3.651611328125, + "learning_rate": 9.73637230956851e-07, + "loss": 1.0823, + "step": 13758 + }, + { + "epoch": 0.8626873158191736, + "grad_norm": 3.8593146800994873, + "learning_rate": 9.72763367580467e-07, + "loss": 1.0966, + "step": 13759 + }, + { + "epoch": 0.8627500156749639, + "grad_norm": 3.2336933612823486, + "learning_rate": 9.718898764877161e-07, + "loss": 1.1634, + "step": 13760 + }, + { + "epoch": 0.8628127155307542, + "grad_norm": 3.491760015487671, + "learning_rate": 9.71016757714619e-07, + "loss": 0.9378, + "step": 13761 + }, + { + "epoch": 0.8628754153865447, + "grad_norm": 3.390044689178467, + "learning_rate": 9.701440112971871e-07, + "loss": 1.0161, + "step": 13762 + }, + { + "epoch": 0.862938115242335, + "grad_norm": 3.6702721118927, + "learning_rate": 9.692716372714096e-07, + "loss": 0.9442, + "step": 13763 + }, + { + "epoch": 0.8630008150981253, + "grad_norm": 3.532177448272705, + "learning_rate": 9.683996356732639e-07, + "loss": 1.136, + "step": 13764 + }, + { + "epoch": 0.8630635149539156, + "grad_norm": 3.5786831378936768, + "learning_rate": 9.675280065387117e-07, + "loss": 1.2378, + "step": 13765 + }, + { + "epoch": 0.863126214809706, + "grad_norm": 3.982025384902954, + "learning_rate": 9.666567499037004e-07, + "loss": 0.9669, + "step": 13766 + }, + { + "epoch": 0.8631889146654963, + "grad_norm": 3.4347689151763916, + "learning_rate": 9.657858658041585e-07, + "loss": 1.2172, + "step": 13767 + }, + { + "epoch": 0.8632516145212866, + "grad_norm": 3.307196855545044, + "learning_rate": 9.649153542760003e-07, + "loss": 0.9132, + "step": 13768 + }, + { + "epoch": 0.863314314377077, + "grad_norm": 3.439542055130005, + "learning_rate": 9.640452153551294e-07, + "loss": 1.1428, + "step": 13769 + }, + { + "epoch": 0.8633770142328673, + "grad_norm": 3.5062451362609863, + "learning_rate": 9.63175449077428e-07, + "loss": 1.1033, + "step": 13770 + }, + { + "epoch": 0.8634397140886576, + "grad_norm": 3.8194611072540283, + "learning_rate": 9.623060554787667e-07, + "loss": 1.1295, + "step": 13771 + }, + { + "epoch": 0.8635024139444479, + "grad_norm": 3.9134957790374756, + "learning_rate": 9.614370345949952e-07, + "loss": 1.0487, + "step": 13772 + }, + { + "epoch": 0.8635651138002383, + "grad_norm": 3.743391513824463, + "learning_rate": 9.605683864619574e-07, + "loss": 1.044, + "step": 13773 + }, + { + "epoch": 0.8636278136560286, + "grad_norm": 3.276681423187256, + "learning_rate": 9.597001111154736e-07, + "loss": 
1.0894, + "step": 13774 + }, + { + "epoch": 0.8636905135118189, + "grad_norm": 3.5497148036956787, + "learning_rate": 9.588322085913526e-07, + "loss": 1.0419, + "step": 13775 + }, + { + "epoch": 0.8637532133676092, + "grad_norm": 3.206979751586914, + "learning_rate": 9.579646789253839e-07, + "loss": 1.1092, + "step": 13776 + }, + { + "epoch": 0.8638159132233996, + "grad_norm": 3.1999588012695312, + "learning_rate": 9.570975221533496e-07, + "loss": 1.1076, + "step": 13777 + }, + { + "epoch": 0.8638786130791899, + "grad_norm": 3.480293035507202, + "learning_rate": 9.56230738311007e-07, + "loss": 1.0979, + "step": 13778 + }, + { + "epoch": 0.8639413129349802, + "grad_norm": 3.5313918590545654, + "learning_rate": 9.55364327434105e-07, + "loss": 1.1956, + "step": 13779 + }, + { + "epoch": 0.8640040127907705, + "grad_norm": 3.9809746742248535, + "learning_rate": 9.54498289558371e-07, + "loss": 1.0899, + "step": 13780 + }, + { + "epoch": 0.8640667126465609, + "grad_norm": 3.771759510040283, + "learning_rate": 9.536326247195238e-07, + "loss": 1.2483, + "step": 13781 + }, + { + "epoch": 0.8641294125023512, + "grad_norm": 3.722146987915039, + "learning_rate": 9.527673329532627e-07, + "loss": 1.0386, + "step": 13782 + }, + { + "epoch": 0.8641921123581415, + "grad_norm": 3.4285404682159424, + "learning_rate": 9.519024142952704e-07, + "loss": 1.1231, + "step": 13783 + }, + { + "epoch": 0.8642548122139319, + "grad_norm": 3.36118221282959, + "learning_rate": 9.510378687812194e-07, + "loss": 1.1366, + "step": 13784 + }, + { + "epoch": 0.8643175120697223, + "grad_norm": 3.467180013656616, + "learning_rate": 9.501736964467612e-07, + "loss": 1.2201, + "step": 13785 + }, + { + "epoch": 0.8643802119255126, + "grad_norm": 3.2202258110046387, + "learning_rate": 9.49309897327535e-07, + "loss": 1.0983, + "step": 13786 + }, + { + "epoch": 0.864442911781303, + "grad_norm": 3.4232144355773926, + "learning_rate": 9.484464714591624e-07, + "loss": 1.2301, + "step": 13787 + }, + { + "epoch": 0.8645056116370933, + "grad_norm": 3.550297260284424, + "learning_rate": 9.475834188772548e-07, + "loss": 1.0851, + "step": 13788 + }, + { + "epoch": 0.8645683114928836, + "grad_norm": 3.4609177112579346, + "learning_rate": 9.467207396174016e-07, + "loss": 1.2673, + "step": 13789 + }, + { + "epoch": 0.8646310113486739, + "grad_norm": 3.446171283721924, + "learning_rate": 9.458584337151811e-07, + "loss": 1.1657, + "step": 13790 + }, + { + "epoch": 0.8646937112044643, + "grad_norm": 3.425879955291748, + "learning_rate": 9.449965012061524e-07, + "loss": 1.0786, + "step": 13791 + }, + { + "epoch": 0.8647564110602546, + "grad_norm": 3.74410343170166, + "learning_rate": 9.44134942125865e-07, + "loss": 1.2305, + "step": 13792 + }, + { + "epoch": 0.8648191109160449, + "grad_norm": 3.527759313583374, + "learning_rate": 9.43273756509847e-07, + "loss": 1.0551, + "step": 13793 + }, + { + "epoch": 0.8648818107718352, + "grad_norm": 3.3546688556671143, + "learning_rate": 9.424129443936147e-07, + "loss": 1.1039, + "step": 13794 + }, + { + "epoch": 0.8649445106276256, + "grad_norm": 3.4338810443878174, + "learning_rate": 9.415525058126684e-07, + "loss": 1.1095, + "step": 13795 + }, + { + "epoch": 0.8650072104834159, + "grad_norm": 3.4938087463378906, + "learning_rate": 9.406924408024931e-07, + "loss": 1.0425, + "step": 13796 + }, + { + "epoch": 0.8650699103392062, + "grad_norm": 3.283724784851074, + "learning_rate": 9.398327493985582e-07, + "loss": 1.0617, + "step": 13797 + }, + { + "epoch": 0.8651326101949965, + "grad_norm": 3.4589996337890625, + 
"learning_rate": 9.389734316363131e-07, + "loss": 1.0461, + "step": 13798 + }, + { + "epoch": 0.8651953100507869, + "grad_norm": 3.631295919418335, + "learning_rate": 9.381144875512016e-07, + "loss": 0.9401, + "step": 13799 + }, + { + "epoch": 0.8652580099065772, + "grad_norm": 3.51526141166687, + "learning_rate": 9.372559171786444e-07, + "loss": 1.0975, + "step": 13800 + }, + { + "epoch": 0.8653207097623675, + "grad_norm": 3.0962045192718506, + "learning_rate": 9.363977205540486e-07, + "loss": 1.2886, + "step": 13801 + }, + { + "epoch": 0.8653834096181579, + "grad_norm": 3.849029779434204, + "learning_rate": 9.35539897712805e-07, + "loss": 1.1123, + "step": 13802 + }, + { + "epoch": 0.8654461094739482, + "grad_norm": 3.4439969062805176, + "learning_rate": 9.346824486902939e-07, + "loss": 1.1097, + "step": 13803 + }, + { + "epoch": 0.8655088093297385, + "grad_norm": 3.066599130630493, + "learning_rate": 9.33825373521875e-07, + "loss": 1.03, + "step": 13804 + }, + { + "epoch": 0.8655715091855288, + "grad_norm": 3.294511556625366, + "learning_rate": 9.32968672242891e-07, + "loss": 1.1362, + "step": 13805 + }, + { + "epoch": 0.8656342090413192, + "grad_norm": 3.0190277099609375, + "learning_rate": 9.32112344888677e-07, + "loss": 1.1348, + "step": 13806 + }, + { + "epoch": 0.8656969088971095, + "grad_norm": 3.5757293701171875, + "learning_rate": 9.312563914945461e-07, + "loss": 1.0157, + "step": 13807 + }, + { + "epoch": 0.8657596087528999, + "grad_norm": 3.305920362472534, + "learning_rate": 9.304008120957974e-07, + "loss": 1.0569, + "step": 13808 + }, + { + "epoch": 0.8658223086086902, + "grad_norm": 3.283393621444702, + "learning_rate": 9.295456067277131e-07, + "loss": 0.9788, + "step": 13809 + }, + { + "epoch": 0.8658850084644806, + "grad_norm": 4.037471294403076, + "learning_rate": 9.286907754255659e-07, + "loss": 1.0259, + "step": 13810 + }, + { + "epoch": 0.8659477083202709, + "grad_norm": 3.1144630908966064, + "learning_rate": 9.278363182246075e-07, + "loss": 1.2301, + "step": 13811 + }, + { + "epoch": 0.8660104081760612, + "grad_norm": 3.436795711517334, + "learning_rate": 9.269822351600755e-07, + "loss": 1.0135, + "step": 13812 + }, + { + "epoch": 0.8660731080318516, + "grad_norm": 3.6193203926086426, + "learning_rate": 9.261285262671915e-07, + "loss": 1.1572, + "step": 13813 + }, + { + "epoch": 0.8661358078876419, + "grad_norm": 3.1330575942993164, + "learning_rate": 9.252751915811642e-07, + "loss": 1.0587, + "step": 13814 + }, + { + "epoch": 0.8661985077434322, + "grad_norm": 3.5784056186676025, + "learning_rate": 9.244222311371842e-07, + "loss": 1.1052, + "step": 13815 + }, + { + "epoch": 0.8662612075992225, + "grad_norm": 3.75950026512146, + "learning_rate": 9.235696449704257e-07, + "loss": 0.8954, + "step": 13816 + }, + { + "epoch": 0.8663239074550129, + "grad_norm": 3.3797354698181152, + "learning_rate": 9.227174331160538e-07, + "loss": 1.2083, + "step": 13817 + }, + { + "epoch": 0.8663866073108032, + "grad_norm": 3.6847586631774902, + "learning_rate": 9.218655956092116e-07, + "loss": 1.0244, + "step": 13818 + }, + { + "epoch": 0.8664493071665935, + "grad_norm": 3.4778008460998535, + "learning_rate": 9.210141324850286e-07, + "loss": 1.1257, + "step": 13819 + }, + { + "epoch": 0.8665120070223838, + "grad_norm": 3.6614458560943604, + "learning_rate": 9.201630437786169e-07, + "loss": 1.0113, + "step": 13820 + }, + { + "epoch": 0.8665747068781742, + "grad_norm": 3.3395276069641113, + "learning_rate": 9.193123295250794e-07, + "loss": 1.0017, + "step": 13821 + }, + { + "epoch": 
0.8666374067339645, + "grad_norm": 3.5028278827667236, + "learning_rate": 9.184619897594982e-07, + "loss": 1.1139, + "step": 13822 + }, + { + "epoch": 0.8667001065897548, + "grad_norm": 3.611086368560791, + "learning_rate": 9.176120245169418e-07, + "loss": 0.8876, + "step": 13823 + }, + { + "epoch": 0.8667628064455452, + "grad_norm": 3.5496277809143066, + "learning_rate": 9.167624338324599e-07, + "loss": 1.0958, + "step": 13824 + }, + { + "epoch": 0.8668255063013355, + "grad_norm": 3.5601518154144287, + "learning_rate": 9.159132177410934e-07, + "loss": 1.1619, + "step": 13825 + }, + { + "epoch": 0.8668882061571258, + "grad_norm": 2.982316493988037, + "learning_rate": 9.150643762778633e-07, + "loss": 1.0664, + "step": 13826 + }, + { + "epoch": 0.8669509060129161, + "grad_norm": 3.8987958431243896, + "learning_rate": 9.142159094777725e-07, + "loss": 1.0167, + "step": 13827 + }, + { + "epoch": 0.8670136058687065, + "grad_norm": 3.514134168624878, + "learning_rate": 9.133678173758165e-07, + "loss": 1.0262, + "step": 13828 + }, + { + "epoch": 0.8670763057244968, + "grad_norm": 3.2271411418914795, + "learning_rate": 9.125201000069683e-07, + "loss": 1.0157, + "step": 13829 + }, + { + "epoch": 0.8671390055802871, + "grad_norm": 3.517493963241577, + "learning_rate": 9.116727574061879e-07, + "loss": 1.0036, + "step": 13830 + }, + { + "epoch": 0.8672017054360776, + "grad_norm": 3.735381603240967, + "learning_rate": 9.108257896084182e-07, + "loss": 1.1825, + "step": 13831 + }, + { + "epoch": 0.8672644052918679, + "grad_norm": 3.6473042964935303, + "learning_rate": 9.099791966485915e-07, + "loss": 1.1275, + "step": 13832 + }, + { + "epoch": 0.8673271051476582, + "grad_norm": 3.648810863494873, + "learning_rate": 9.09132978561621e-07, + "loss": 0.9997, + "step": 13833 + }, + { + "epoch": 0.8673898050034485, + "grad_norm": 3.3710503578186035, + "learning_rate": 9.082871353823997e-07, + "loss": 1.1583, + "step": 13834 + }, + { + "epoch": 0.8674525048592389, + "grad_norm": 3.45920467376709, + "learning_rate": 9.074416671458175e-07, + "loss": 1.0875, + "step": 13835 + }, + { + "epoch": 0.8675152047150292, + "grad_norm": 3.5227479934692383, + "learning_rate": 9.065965738867367e-07, + "loss": 1.0872, + "step": 13836 + }, + { + "epoch": 0.8675779045708195, + "grad_norm": 3.5107903480529785, + "learning_rate": 9.057518556400113e-07, + "loss": 1.0285, + "step": 13837 + }, + { + "epoch": 0.8676406044266098, + "grad_norm": 3.323444128036499, + "learning_rate": 9.049075124404738e-07, + "loss": 1.1037, + "step": 13838 + }, + { + "epoch": 0.8677033042824002, + "grad_norm": 3.5994396209716797, + "learning_rate": 9.040635443229506e-07, + "loss": 0.9779, + "step": 13839 + }, + { + "epoch": 0.8677660041381905, + "grad_norm": 3.292891025543213, + "learning_rate": 9.032199513222428e-07, + "loss": 1.1595, + "step": 13840 + }, + { + "epoch": 0.8678287039939808, + "grad_norm": 3.4698996543884277, + "learning_rate": 9.023767334731426e-07, + "loss": 1.2034, + "step": 13841 + }, + { + "epoch": 0.8678914038497711, + "grad_norm": 4.061750888824463, + "learning_rate": 9.01533890810421e-07, + "loss": 0.8182, + "step": 13842 + }, + { + "epoch": 0.8679541037055615, + "grad_norm": 3.6636011600494385, + "learning_rate": 9.006914233688402e-07, + "loss": 0.9844, + "step": 13843 + }, + { + "epoch": 0.8680168035613518, + "grad_norm": 3.4870121479034424, + "learning_rate": 8.998493311831435e-07, + "loss": 1.0726, + "step": 13844 + }, + { + "epoch": 0.8680795034171421, + "grad_norm": 3.2726855278015137, + "learning_rate": 
8.990076142880554e-07, + "loss": 1.0173, + "step": 13845 + }, + { + "epoch": 0.8681422032729325, + "grad_norm": 3.4069643020629883, + "learning_rate": 8.981662727182927e-07, + "loss": 0.9781, + "step": 13846 + }, + { + "epoch": 0.8682049031287228, + "grad_norm": 3.214857816696167, + "learning_rate": 8.973253065085497e-07, + "loss": 1.0686, + "step": 13847 + }, + { + "epoch": 0.8682676029845131, + "grad_norm": 3.675723075866699, + "learning_rate": 8.964847156935097e-07, + "loss": 1.0561, + "step": 13848 + }, + { + "epoch": 0.8683303028403034, + "grad_norm": 3.4506711959838867, + "learning_rate": 8.956445003078351e-07, + "loss": 1.1364, + "step": 13849 + }, + { + "epoch": 0.8683930026960938, + "grad_norm": 3.182889223098755, + "learning_rate": 8.948046603861804e-07, + "loss": 0.982, + "step": 13850 + }, + { + "epoch": 0.8684557025518841, + "grad_norm": 3.259505033493042, + "learning_rate": 8.939651959631801e-07, + "loss": 1.0453, + "step": 13851 + }, + { + "epoch": 0.8685184024076744, + "grad_norm": 3.3116235733032227, + "learning_rate": 8.931261070734521e-07, + "loss": 1.0128, + "step": 13852 + }, + { + "epoch": 0.8685811022634647, + "grad_norm": 3.3118646144866943, + "learning_rate": 8.922873937515997e-07, + "loss": 1.0359, + "step": 13853 + }, + { + "epoch": 0.8686438021192551, + "grad_norm": 3.564362049102783, + "learning_rate": 8.914490560322142e-07, + "loss": 1.1225, + "step": 13854 + }, + { + "epoch": 0.8687065019750455, + "grad_norm": 3.4475061893463135, + "learning_rate": 8.90611093949868e-07, + "loss": 0.9699, + "step": 13855 + }, + { + "epoch": 0.8687692018308358, + "grad_norm": 3.077507495880127, + "learning_rate": 8.897735075391156e-07, + "loss": 1.0257, + "step": 13856 + }, + { + "epoch": 0.8688319016866262, + "grad_norm": 3.640716075897217, + "learning_rate": 8.889362968345039e-07, + "loss": 1.2861, + "step": 13857 + }, + { + "epoch": 0.8688946015424165, + "grad_norm": 3.276437282562256, + "learning_rate": 8.880994618705574e-07, + "loss": 1.0993, + "step": 13858 + }, + { + "epoch": 0.8689573013982068, + "grad_norm": 3.6374857425689697, + "learning_rate": 8.872630026817864e-07, + "loss": 1.1181, + "step": 13859 + }, + { + "epoch": 0.8690200012539971, + "grad_norm": 3.1702332496643066, + "learning_rate": 8.864269193026853e-07, + "loss": 1.1332, + "step": 13860 + }, + { + "epoch": 0.8690827011097875, + "grad_norm": 3.532721519470215, + "learning_rate": 8.855912117677389e-07, + "loss": 1.0243, + "step": 13861 + }, + { + "epoch": 0.8691454009655778, + "grad_norm": 3.5639336109161377, + "learning_rate": 8.847558801114075e-07, + "loss": 1.0711, + "step": 13862 + }, + { + "epoch": 0.8692081008213681, + "grad_norm": 3.389723777770996, + "learning_rate": 8.839209243681435e-07, + "loss": 0.9421, + "step": 13863 + }, + { + "epoch": 0.8692708006771585, + "grad_norm": 3.6576831340789795, + "learning_rate": 8.830863445723748e-07, + "loss": 1.1545, + "step": 13864 + }, + { + "epoch": 0.8693335005329488, + "grad_norm": 3.3372035026550293, + "learning_rate": 8.822521407585271e-07, + "loss": 1.1228, + "step": 13865 + }, + { + "epoch": 0.8693962003887391, + "grad_norm": 3.372591733932495, + "learning_rate": 8.814183129609977e-07, + "loss": 1.1823, + "step": 13866 + }, + { + "epoch": 0.8694589002445294, + "grad_norm": 3.573399305343628, + "learning_rate": 8.805848612141743e-07, + "loss": 0.9431, + "step": 13867 + }, + { + "epoch": 0.8695216001003198, + "grad_norm": 3.685483455657959, + "learning_rate": 8.797517855524307e-07, + "loss": 0.8999, + "step": 13868 + }, + { + "epoch": 0.8695842999561101, 
+ "grad_norm": 3.141036033630371, + "learning_rate": 8.789190860101226e-07, + "loss": 1.2133, + "step": 13869 + }, + { + "epoch": 0.8696469998119004, + "grad_norm": 2.9863574504852295, + "learning_rate": 8.780867626215895e-07, + "loss": 1.1022, + "step": 13870 + }, + { + "epoch": 0.8697096996676907, + "grad_norm": 3.6265931129455566, + "learning_rate": 8.772548154211547e-07, + "loss": 1.081, + "step": 13871 + }, + { + "epoch": 0.8697723995234811, + "grad_norm": 3.227375030517578, + "learning_rate": 8.764232444431309e-07, + "loss": 1.2448, + "step": 13872 + }, + { + "epoch": 0.8698350993792714, + "grad_norm": 3.133575677871704, + "learning_rate": 8.755920497218118e-07, + "loss": 1.0802, + "step": 13873 + }, + { + "epoch": 0.8698977992350617, + "grad_norm": 3.323798656463623, + "learning_rate": 8.747612312914744e-07, + "loss": 1.1729, + "step": 13874 + }, + { + "epoch": 0.869960499090852, + "grad_norm": 3.388232469558716, + "learning_rate": 8.739307891863813e-07, + "loss": 1.0769, + "step": 13875 + }, + { + "epoch": 0.8700231989466424, + "grad_norm": 3.326087713241577, + "learning_rate": 8.731007234407818e-07, + "loss": 1.1374, + "step": 13876 + }, + { + "epoch": 0.8700858988024327, + "grad_norm": 3.4156744480133057, + "learning_rate": 8.722710340889074e-07, + "loss": 1.0605, + "step": 13877 + }, + { + "epoch": 0.8701485986582231, + "grad_norm": 3.7453320026397705, + "learning_rate": 8.714417211649729e-07, + "loss": 1.1378, + "step": 13878 + }, + { + "epoch": 0.8702112985140135, + "grad_norm": 3.2963972091674805, + "learning_rate": 8.70612784703182e-07, + "loss": 1.2647, + "step": 13879 + }, + { + "epoch": 0.8702739983698038, + "grad_norm": 3.7975053787231445, + "learning_rate": 8.697842247377187e-07, + "loss": 0.899, + "step": 13880 + }, + { + "epoch": 0.8703366982255941, + "grad_norm": 3.661039352416992, + "learning_rate": 8.68956041302752e-07, + "loss": 1.1699, + "step": 13881 + }, + { + "epoch": 0.8703993980813844, + "grad_norm": 3.704766273498535, + "learning_rate": 8.68128234432436e-07, + "loss": 1.1348, + "step": 13882 + }, + { + "epoch": 0.8704620979371748, + "grad_norm": 3.810352325439453, + "learning_rate": 8.67300804160911e-07, + "loss": 0.913, + "step": 13883 + }, + { + "epoch": 0.8705247977929651, + "grad_norm": 3.2712275981903076, + "learning_rate": 8.664737505223009e-07, + "loss": 1.0401, + "step": 13884 + }, + { + "epoch": 0.8705874976487554, + "grad_norm": 3.4355220794677734, + "learning_rate": 8.656470735507095e-07, + "loss": 1.06, + "step": 13885 + }, + { + "epoch": 0.8706501975045458, + "grad_norm": 3.6554222106933594, + "learning_rate": 8.648207732802338e-07, + "loss": 1.0146, + "step": 13886 + }, + { + "epoch": 0.8707128973603361, + "grad_norm": 3.1790497303009033, + "learning_rate": 8.63994849744949e-07, + "loss": 1.0636, + "step": 13887 + }, + { + "epoch": 0.8707755972161264, + "grad_norm": 3.852224826812744, + "learning_rate": 8.631693029789146e-07, + "loss": 1.1533, + "step": 13888 + }, + { + "epoch": 0.8708382970719167, + "grad_norm": 3.5320632457733154, + "learning_rate": 8.623441330161752e-07, + "loss": 1.1094, + "step": 13889 + }, + { + "epoch": 0.8709009969277071, + "grad_norm": 3.352429151535034, + "learning_rate": 8.61519339890765e-07, + "loss": 1.1802, + "step": 13890 + }, + { + "epoch": 0.8709636967834974, + "grad_norm": 3.1798529624938965, + "learning_rate": 8.606949236366969e-07, + "loss": 1.1325, + "step": 13891 + }, + { + "epoch": 0.8710263966392877, + "grad_norm": 3.4519710540771484, + "learning_rate": 8.598708842879688e-07, + "loss": 0.9949, + "step": 
13892 + }, + { + "epoch": 0.871089096495078, + "grad_norm": 3.0294315814971924, + "learning_rate": 8.590472218785628e-07, + "loss": 1.0814, + "step": 13893 + }, + { + "epoch": 0.8711517963508684, + "grad_norm": 3.4859373569488525, + "learning_rate": 8.582239364424505e-07, + "loss": 1.159, + "step": 13894 + }, + { + "epoch": 0.8712144962066587, + "grad_norm": 3.0956408977508545, + "learning_rate": 8.574010280135824e-07, + "loss": 1.1171, + "step": 13895 + }, + { + "epoch": 0.871277196062449, + "grad_norm": 3.3258140087127686, + "learning_rate": 8.565784966258928e-07, + "loss": 1.0376, + "step": 13896 + }, + { + "epoch": 0.8713398959182393, + "grad_norm": 3.5403432846069336, + "learning_rate": 8.557563423133075e-07, + "loss": 1.0125, + "step": 13897 + }, + { + "epoch": 0.8714025957740297, + "grad_norm": 3.4464330673217773, + "learning_rate": 8.549345651097296e-07, + "loss": 0.9946, + "step": 13898 + }, + { + "epoch": 0.87146529562982, + "grad_norm": 3.5029284954071045, + "learning_rate": 8.541131650490498e-07, + "loss": 1.1211, + "step": 13899 + }, + { + "epoch": 0.8715279954856103, + "grad_norm": 3.2976861000061035, + "learning_rate": 8.532921421651407e-07, + "loss": 1.2596, + "step": 13900 + }, + { + "epoch": 0.8715906953414008, + "grad_norm": 3.340754270553589, + "learning_rate": 8.524714964918646e-07, + "loss": 1.1411, + "step": 13901 + }, + { + "epoch": 0.8716533951971911, + "grad_norm": 2.9253625869750977, + "learning_rate": 8.51651228063064e-07, + "loss": 1.1325, + "step": 13902 + }, + { + "epoch": 0.8717160950529814, + "grad_norm": 3.981452465057373, + "learning_rate": 8.508313369125664e-07, + "loss": 1.0858, + "step": 13903 + }, + { + "epoch": 0.8717787949087717, + "grad_norm": 3.5549869537353516, + "learning_rate": 8.500118230741816e-07, + "loss": 1.0879, + "step": 13904 + }, + { + "epoch": 0.8718414947645621, + "grad_norm": 3.6954474449157715, + "learning_rate": 8.491926865817113e-07, + "loss": 1.0086, + "step": 13905 + }, + { + "epoch": 0.8719041946203524, + "grad_norm": 3.6693849563598633, + "learning_rate": 8.483739274689329e-07, + "loss": 0.8912, + "step": 13906 + }, + { + "epoch": 0.8719668944761427, + "grad_norm": 3.4041190147399902, + "learning_rate": 8.475555457696127e-07, + "loss": 1.0202, + "step": 13907 + }, + { + "epoch": 0.8720295943319331, + "grad_norm": 3.278120517730713, + "learning_rate": 8.467375415175027e-07, + "loss": 0.9605, + "step": 13908 + }, + { + "epoch": 0.8720922941877234, + "grad_norm": 3.4380135536193848, + "learning_rate": 8.459199147463371e-07, + "loss": 1.0572, + "step": 13909 + }, + { + "epoch": 0.8721549940435137, + "grad_norm": 3.401038408279419, + "learning_rate": 8.451026654898331e-07, + "loss": 1.0482, + "step": 13910 + }, + { + "epoch": 0.872217693899304, + "grad_norm": 3.185570478439331, + "learning_rate": 8.442857937816939e-07, + "loss": 1.1567, + "step": 13911 + }, + { + "epoch": 0.8722803937550944, + "grad_norm": 3.1346213817596436, + "learning_rate": 8.434692996556093e-07, + "loss": 1.1865, + "step": 13912 + }, + { + "epoch": 0.8723430936108847, + "grad_norm": 3.415062665939331, + "learning_rate": 8.426531831452511e-07, + "loss": 1.0512, + "step": 13913 + }, + { + "epoch": 0.872405793466675, + "grad_norm": 3.074395179748535, + "learning_rate": 8.418374442842758e-07, + "loss": 1.1427, + "step": 13914 + }, + { + "epoch": 0.8724684933224653, + "grad_norm": 3.5173349380493164, + "learning_rate": 8.41022083106322e-07, + "loss": 0.8947, + "step": 13915 + }, + { + "epoch": 0.8725311931782557, + "grad_norm": 3.194868564605713, + 
"learning_rate": 8.402070996450195e-07, + "loss": 1.0219, + "step": 13916 + }, + { + "epoch": 0.872593893034046, + "grad_norm": 3.427485466003418, + "learning_rate": 8.39392493933976e-07, + "loss": 1.1521, + "step": 13917 + }, + { + "epoch": 0.8726565928898363, + "grad_norm": 3.2627179622650146, + "learning_rate": 8.385782660067842e-07, + "loss": 0.9323, + "step": 13918 + }, + { + "epoch": 0.8727192927456267, + "grad_norm": 3.164755344390869, + "learning_rate": 8.377644158970277e-07, + "loss": 1.0961, + "step": 13919 + }, + { + "epoch": 0.872781992601417, + "grad_norm": 3.113938331604004, + "learning_rate": 8.369509436382661e-07, + "loss": 1.1079, + "step": 13920 + }, + { + "epoch": 0.8728446924572073, + "grad_norm": 3.3986315727233887, + "learning_rate": 8.361378492640471e-07, + "loss": 1.0152, + "step": 13921 + }, + { + "epoch": 0.8729073923129976, + "grad_norm": 3.046783447265625, + "learning_rate": 8.353251328079026e-07, + "loss": 1.1014, + "step": 13922 + }, + { + "epoch": 0.872970092168788, + "grad_norm": 3.5158183574676514, + "learning_rate": 8.345127943033504e-07, + "loss": 1.0004, + "step": 13923 + }, + { + "epoch": 0.8730327920245784, + "grad_norm": 3.672125816345215, + "learning_rate": 8.337008337838914e-07, + "loss": 1.0889, + "step": 13924 + }, + { + "epoch": 0.8730954918803687, + "grad_norm": 3.1256542205810547, + "learning_rate": 8.328892512830111e-07, + "loss": 1.0633, + "step": 13925 + }, + { + "epoch": 0.873158191736159, + "grad_norm": 3.9390525817871094, + "learning_rate": 8.320780468341761e-07, + "loss": 0.9895, + "step": 13926 + }, + { + "epoch": 0.8732208915919494, + "grad_norm": 3.2525153160095215, + "learning_rate": 8.312672204708439e-07, + "loss": 1.1156, + "step": 13927 + }, + { + "epoch": 0.8732835914477397, + "grad_norm": 3.682539701461792, + "learning_rate": 8.304567722264523e-07, + "loss": 1.1981, + "step": 13928 + }, + { + "epoch": 0.87334629130353, + "grad_norm": 3.4450011253356934, + "learning_rate": 8.296467021344223e-07, + "loss": 1.0729, + "step": 13929 + }, + { + "epoch": 0.8734089911593204, + "grad_norm": 3.728283643722534, + "learning_rate": 8.288370102281651e-07, + "loss": 1.0872, + "step": 13930 + }, + { + "epoch": 0.8734716910151107, + "grad_norm": 3.4750216007232666, + "learning_rate": 8.280276965410694e-07, + "loss": 1.0253, + "step": 13931 + }, + { + "epoch": 0.873534390870901, + "grad_norm": 3.34928560256958, + "learning_rate": 8.272187611065119e-07, + "loss": 1.1512, + "step": 13932 + }, + { + "epoch": 0.8735970907266913, + "grad_norm": 3.6128084659576416, + "learning_rate": 8.264102039578526e-07, + "loss": 1.0174, + "step": 13933 + }, + { + "epoch": 0.8736597905824817, + "grad_norm": 3.773442506790161, + "learning_rate": 8.256020251284381e-07, + "loss": 1.049, + "step": 13934 + }, + { + "epoch": 0.873722490438272, + "grad_norm": 3.3653531074523926, + "learning_rate": 8.247942246515961e-07, + "loss": 1.0102, + "step": 13935 + }, + { + "epoch": 0.8737851902940623, + "grad_norm": 3.2034287452697754, + "learning_rate": 8.239868025606402e-07, + "loss": 1.0798, + "step": 13936 + }, + { + "epoch": 0.8738478901498526, + "grad_norm": 3.363245725631714, + "learning_rate": 8.231797588888701e-07, + "loss": 1.0608, + "step": 13937 + }, + { + "epoch": 0.873910590005643, + "grad_norm": 3.404106616973877, + "learning_rate": 8.223730936695684e-07, + "loss": 1.0525, + "step": 13938 + }, + { + "epoch": 0.8739732898614333, + "grad_norm": 3.375317096710205, + "learning_rate": 8.215668069360006e-07, + "loss": 1.1658, + "step": 13939 + }, + { + "epoch": 
0.8740359897172236, + "grad_norm": 3.5000758171081543, + "learning_rate": 8.207608987214167e-07, + "loss": 0.9692, + "step": 13940 + }, + { + "epoch": 0.874098689573014, + "grad_norm": 3.7190101146698, + "learning_rate": 8.199553690590556e-07, + "loss": 1.1697, + "step": 13941 + }, + { + "epoch": 0.8741613894288043, + "grad_norm": 2.8829407691955566, + "learning_rate": 8.191502179821365e-07, + "loss": 1.1855, + "step": 13942 + }, + { + "epoch": 0.8742240892845946, + "grad_norm": 3.460610866546631, + "learning_rate": 8.183454455238638e-07, + "loss": 1.1377, + "step": 13943 + }, + { + "epoch": 0.8742867891403849, + "grad_norm": 3.2912819385528564, + "learning_rate": 8.175410517174232e-07, + "loss": 1.2345, + "step": 13944 + }, + { + "epoch": 0.8743494889961753, + "grad_norm": 3.5619008541107178, + "learning_rate": 8.167370365959925e-07, + "loss": 0.9957, + "step": 13945 + }, + { + "epoch": 0.8744121888519656, + "grad_norm": 3.0744235515594482, + "learning_rate": 8.159334001927288e-07, + "loss": 1.1811, + "step": 13946 + }, + { + "epoch": 0.874474888707756, + "grad_norm": 3.595069646835327, + "learning_rate": 8.151301425407699e-07, + "loss": 1.097, + "step": 13947 + }, + { + "epoch": 0.8745375885635464, + "grad_norm": 3.161043405532837, + "learning_rate": 8.143272636732469e-07, + "loss": 1.1243, + "step": 13948 + }, + { + "epoch": 0.8746002884193367, + "grad_norm": 3.3601737022399902, + "learning_rate": 8.13524763623268e-07, + "loss": 1.0632, + "step": 13949 + }, + { + "epoch": 0.874662988275127, + "grad_norm": 3.224886655807495, + "learning_rate": 8.127226424239299e-07, + "loss": 1.1212, + "step": 13950 + }, + { + "epoch": 0.8747256881309173, + "grad_norm": 3.5393612384796143, + "learning_rate": 8.119209001083095e-07, + "loss": 1.0026, + "step": 13951 + }, + { + "epoch": 0.8747883879867077, + "grad_norm": 3.1653239727020264, + "learning_rate": 8.111195367094737e-07, + "loss": 0.9855, + "step": 13952 + }, + { + "epoch": 0.874851087842498, + "grad_norm": 3.362086534500122, + "learning_rate": 8.103185522604695e-07, + "loss": 0.9811, + "step": 13953 + }, + { + "epoch": 0.8749137876982883, + "grad_norm": 3.3362600803375244, + "learning_rate": 8.095179467943293e-07, + "loss": 0.9105, + "step": 13954 + }, + { + "epoch": 0.8749764875540786, + "grad_norm": 3.2388417720794678, + "learning_rate": 8.0871772034407e-07, + "loss": 1.134, + "step": 13955 + }, + { + "epoch": 0.875039187409869, + "grad_norm": 3.3301992416381836, + "learning_rate": 8.079178729426939e-07, + "loss": 0.9871, + "step": 13956 + }, + { + "epoch": 0.8751018872656593, + "grad_norm": 3.427062749862671, + "learning_rate": 8.071184046231861e-07, + "loss": 1.1317, + "step": 13957 + }, + { + "epoch": 0.8751645871214496, + "grad_norm": 3.495429277420044, + "learning_rate": 8.063193154185145e-07, + "loss": 1.2023, + "step": 13958 + }, + { + "epoch": 0.87522728697724, + "grad_norm": 3.5866801738739014, + "learning_rate": 8.055206053616382e-07, + "loss": 1.0687, + "step": 13959 + }, + { + "epoch": 0.8752899868330303, + "grad_norm": 3.5887253284454346, + "learning_rate": 8.047222744854943e-07, + "loss": 1.1059, + "step": 13960 + }, + { + "epoch": 0.8753526866888206, + "grad_norm": 3.2925305366516113, + "learning_rate": 8.039243228230043e-07, + "loss": 1.055, + "step": 13961 + }, + { + "epoch": 0.8754153865446109, + "grad_norm": 3.467461347579956, + "learning_rate": 8.031267504070761e-07, + "loss": 1.0856, + "step": 13962 + }, + { + "epoch": 0.8754780864004013, + "grad_norm": 3.584723949432373, + "learning_rate": 8.023295572706036e-07, + "loss": 
1.1573, + "step": 13963 + }, + { + "epoch": 0.8755407862561916, + "grad_norm": 3.776484489440918, + "learning_rate": 8.015327434464615e-07, + "loss": 0.9973, + "step": 13964 + }, + { + "epoch": 0.8756034861119819, + "grad_norm": 4.010407447814941, + "learning_rate": 8.007363089675113e-07, + "loss": 1.0475, + "step": 13965 + }, + { + "epoch": 0.8756661859677722, + "grad_norm": 3.4031569957733154, + "learning_rate": 7.999402538665946e-07, + "loss": 1.1786, + "step": 13966 + }, + { + "epoch": 0.8757288858235626, + "grad_norm": 3.090432643890381, + "learning_rate": 7.991445781765462e-07, + "loss": 1.183, + "step": 13967 + }, + { + "epoch": 0.8757915856793529, + "grad_norm": 3.393240451812744, + "learning_rate": 7.983492819301775e-07, + "loss": 1.0379, + "step": 13968 + }, + { + "epoch": 0.8758542855351432, + "grad_norm": 3.572721242904663, + "learning_rate": 7.975543651602846e-07, + "loss": 1.0146, + "step": 13969 + }, + { + "epoch": 0.8759169853909337, + "grad_norm": 3.8503143787384033, + "learning_rate": 7.967598278996524e-07, + "loss": 0.9578, + "step": 13970 + }, + { + "epoch": 0.875979685246724, + "grad_norm": 3.4473659992218018, + "learning_rate": 7.95965670181047e-07, + "loss": 1.0014, + "step": 13971 + }, + { + "epoch": 0.8760423851025143, + "grad_norm": 3.491276741027832, + "learning_rate": 7.951718920372198e-07, + "loss": 1.0229, + "step": 13972 + }, + { + "epoch": 0.8761050849583046, + "grad_norm": 2.90401029586792, + "learning_rate": 7.943784935009036e-07, + "loss": 1.2488, + "step": 13973 + }, + { + "epoch": 0.876167784814095, + "grad_norm": 3.446052312850952, + "learning_rate": 7.935854746048221e-07, + "loss": 1.0781, + "step": 13974 + }, + { + "epoch": 0.8762304846698853, + "grad_norm": 3.648751974105835, + "learning_rate": 7.927928353816782e-07, + "loss": 0.9535, + "step": 13975 + }, + { + "epoch": 0.8762931845256756, + "grad_norm": 3.6753029823303223, + "learning_rate": 7.920005758641592e-07, + "loss": 1.1004, + "step": 13976 + }, + { + "epoch": 0.8763558843814659, + "grad_norm": 3.6228818893432617, + "learning_rate": 7.912086960849374e-07, + "loss": 1.1886, + "step": 13977 + }, + { + "epoch": 0.8764185842372563, + "grad_norm": 3.513486623764038, + "learning_rate": 7.904171960766727e-07, + "loss": 1.1916, + "step": 13978 + }, + { + "epoch": 0.8764812840930466, + "grad_norm": 3.2103769779205322, + "learning_rate": 7.896260758720043e-07, + "loss": 1.0358, + "step": 13979 + }, + { + "epoch": 0.8765439839488369, + "grad_norm": 3.6911561489105225, + "learning_rate": 7.888353355035583e-07, + "loss": 1.1116, + "step": 13980 + }, + { + "epoch": 0.8766066838046273, + "grad_norm": 3.6800878047943115, + "learning_rate": 7.880449750039454e-07, + "loss": 1.1332, + "step": 13981 + }, + { + "epoch": 0.8766693836604176, + "grad_norm": 3.294987916946411, + "learning_rate": 7.872549944057617e-07, + "loss": 1.1341, + "step": 13982 + }, + { + "epoch": 0.8767320835162079, + "grad_norm": 3.814612865447998, + "learning_rate": 7.864653937415833e-07, + "loss": 0.9721, + "step": 13983 + }, + { + "epoch": 0.8767947833719982, + "grad_norm": 3.3049495220184326, + "learning_rate": 7.856761730439732e-07, + "loss": 1.1646, + "step": 13984 + }, + { + "epoch": 0.8768574832277886, + "grad_norm": 3.2336697578430176, + "learning_rate": 7.848873323454809e-07, + "loss": 1.2887, + "step": 13985 + }, + { + "epoch": 0.8769201830835789, + "grad_norm": 3.4175868034362793, + "learning_rate": 7.840988716786391e-07, + "loss": 1.0156, + "step": 13986 + }, + { + "epoch": 0.8769828829393692, + "grad_norm": 3.8022727966308594, 
+ "learning_rate": 7.833107910759608e-07, + "loss": 1.0186, + "step": 13987 + }, + { + "epoch": 0.8770455827951595, + "grad_norm": 3.410735607147217, + "learning_rate": 7.825230905699466e-07, + "loss": 1.0592, + "step": 13988 + }, + { + "epoch": 0.8771082826509499, + "grad_norm": 3.193695306777954, + "learning_rate": 7.81735770193085e-07, + "loss": 1.0449, + "step": 13989 + }, + { + "epoch": 0.8771709825067402, + "grad_norm": 2.79311466217041, + "learning_rate": 7.809488299778423e-07, + "loss": 1.1037, + "step": 13990 + }, + { + "epoch": 0.8772336823625305, + "grad_norm": 3.2290971279144287, + "learning_rate": 7.801622699566713e-07, + "loss": 1.1164, + "step": 13991 + }, + { + "epoch": 0.8772963822183208, + "grad_norm": 3.2547824382781982, + "learning_rate": 7.793760901620118e-07, + "loss": 1.0535, + "step": 13992 + }, + { + "epoch": 0.8773590820741113, + "grad_norm": 3.56498646736145, + "learning_rate": 7.785902906262854e-07, + "loss": 1.1475, + "step": 13993 + }, + { + "epoch": 0.8774217819299016, + "grad_norm": 3.2698476314544678, + "learning_rate": 7.778048713818975e-07, + "loss": 1.119, + "step": 13994 + }, + { + "epoch": 0.8774844817856919, + "grad_norm": 3.438385248184204, + "learning_rate": 7.770198324612387e-07, + "loss": 1.2558, + "step": 13995 + }, + { + "epoch": 0.8775471816414823, + "grad_norm": 3.449312448501587, + "learning_rate": 7.762351738966866e-07, + "loss": 0.9757, + "step": 13996 + }, + { + "epoch": 0.8776098814972726, + "grad_norm": 3.2141683101654053, + "learning_rate": 7.754508957205975e-07, + "loss": 0.9642, + "step": 13997 + }, + { + "epoch": 0.8776725813530629, + "grad_norm": 3.1714324951171875, + "learning_rate": 7.746669979653154e-07, + "loss": 1.1339, + "step": 13998 + }, + { + "epoch": 0.8777352812088532, + "grad_norm": 3.4874191284179688, + "learning_rate": 7.738834806631712e-07, + "loss": 1.1298, + "step": 13999 + }, + { + "epoch": 0.8777979810646436, + "grad_norm": 4.054221153259277, + "learning_rate": 7.731003438464746e-07, + "loss": 0.9562, + "step": 14000 + }, + { + "epoch": 0.8777979810646436, + "eval_loss": 1.09546959400177, + "eval_runtime": 144.1215, + "eval_samples_per_second": 4.371, + "eval_steps_per_second": 1.096, + "step": 14000 + }, + { + "epoch": 0.8778606809204339, + "grad_norm": 3.351203441619873, + "learning_rate": 7.723175875475231e-07, + "loss": 1.0972, + "step": 14001 + }, + { + "epoch": 0.8779233807762242, + "grad_norm": 3.4316887855529785, + "learning_rate": 7.715352117985952e-07, + "loss": 1.1948, + "step": 14002 + }, + { + "epoch": 0.8779860806320146, + "grad_norm": 3.5190608501434326, + "learning_rate": 7.707532166319598e-07, + "loss": 0.9771, + "step": 14003 + }, + { + "epoch": 0.8780487804878049, + "grad_norm": 2.93854022026062, + "learning_rate": 7.699716020798653e-07, + "loss": 1.0579, + "step": 14004 + }, + { + "epoch": 0.8781114803435952, + "grad_norm": 3.377786874771118, + "learning_rate": 7.69190368174545e-07, + "loss": 0.9573, + "step": 14005 + }, + { + "epoch": 0.8781741801993855, + "grad_norm": 3.717362880706787, + "learning_rate": 7.684095149482152e-07, + "loss": 1.0589, + "step": 14006 + }, + { + "epoch": 0.8782368800551759, + "grad_norm": 3.6537792682647705, + "learning_rate": 7.676290424330812e-07, + "loss": 1.0282, + "step": 14007 + }, + { + "epoch": 0.8782995799109662, + "grad_norm": 3.413566827774048, + "learning_rate": 7.668489506613297e-07, + "loss": 1.1677, + "step": 14008 + }, + { + "epoch": 0.8783622797667565, + "grad_norm": 3.954007148742676, + "learning_rate": 7.660692396651281e-07, + "loss": 0.9818, + 
"step": 14009 + }, + { + "epoch": 0.8784249796225468, + "grad_norm": 3.212070941925049, + "learning_rate": 7.652899094766364e-07, + "loss": 1.0411, + "step": 14010 + }, + { + "epoch": 0.8784876794783372, + "grad_norm": 3.3464369773864746, + "learning_rate": 7.645109601279921e-07, + "loss": 1.101, + "step": 14011 + }, + { + "epoch": 0.8785503793341275, + "grad_norm": 3.678800344467163, + "learning_rate": 7.637323916513195e-07, + "loss": 0.7731, + "step": 14012 + }, + { + "epoch": 0.8786130791899178, + "grad_norm": 3.7878849506378174, + "learning_rate": 7.629542040787241e-07, + "loss": 1.0046, + "step": 14013 + }, + { + "epoch": 0.8786757790457081, + "grad_norm": 3.469820737838745, + "learning_rate": 7.621763974423024e-07, + "loss": 1.0052, + "step": 14014 + }, + { + "epoch": 0.8787384789014985, + "grad_norm": 3.839262008666992, + "learning_rate": 7.613989717741299e-07, + "loss": 1.0879, + "step": 14015 + }, + { + "epoch": 0.8788011787572888, + "grad_norm": 3.525202989578247, + "learning_rate": 7.606219271062676e-07, + "loss": 1.0637, + "step": 14016 + }, + { + "epoch": 0.8788638786130792, + "grad_norm": 3.24947452545166, + "learning_rate": 7.598452634707576e-07, + "loss": 1.1267, + "step": 14017 + }, + { + "epoch": 0.8789265784688696, + "grad_norm": 3.2014596462249756, + "learning_rate": 7.590689808996355e-07, + "loss": 1.21, + "step": 14018 + }, + { + "epoch": 0.8789892783246599, + "grad_norm": 3.65391206741333, + "learning_rate": 7.58293079424911e-07, + "loss": 1.0568, + "step": 14019 + }, + { + "epoch": 0.8790519781804502, + "grad_norm": 3.473317861557007, + "learning_rate": 7.575175590785821e-07, + "loss": 0.9157, + "step": 14020 + }, + { + "epoch": 0.8791146780362405, + "grad_norm": 3.2659668922424316, + "learning_rate": 7.567424198926354e-07, + "loss": 1.0673, + "step": 14021 + }, + { + "epoch": 0.8791773778920309, + "grad_norm": 3.246436357498169, + "learning_rate": 7.559676618990342e-07, + "loss": 1.0617, + "step": 14022 + }, + { + "epoch": 0.8792400777478212, + "grad_norm": 3.146374464035034, + "learning_rate": 7.551932851297306e-07, + "loss": 1.0247, + "step": 14023 + }, + { + "epoch": 0.8793027776036115, + "grad_norm": 3.781355619430542, + "learning_rate": 7.544192896166569e-07, + "loss": 1.1254, + "step": 14024 + }, + { + "epoch": 0.8793654774594019, + "grad_norm": 3.5576331615448, + "learning_rate": 7.536456753917377e-07, + "loss": 0.9777, + "step": 14025 + }, + { + "epoch": 0.8794281773151922, + "grad_norm": 3.24733304977417, + "learning_rate": 7.52872442486875e-07, + "loss": 1.0586, + "step": 14026 + }, + { + "epoch": 0.8794908771709825, + "grad_norm": 3.3892457485198975, + "learning_rate": 7.520995909339568e-07, + "loss": 1.024, + "step": 14027 + }, + { + "epoch": 0.8795535770267728, + "grad_norm": 3.3724327087402344, + "learning_rate": 7.513271207648531e-07, + "loss": 0.977, + "step": 14028 + }, + { + "epoch": 0.8796162768825632, + "grad_norm": 3.240417957305908, + "learning_rate": 7.505550320114241e-07, + "loss": 1.119, + "step": 14029 + }, + { + "epoch": 0.8796789767383535, + "grad_norm": 3.211465835571289, + "learning_rate": 7.497833247055109e-07, + "loss": 0.9885, + "step": 14030 + }, + { + "epoch": 0.8797416765941438, + "grad_norm": 3.339686632156372, + "learning_rate": 7.490119988789346e-07, + "loss": 1.1065, + "step": 14031 + }, + { + "epoch": 0.8798043764499341, + "grad_norm": 3.1879758834838867, + "learning_rate": 7.482410545635088e-07, + "loss": 1.0766, + "step": 14032 + }, + { + "epoch": 0.8798670763057245, + "grad_norm": 3.148467540740967, + "learning_rate": 
7.47470491791027e-07, + "loss": 1.195, + "step": 14033 + }, + { + "epoch": 0.8799297761615148, + "grad_norm": 3.471038579940796, + "learning_rate": 7.467003105932657e-07, + "loss": 0.9627, + "step": 14034 + }, + { + "epoch": 0.8799924760173051, + "grad_norm": 3.6493184566497803, + "learning_rate": 7.459305110019854e-07, + "loss": 1.0607, + "step": 14035 + }, + { + "epoch": 0.8800551758730955, + "grad_norm": 3.5367813110351562, + "learning_rate": 7.451610930489372e-07, + "loss": 1.1007, + "step": 14036 + }, + { + "epoch": 0.8801178757288858, + "grad_norm": 3.404273509979248, + "learning_rate": 7.443920567658502e-07, + "loss": 0.9626, + "step": 14037 + }, + { + "epoch": 0.8801805755846761, + "grad_norm": 3.2434635162353516, + "learning_rate": 7.43623402184438e-07, + "loss": 0.933, + "step": 14038 + }, + { + "epoch": 0.8802432754404664, + "grad_norm": 3.32944917678833, + "learning_rate": 7.428551293364006e-07, + "loss": 1.1732, + "step": 14039 + }, + { + "epoch": 0.8803059752962569, + "grad_norm": 3.795894145965576, + "learning_rate": 7.420872382534228e-07, + "loss": 1.0961, + "step": 14040 + }, + { + "epoch": 0.8803686751520472, + "grad_norm": 3.244513750076294, + "learning_rate": 7.413197289671714e-07, + "loss": 0.9224, + "step": 14041 + }, + { + "epoch": 0.8804313750078375, + "grad_norm": 3.97102689743042, + "learning_rate": 7.405526015092967e-07, + "loss": 1.1535, + "step": 14042 + }, + { + "epoch": 0.8804940748636279, + "grad_norm": 3.3528928756713867, + "learning_rate": 7.39785855911439e-07, + "loss": 1.2023, + "step": 14043 + }, + { + "epoch": 0.8805567747194182, + "grad_norm": 3.318756341934204, + "learning_rate": 7.390194922052174e-07, + "loss": 1.089, + "step": 14044 + }, + { + "epoch": 0.8806194745752085, + "grad_norm": 3.1453442573547363, + "learning_rate": 7.382535104222366e-07, + "loss": 1.22, + "step": 14045 + }, + { + "epoch": 0.8806821744309988, + "grad_norm": 3.2322182655334473, + "learning_rate": 7.374879105940824e-07, + "loss": 1.0674, + "step": 14046 + }, + { + "epoch": 0.8807448742867892, + "grad_norm": 3.561971426010132, + "learning_rate": 7.36722692752334e-07, + "loss": 1.008, + "step": 14047 + }, + { + "epoch": 0.8808075741425795, + "grad_norm": 2.995270252227783, + "learning_rate": 7.359578569285464e-07, + "loss": 1.1511, + "step": 14048 + }, + { + "epoch": 0.8808702739983698, + "grad_norm": 3.478684663772583, + "learning_rate": 7.351934031542596e-07, + "loss": 1.1357, + "step": 14049 + }, + { + "epoch": 0.8809329738541601, + "grad_norm": 3.029174327850342, + "learning_rate": 7.34429331461003e-07, + "loss": 1.0383, + "step": 14050 + }, + { + "epoch": 0.8809956737099505, + "grad_norm": 3.513363838195801, + "learning_rate": 7.336656418802857e-07, + "loss": 1.1664, + "step": 14051 + }, + { + "epoch": 0.8810583735657408, + "grad_norm": 3.8916006088256836, + "learning_rate": 7.329023344436015e-07, + "loss": 0.9229, + "step": 14052 + }, + { + "epoch": 0.8811210734215311, + "grad_norm": 3.863922595977783, + "learning_rate": 7.321394091824285e-07, + "loss": 0.9613, + "step": 14053 + }, + { + "epoch": 0.8811837732773214, + "grad_norm": 3.277489185333252, + "learning_rate": 7.313768661282316e-07, + "loss": 1.045, + "step": 14054 + }, + { + "epoch": 0.8812464731331118, + "grad_norm": 3.336683750152588, + "learning_rate": 7.30614705312459e-07, + "loss": 1.13, + "step": 14055 + }, + { + "epoch": 0.8813091729889021, + "grad_norm": 3.2457997798919678, + "learning_rate": 7.2985292676654e-07, + "loss": 1.0075, + "step": 14056 + }, + { + "epoch": 0.8813718728446924, + "grad_norm": 
3.0898072719573975, + "learning_rate": 7.290915305218893e-07, + "loss": 1.2722, + "step": 14057 + }, + { + "epoch": 0.8814345727004828, + "grad_norm": 3.537574529647827, + "learning_rate": 7.283305166099109e-07, + "loss": 0.9015, + "step": 14058 + }, + { + "epoch": 0.8814972725562731, + "grad_norm": 3.2492542266845703, + "learning_rate": 7.275698850619861e-07, + "loss": 1.0157, + "step": 14059 + }, + { + "epoch": 0.8815599724120634, + "grad_norm": 3.666611671447754, + "learning_rate": 7.268096359094834e-07, + "loss": 1.0623, + "step": 14060 + }, + { + "epoch": 0.8816226722678537, + "grad_norm": 3.29249906539917, + "learning_rate": 7.260497691837576e-07, + "loss": 1.1273, + "step": 14061 + }, + { + "epoch": 0.8816853721236441, + "grad_norm": 3.710005283355713, + "learning_rate": 7.252902849161436e-07, + "loss": 1.2424, + "step": 14062 + }, + { + "epoch": 0.8817480719794345, + "grad_norm": 3.4389209747314453, + "learning_rate": 7.245311831379642e-07, + "loss": 1.0208, + "step": 14063 + }, + { + "epoch": 0.8818107718352248, + "grad_norm": 3.599757194519043, + "learning_rate": 7.237724638805221e-07, + "loss": 1.1552, + "step": 14064 + }, + { + "epoch": 0.8818734716910152, + "grad_norm": 3.574998378753662, + "learning_rate": 7.230141271751101e-07, + "loss": 1.075, + "step": 14065 + }, + { + "epoch": 0.8819361715468055, + "grad_norm": 3.7306230068206787, + "learning_rate": 7.222561730530009e-07, + "loss": 0.9271, + "step": 14066 + }, + { + "epoch": 0.8819988714025958, + "grad_norm": 3.7981088161468506, + "learning_rate": 7.214986015454517e-07, + "loss": 1.0461, + "step": 14067 + }, + { + "epoch": 0.8820615712583861, + "grad_norm": 3.687279224395752, + "learning_rate": 7.207414126837043e-07, + "loss": 1.1457, + "step": 14068 + }, + { + "epoch": 0.8821242711141765, + "grad_norm": 3.5344958305358887, + "learning_rate": 7.199846064989879e-07, + "loss": 0.8876, + "step": 14069 + }, + { + "epoch": 0.8821869709699668, + "grad_norm": 3.501824140548706, + "learning_rate": 7.192281830225112e-07, + "loss": 1.0706, + "step": 14070 + }, + { + "epoch": 0.8822496708257571, + "grad_norm": 3.749281644821167, + "learning_rate": 7.184721422854679e-07, + "loss": 1.163, + "step": 14071 + }, + { + "epoch": 0.8823123706815474, + "grad_norm": 3.604832649230957, + "learning_rate": 7.17716484319041e-07, + "loss": 1.0415, + "step": 14072 + }, + { + "epoch": 0.8823750705373378, + "grad_norm": 3.5785176753997803, + "learning_rate": 7.169612091543909e-07, + "loss": 1.0955, + "step": 14073 + }, + { + "epoch": 0.8824377703931281, + "grad_norm": 3.277388572692871, + "learning_rate": 7.162063168226663e-07, + "loss": 1.0779, + "step": 14074 + }, + { + "epoch": 0.8825004702489184, + "grad_norm": 3.980748176574707, + "learning_rate": 7.154518073549965e-07, + "loss": 1.1456, + "step": 14075 + }, + { + "epoch": 0.8825631701047087, + "grad_norm": 3.655493974685669, + "learning_rate": 7.146976807825023e-07, + "loss": 0.9304, + "step": 14076 + }, + { + "epoch": 0.8826258699604991, + "grad_norm": 3.760026216506958, + "learning_rate": 7.13943937136281e-07, + "loss": 0.9436, + "step": 14077 + }, + { + "epoch": 0.8826885698162894, + "grad_norm": 3.2398486137390137, + "learning_rate": 7.131905764474167e-07, + "loss": 1.0746, + "step": 14078 + }, + { + "epoch": 0.8827512696720797, + "grad_norm": 3.2267978191375732, + "learning_rate": 7.124375987469767e-07, + "loss": 1.2119, + "step": 14079 + }, + { + "epoch": 0.8828139695278701, + "grad_norm": 3.700634002685547, + "learning_rate": 7.116850040660184e-07, + "loss": 0.8859, + "step": 14080 + }, 
+ { + "epoch": 0.8828766693836604, + "grad_norm": 3.2686476707458496, + "learning_rate": 7.109327924355747e-07, + "loss": 1.1231, + "step": 14081 + }, + { + "epoch": 0.8829393692394507, + "grad_norm": 3.0748114585876465, + "learning_rate": 7.101809638866675e-07, + "loss": 1.0806, + "step": 14082 + }, + { + "epoch": 0.883002069095241, + "grad_norm": 3.2556545734405518, + "learning_rate": 7.094295184503052e-07, + "loss": 0.9426, + "step": 14083 + }, + { + "epoch": 0.8830647689510314, + "grad_norm": 3.5625274181365967, + "learning_rate": 7.086784561574744e-07, + "loss": 1.1385, + "step": 14084 + }, + { + "epoch": 0.8831274688068217, + "grad_norm": 3.501089096069336, + "learning_rate": 7.079277770391502e-07, + "loss": 1.1932, + "step": 14085 + }, + { + "epoch": 0.8831901686626121, + "grad_norm": 3.276334762573242, + "learning_rate": 7.071774811262888e-07, + "loss": 1.0174, + "step": 14086 + }, + { + "epoch": 0.8832528685184025, + "grad_norm": 3.4149649143218994, + "learning_rate": 7.064275684498357e-07, + "loss": 1.1227, + "step": 14087 + }, + { + "epoch": 0.8833155683741928, + "grad_norm": 3.7002124786376953, + "learning_rate": 7.056780390407148e-07, + "loss": 1.005, + "step": 14088 + }, + { + "epoch": 0.8833782682299831, + "grad_norm": 3.4505176544189453, + "learning_rate": 7.049288929298382e-07, + "loss": 1.0701, + "step": 14089 + }, + { + "epoch": 0.8834409680857734, + "grad_norm": 3.4667060375213623, + "learning_rate": 7.041801301480977e-07, + "loss": 1.1163, + "step": 14090 + }, + { + "epoch": 0.8835036679415638, + "grad_norm": 3.3110334873199463, + "learning_rate": 7.034317507263766e-07, + "loss": 1.0999, + "step": 14091 + }, + { + "epoch": 0.8835663677973541, + "grad_norm": 3.346245050430298, + "learning_rate": 7.026837546955356e-07, + "loss": 0.9712, + "step": 14092 + }, + { + "epoch": 0.8836290676531444, + "grad_norm": 3.3196113109588623, + "learning_rate": 7.019361420864212e-07, + "loss": 1.0685, + "step": 14093 + }, + { + "epoch": 0.8836917675089347, + "grad_norm": 3.5975029468536377, + "learning_rate": 7.011889129298688e-07, + "loss": 0.9226, + "step": 14094 + }, + { + "epoch": 0.8837544673647251, + "grad_norm": 3.5768887996673584, + "learning_rate": 7.004420672566903e-07, + "loss": 1.0329, + "step": 14095 + }, + { + "epoch": 0.8838171672205154, + "grad_norm": 3.5084309577941895, + "learning_rate": 6.996956050976878e-07, + "loss": 1.0094, + "step": 14096 + }, + { + "epoch": 0.8838798670763057, + "grad_norm": 3.3599002361297607, + "learning_rate": 6.989495264836432e-07, + "loss": 1.1891, + "step": 14097 + }, + { + "epoch": 0.883942566932096, + "grad_norm": 3.466240406036377, + "learning_rate": 6.982038314453276e-07, + "loss": 1.1243, + "step": 14098 + }, + { + "epoch": 0.8840052667878864, + "grad_norm": 3.765192747116089, + "learning_rate": 6.974585200134931e-07, + "loss": 0.9942, + "step": 14099 + }, + { + "epoch": 0.8840679666436767, + "grad_norm": 3.359623670578003, + "learning_rate": 6.967135922188728e-07, + "loss": 1.0143, + "step": 14100 + }, + { + "epoch": 0.884130666499467, + "grad_norm": 3.1705801486968994, + "learning_rate": 6.959690480921922e-07, + "loss": 1.181, + "step": 14101 + }, + { + "epoch": 0.8841933663552574, + "grad_norm": 3.309762477874756, + "learning_rate": 6.952248876641543e-07, + "loss": 1.0432, + "step": 14102 + }, + { + "epoch": 0.8842560662110477, + "grad_norm": 3.466883420944214, + "learning_rate": 6.944811109654481e-07, + "loss": 0.9341, + "step": 14103 + }, + { + "epoch": 0.884318766066838, + "grad_norm": 3.1299896240234375, + "learning_rate": 
6.937377180267458e-07, + "loss": 1.1311, + "step": 14104 + }, + { + "epoch": 0.8843814659226283, + "grad_norm": 3.3739094734191895, + "learning_rate": 6.929947088787081e-07, + "loss": 1.2595, + "step": 14105 + }, + { + "epoch": 0.8844441657784187, + "grad_norm": 3.501330852508545, + "learning_rate": 6.922520835519752e-07, + "loss": 1.1014, + "step": 14106 + }, + { + "epoch": 0.884506865634209, + "grad_norm": 3.488982677459717, + "learning_rate": 6.915098420771726e-07, + "loss": 0.9894, + "step": 14107 + }, + { + "epoch": 0.8845695654899993, + "grad_norm": 3.613492727279663, + "learning_rate": 6.907679844849091e-07, + "loss": 1.1144, + "step": 14108 + }, + { + "epoch": 0.8846322653457898, + "grad_norm": 3.933838367462158, + "learning_rate": 6.900265108057824e-07, + "loss": 1.22, + "step": 14109 + }, + { + "epoch": 0.8846949652015801, + "grad_norm": 3.6207869052886963, + "learning_rate": 6.892854210703692e-07, + "loss": 1.053, + "step": 14110 + }, + { + "epoch": 0.8847576650573704, + "grad_norm": 3.4247922897338867, + "learning_rate": 6.885447153092295e-07, + "loss": 1.136, + "step": 14111 + }, + { + "epoch": 0.8848203649131607, + "grad_norm": 3.1704835891723633, + "learning_rate": 6.878043935529144e-07, + "loss": 1.1143, + "step": 14112 + }, + { + "epoch": 0.8848830647689511, + "grad_norm": 3.3404200077056885, + "learning_rate": 6.870644558319528e-07, + "loss": 1.0259, + "step": 14113 + }, + { + "epoch": 0.8849457646247414, + "grad_norm": 3.386275291442871, + "learning_rate": 6.863249021768592e-07, + "loss": 1.2065, + "step": 14114 + }, + { + "epoch": 0.8850084644805317, + "grad_norm": 3.445178747177124, + "learning_rate": 6.855857326181315e-07, + "loss": 1.0101, + "step": 14115 + }, + { + "epoch": 0.885071164336322, + "grad_norm": 3.3292441368103027, + "learning_rate": 6.848469471862573e-07, + "loss": 1.0994, + "step": 14116 + }, + { + "epoch": 0.8851338641921124, + "grad_norm": 3.305049419403076, + "learning_rate": 6.841085459117014e-07, + "loss": 1.0647, + "step": 14117 + }, + { + "epoch": 0.8851965640479027, + "grad_norm": 3.859511137008667, + "learning_rate": 6.83370528824916e-07, + "loss": 0.9072, + "step": 14118 + }, + { + "epoch": 0.885259263903693, + "grad_norm": 3.661438465118408, + "learning_rate": 6.826328959563355e-07, + "loss": 1.0636, + "step": 14119 + }, + { + "epoch": 0.8853219637594834, + "grad_norm": 3.2570247650146484, + "learning_rate": 6.818956473363825e-07, + "loss": 1.05, + "step": 14120 + }, + { + "epoch": 0.8853846636152737, + "grad_norm": 3.561429023742676, + "learning_rate": 6.811587829954602e-07, + "loss": 1.0299, + "step": 14121 + }, + { + "epoch": 0.885447363471064, + "grad_norm": 3.535022735595703, + "learning_rate": 6.804223029639557e-07, + "loss": 1.1151, + "step": 14122 + }, + { + "epoch": 0.8855100633268543, + "grad_norm": 3.2274580001831055, + "learning_rate": 6.796862072722432e-07, + "loss": 1.1302, + "step": 14123 + }, + { + "epoch": 0.8855727631826447, + "grad_norm": 3.6962435245513916, + "learning_rate": 6.789504959506787e-07, + "loss": 1.1472, + "step": 14124 + }, + { + "epoch": 0.885635463038435, + "grad_norm": 3.337860584259033, + "learning_rate": 6.782151690296024e-07, + "loss": 0.9917, + "step": 14125 + }, + { + "epoch": 0.8856981628942253, + "grad_norm": 3.288450002670288, + "learning_rate": 6.774802265393387e-07, + "loss": 1.1017, + "step": 14126 + }, + { + "epoch": 0.8857608627500156, + "grad_norm": 3.33681321144104, + "learning_rate": 6.767456685101981e-07, + "loss": 1.1884, + "step": 14127 + }, + { + "epoch": 0.885823562605806, + 
"grad_norm": 3.1988754272460938, + "learning_rate": 6.760114949724739e-07, + "loss": 0.9852, + "step": 14128 + }, + { + "epoch": 0.8858862624615963, + "grad_norm": 3.1812808513641357, + "learning_rate": 6.752777059564431e-07, + "loss": 1.1984, + "step": 14129 + }, + { + "epoch": 0.8859489623173866, + "grad_norm": 3.834160804748535, + "learning_rate": 6.745443014923658e-07, + "loss": 1.0762, + "step": 14130 + }, + { + "epoch": 0.886011662173177, + "grad_norm": 3.163933753967285, + "learning_rate": 6.738112816104891e-07, + "loss": 1.0099, + "step": 14131 + }, + { + "epoch": 0.8860743620289674, + "grad_norm": 3.235617160797119, + "learning_rate": 6.730786463410432e-07, + "loss": 1.0779, + "step": 14132 + }, + { + "epoch": 0.8861370618847577, + "grad_norm": 3.116311550140381, + "learning_rate": 6.723463957142395e-07, + "loss": 1.108, + "step": 14133 + }, + { + "epoch": 0.886199761740548, + "grad_norm": 3.3829190731048584, + "learning_rate": 6.716145297602794e-07, + "loss": 0.9815, + "step": 14134 + }, + { + "epoch": 0.8862624615963384, + "grad_norm": 3.179523468017578, + "learning_rate": 6.708830485093432e-07, + "loss": 1.0532, + "step": 14135 + }, + { + "epoch": 0.8863251614521287, + "grad_norm": 3.1987228393554688, + "learning_rate": 6.701519519915978e-07, + "loss": 1.1046, + "step": 14136 + }, + { + "epoch": 0.886387861307919, + "grad_norm": 3.669646978378296, + "learning_rate": 6.694212402371913e-07, + "loss": 1.1001, + "step": 14137 + }, + { + "epoch": 0.8864505611637093, + "grad_norm": 3.4260122776031494, + "learning_rate": 6.68690913276262e-07, + "loss": 1.1472, + "step": 14138 + }, + { + "epoch": 0.8865132610194997, + "grad_norm": 3.1892147064208984, + "learning_rate": 6.679609711389279e-07, + "loss": 1.1578, + "step": 14139 + }, + { + "epoch": 0.88657596087529, + "grad_norm": 3.4917283058166504, + "learning_rate": 6.672314138552894e-07, + "loss": 1.1042, + "step": 14140 + }, + { + "epoch": 0.8866386607310803, + "grad_norm": 3.747485637664795, + "learning_rate": 6.665022414554346e-07, + "loss": 1.0573, + "step": 14141 + }, + { + "epoch": 0.8867013605868707, + "grad_norm": 3.6695594787597656, + "learning_rate": 6.657734539694361e-07, + "loss": 0.964, + "step": 14142 + }, + { + "epoch": 0.886764060442661, + "grad_norm": 3.31561017036438, + "learning_rate": 6.650450514273476e-07, + "loss": 1.1142, + "step": 14143 + }, + { + "epoch": 0.8868267602984513, + "grad_norm": 3.4259488582611084, + "learning_rate": 6.643170338592076e-07, + "loss": 1.112, + "step": 14144 + }, + { + "epoch": 0.8868894601542416, + "grad_norm": 3.6306960582733154, + "learning_rate": 6.635894012950428e-07, + "loss": 0.9812, + "step": 14145 + }, + { + "epoch": 0.886952160010032, + "grad_norm": 3.308267831802368, + "learning_rate": 6.628621537648583e-07, + "loss": 1.1528, + "step": 14146 + }, + { + "epoch": 0.8870148598658223, + "grad_norm": 3.2039289474487305, + "learning_rate": 6.621352912986468e-07, + "loss": 1.1255, + "step": 14147 + }, + { + "epoch": 0.8870775597216126, + "grad_norm": 3.2904181480407715, + "learning_rate": 6.614088139263808e-07, + "loss": 1.1286, + "step": 14148 + }, + { + "epoch": 0.887140259577403, + "grad_norm": 3.459296941757202, + "learning_rate": 6.606827216780265e-07, + "loss": 1.2449, + "step": 14149 + }, + { + "epoch": 0.8872029594331933, + "grad_norm": 3.705528497695923, + "learning_rate": 6.599570145835233e-07, + "loss": 1.1191, + "step": 14150 + }, + { + "epoch": 0.8872656592889836, + "grad_norm": 3.3324484825134277, + "learning_rate": 6.592316926728015e-07, + "loss": 1.1944, + "step": 
14151 + }, + { + "epoch": 0.8873283591447739, + "grad_norm": 3.342975616455078, + "learning_rate": 6.585067559757707e-07, + "loss": 0.9635, + "step": 14152 + }, + { + "epoch": 0.8873910590005643, + "grad_norm": 3.48911714553833, + "learning_rate": 6.577822045223303e-07, + "loss": 1.1049, + "step": 14153 + }, + { + "epoch": 0.8874537588563546, + "grad_norm": 2.9755287170410156, + "learning_rate": 6.570580383423608e-07, + "loss": 1.1111, + "step": 14154 + }, + { + "epoch": 0.8875164587121449, + "grad_norm": 3.3191943168640137, + "learning_rate": 6.563342574657228e-07, + "loss": 1.0501, + "step": 14155 + }, + { + "epoch": 0.8875791585679353, + "grad_norm": 3.3548407554626465, + "learning_rate": 6.556108619222712e-07, + "loss": 1.3028, + "step": 14156 + }, + { + "epoch": 0.8876418584237257, + "grad_norm": 3.588686227798462, + "learning_rate": 6.548878517418345e-07, + "loss": 0.9768, + "step": 14157 + }, + { + "epoch": 0.887704558279516, + "grad_norm": 3.664928674697876, + "learning_rate": 6.54165226954232e-07, + "loss": 1.0823, + "step": 14158 + }, + { + "epoch": 0.8877672581353063, + "grad_norm": 3.495864152908325, + "learning_rate": 6.534429875892612e-07, + "loss": 1.1879, + "step": 14159 + }, + { + "epoch": 0.8878299579910967, + "grad_norm": 3.7475969791412354, + "learning_rate": 6.527211336767114e-07, + "loss": 0.9814, + "step": 14160 + }, + { + "epoch": 0.887892657846887, + "grad_norm": 3.1338860988616943, + "learning_rate": 6.519996652463512e-07, + "loss": 0.9762, + "step": 14161 + }, + { + "epoch": 0.8879553577026773, + "grad_norm": 3.4683547019958496, + "learning_rate": 6.512785823279299e-07, + "loss": 1.0071, + "step": 14162 + }, + { + "epoch": 0.8880180575584676, + "grad_norm": 2.906459093093872, + "learning_rate": 6.505578849511907e-07, + "loss": 1.3279, + "step": 14163 + }, + { + "epoch": 0.888080757414258, + "grad_norm": 3.254157781600952, + "learning_rate": 6.498375731458529e-07, + "loss": 1.2052, + "step": 14164 + }, + { + "epoch": 0.8881434572700483, + "grad_norm": 3.3803842067718506, + "learning_rate": 6.491176469416216e-07, + "loss": 1.2056, + "step": 14165 + }, + { + "epoch": 0.8882061571258386, + "grad_norm": 3.211942672729492, + "learning_rate": 6.483981063681844e-07, + "loss": 1.0334, + "step": 14166 + }, + { + "epoch": 0.8882688569816289, + "grad_norm": 3.6667137145996094, + "learning_rate": 6.476789514552206e-07, + "loss": 1.1646, + "step": 14167 + }, + { + "epoch": 0.8883315568374193, + "grad_norm": 3.536526679992676, + "learning_rate": 6.469601822323845e-07, + "loss": 0.908, + "step": 14168 + }, + { + "epoch": 0.8883942566932096, + "grad_norm": 3.486635446548462, + "learning_rate": 6.46241798729319e-07, + "loss": 0.9881, + "step": 14169 + }, + { + "epoch": 0.8884569565489999, + "grad_norm": 3.3698041439056396, + "learning_rate": 6.455238009756492e-07, + "loss": 0.9112, + "step": 14170 + }, + { + "epoch": 0.8885196564047902, + "grad_norm": 3.5484817028045654, + "learning_rate": 6.448061890009872e-07, + "loss": 0.9575, + "step": 14171 + }, + { + "epoch": 0.8885823562605806, + "grad_norm": 3.2253661155700684, + "learning_rate": 6.440889628349267e-07, + "loss": 1.1622, + "step": 14172 + }, + { + "epoch": 0.8886450561163709, + "grad_norm": 3.419725179672241, + "learning_rate": 6.433721225070443e-07, + "loss": 0.9917, + "step": 14173 + }, + { + "epoch": 0.8887077559721612, + "grad_norm": 3.4898488521575928, + "learning_rate": 6.426556680469065e-07, + "loss": 1.0892, + "step": 14174 + }, + { + "epoch": 0.8887704558279516, + "grad_norm": 3.7853970527648926, + "learning_rate": 
6.419395994840571e-07, + "loss": 1.0517, + "step": 14175 + }, + { + "epoch": 0.8888331556837419, + "grad_norm": 3.402331590652466, + "learning_rate": 6.412239168480272e-07, + "loss": 0.9756, + "step": 14176 + }, + { + "epoch": 0.8888958555395322, + "grad_norm": 3.1627795696258545, + "learning_rate": 6.405086201683297e-07, + "loss": 0.9339, + "step": 14177 + }, + { + "epoch": 0.8889585553953225, + "grad_norm": 3.4641001224517822, + "learning_rate": 6.397937094744678e-07, + "loss": 1.1223, + "step": 14178 + }, + { + "epoch": 0.889021255251113, + "grad_norm": 3.2530429363250732, + "learning_rate": 6.39079184795921e-07, + "loss": 0.9252, + "step": 14179 + }, + { + "epoch": 0.8890839551069033, + "grad_norm": 3.252547025680542, + "learning_rate": 6.383650461621583e-07, + "loss": 1.1949, + "step": 14180 + }, + { + "epoch": 0.8891466549626936, + "grad_norm": 3.377549409866333, + "learning_rate": 6.37651293602628e-07, + "loss": 0.9802, + "step": 14181 + }, + { + "epoch": 0.889209354818484, + "grad_norm": 3.1779696941375732, + "learning_rate": 6.36937927146769e-07, + "loss": 1.1238, + "step": 14182 + }, + { + "epoch": 0.8892720546742743, + "grad_norm": 3.3153576850891113, + "learning_rate": 6.36224946823999e-07, + "loss": 1.0847, + "step": 14183 + }, + { + "epoch": 0.8893347545300646, + "grad_norm": 3.5533885955810547, + "learning_rate": 6.355123526637197e-07, + "loss": 1.1014, + "step": 14184 + }, + { + "epoch": 0.8893974543858549, + "grad_norm": 3.4295735359191895, + "learning_rate": 6.348001446953201e-07, + "loss": 1.0894, + "step": 14185 + }, + { + "epoch": 0.8894601542416453, + "grad_norm": 3.434955596923828, + "learning_rate": 6.340883229481731e-07, + "loss": 1.11, + "step": 14186 + }, + { + "epoch": 0.8895228540974356, + "grad_norm": 3.4298183917999268, + "learning_rate": 6.333768874516322e-07, + "loss": 1.0123, + "step": 14187 + }, + { + "epoch": 0.8895855539532259, + "grad_norm": 3.5085532665252686, + "learning_rate": 6.326658382350348e-07, + "loss": 0.9396, + "step": 14188 + }, + { + "epoch": 0.8896482538090162, + "grad_norm": 3.52248215675354, + "learning_rate": 6.319551753277098e-07, + "loss": 1.0688, + "step": 14189 + }, + { + "epoch": 0.8897109536648066, + "grad_norm": 3.5212318897247314, + "learning_rate": 6.312448987589626e-07, + "loss": 0.975, + "step": 14190 + }, + { + "epoch": 0.8897736535205969, + "grad_norm": 3.4200265407562256, + "learning_rate": 6.305350085580852e-07, + "loss": 0.952, + "step": 14191 + }, + { + "epoch": 0.8898363533763872, + "grad_norm": 3.4192633628845215, + "learning_rate": 6.298255047543511e-07, + "loss": 1.1121, + "step": 14192 + }, + { + "epoch": 0.8898990532321776, + "grad_norm": 3.6112146377563477, + "learning_rate": 6.291163873770245e-07, + "loss": 0.9576, + "step": 14193 + }, + { + "epoch": 0.8899617530879679, + "grad_norm": 3.3207569122314453, + "learning_rate": 6.284076564553465e-07, + "loss": 1.1118, + "step": 14194 + }, + { + "epoch": 0.8900244529437582, + "grad_norm": 3.3010120391845703, + "learning_rate": 6.276993120185448e-07, + "loss": 0.9421, + "step": 14195 + }, + { + "epoch": 0.8900871527995485, + "grad_norm": 3.513561487197876, + "learning_rate": 6.26991354095835e-07, + "loss": 1.2033, + "step": 14196 + }, + { + "epoch": 0.8901498526553389, + "grad_norm": 3.5622551441192627, + "learning_rate": 6.262837827164103e-07, + "loss": 1.1811, + "step": 14197 + }, + { + "epoch": 0.8902125525111292, + "grad_norm": 3.468595266342163, + "learning_rate": 6.255765979094519e-07, + "loss": 1.0517, + "step": 14198 + }, + { + "epoch": 0.8902752523669195, + 
"grad_norm": 3.629255533218384, + "learning_rate": 6.248697997041219e-07, + "loss": 0.9324, + "step": 14199 + }, + { + "epoch": 0.8903379522227098, + "grad_norm": 3.347208261489868, + "learning_rate": 6.241633881295728e-07, + "loss": 1.0709, + "step": 14200 + }, + { + "epoch": 0.8904006520785002, + "grad_norm": 3.659132719039917, + "learning_rate": 6.234573632149354e-07, + "loss": 1.1318, + "step": 14201 + }, + { + "epoch": 0.8904633519342906, + "grad_norm": 3.600517749786377, + "learning_rate": 6.227517249893256e-07, + "loss": 0.9934, + "step": 14202 + }, + { + "epoch": 0.8905260517900809, + "grad_norm": 3.365412473678589, + "learning_rate": 6.220464734818421e-07, + "loss": 1.0612, + "step": 14203 + }, + { + "epoch": 0.8905887516458713, + "grad_norm": 3.454857349395752, + "learning_rate": 6.21341608721574e-07, + "loss": 1.1794, + "step": 14204 + }, + { + "epoch": 0.8906514515016616, + "grad_norm": 3.127106189727783, + "learning_rate": 6.206371307375869e-07, + "loss": 1.0448, + "step": 14205 + }, + { + "epoch": 0.8907141513574519, + "grad_norm": 3.2981739044189453, + "learning_rate": 6.199330395589331e-07, + "loss": 1.085, + "step": 14206 + }, + { + "epoch": 0.8907768512132422, + "grad_norm": 3.679645538330078, + "learning_rate": 6.192293352146506e-07, + "loss": 1.1478, + "step": 14207 + }, + { + "epoch": 0.8908395510690326, + "grad_norm": 3.6675899028778076, + "learning_rate": 6.185260177337615e-07, + "loss": 1.1601, + "step": 14208 + }, + { + "epoch": 0.8909022509248229, + "grad_norm": 3.6305203437805176, + "learning_rate": 6.178230871452684e-07, + "loss": 1.1628, + "step": 14209 + }, + { + "epoch": 0.8909649507806132, + "grad_norm": 3.463263750076294, + "learning_rate": 6.171205434781591e-07, + "loss": 0.9628, + "step": 14210 + }, + { + "epoch": 0.8910276506364035, + "grad_norm": 3.5244147777557373, + "learning_rate": 6.164183867614104e-07, + "loss": 1.1158, + "step": 14211 + }, + { + "epoch": 0.8910903504921939, + "grad_norm": 3.1647121906280518, + "learning_rate": 6.15716617023977e-07, + "loss": 1.2249, + "step": 14212 + }, + { + "epoch": 0.8911530503479842, + "grad_norm": 3.62410044670105, + "learning_rate": 6.15015234294799e-07, + "loss": 1.1535, + "step": 14213 + }, + { + "epoch": 0.8912157502037745, + "grad_norm": 3.6583714485168457, + "learning_rate": 6.143142386028034e-07, + "loss": 1.0657, + "step": 14214 + }, + { + "epoch": 0.8912784500595649, + "grad_norm": 3.251020908355713, + "learning_rate": 6.136136299768991e-07, + "loss": 1.16, + "step": 14215 + }, + { + "epoch": 0.8913411499153552, + "grad_norm": 3.2827610969543457, + "learning_rate": 6.129134084459787e-07, + "loss": 1.1295, + "step": 14216 + }, + { + "epoch": 0.8914038497711455, + "grad_norm": 3.232605457305908, + "learning_rate": 6.122135740389168e-07, + "loss": 0.9871, + "step": 14217 + }, + { + "epoch": 0.8914665496269358, + "grad_norm": 3.7098093032836914, + "learning_rate": 6.115141267845792e-07, + "loss": 0.9701, + "step": 14218 + }, + { + "epoch": 0.8915292494827262, + "grad_norm": 3.370229482650757, + "learning_rate": 6.108150667118096e-07, + "loss": 1.0984, + "step": 14219 + }, + { + "epoch": 0.8915919493385165, + "grad_norm": 3.6407864093780518, + "learning_rate": 6.101163938494359e-07, + "loss": 1.1938, + "step": 14220 + }, + { + "epoch": 0.8916546491943068, + "grad_norm": 3.6778550148010254, + "learning_rate": 6.094181082262706e-07, + "loss": 1.0445, + "step": 14221 + }, + { + "epoch": 0.8917173490500971, + "grad_norm": 3.9086763858795166, + "learning_rate": 6.087202098711154e-07, + "loss": 1.1001, + 
"step": 14222 + }, + { + "epoch": 0.8917800489058875, + "grad_norm": 3.482135772705078, + "learning_rate": 6.080226988127469e-07, + "loss": 1.1275, + "step": 14223 + }, + { + "epoch": 0.8918427487616778, + "grad_norm": 3.46454119682312, + "learning_rate": 6.073255750799323e-07, + "loss": 1.0899, + "step": 14224 + }, + { + "epoch": 0.8919054486174682, + "grad_norm": 3.5932559967041016, + "learning_rate": 6.066288387014219e-07, + "loss": 1.2086, + "step": 14225 + }, + { + "epoch": 0.8919681484732586, + "grad_norm": 3.438962936401367, + "learning_rate": 6.059324897059493e-07, + "loss": 1.0652, + "step": 14226 + }, + { + "epoch": 0.8920308483290489, + "grad_norm": 3.4012553691864014, + "learning_rate": 6.052365281222306e-07, + "loss": 1.0255, + "step": 14227 + }, + { + "epoch": 0.8920935481848392, + "grad_norm": 3.531252384185791, + "learning_rate": 6.04540953978966e-07, + "loss": 0.8911, + "step": 14228 + }, + { + "epoch": 0.8921562480406295, + "grad_norm": 3.4797604084014893, + "learning_rate": 6.038457673048448e-07, + "loss": 1.031, + "step": 14229 + }, + { + "epoch": 0.8922189478964199, + "grad_norm": 3.3216397762298584, + "learning_rate": 6.031509681285341e-07, + "loss": 1.127, + "step": 14230 + }, + { + "epoch": 0.8922816477522102, + "grad_norm": 3.8873517513275146, + "learning_rate": 6.024565564786878e-07, + "loss": 1.0515, + "step": 14231 + }, + { + "epoch": 0.8923443476080005, + "grad_norm": 3.8046510219573975, + "learning_rate": 6.017625323839415e-07, + "loss": 1.0413, + "step": 14232 + }, + { + "epoch": 0.8924070474637908, + "grad_norm": 3.541227340698242, + "learning_rate": 6.010688958729205e-07, + "loss": 1.1798, + "step": 14233 + }, + { + "epoch": 0.8924697473195812, + "grad_norm": 3.560779333114624, + "learning_rate": 6.003756469742294e-07, + "loss": 1.103, + "step": 14234 + }, + { + "epoch": 0.8925324471753715, + "grad_norm": 3.3688371181488037, + "learning_rate": 5.996827857164556e-07, + "loss": 0.9961, + "step": 14235 + }, + { + "epoch": 0.8925951470311618, + "grad_norm": 3.1717782020568848, + "learning_rate": 5.989903121281748e-07, + "loss": 1.1303, + "step": 14236 + }, + { + "epoch": 0.8926578468869522, + "grad_norm": 3.0767621994018555, + "learning_rate": 5.982982262379444e-07, + "loss": 1.0714, + "step": 14237 + }, + { + "epoch": 0.8927205467427425, + "grad_norm": 3.4228410720825195, + "learning_rate": 5.976065280743059e-07, + "loss": 1.1798, + "step": 14238 + }, + { + "epoch": 0.8927832465985328, + "grad_norm": 3.582615613937378, + "learning_rate": 5.969152176657844e-07, + "loss": 0.8455, + "step": 14239 + }, + { + "epoch": 0.8928459464543231, + "grad_norm": 3.553889513015747, + "learning_rate": 5.962242950408903e-07, + "loss": 1.1064, + "step": 14240 + }, + { + "epoch": 0.8929086463101135, + "grad_norm": 3.321620464324951, + "learning_rate": 5.955337602281164e-07, + "loss": 1.0704, + "step": 14241 + }, + { + "epoch": 0.8929713461659038, + "grad_norm": 2.940849781036377, + "learning_rate": 5.94843613255942e-07, + "loss": 0.9349, + "step": 14242 + }, + { + "epoch": 0.8930340460216941, + "grad_norm": 3.6583306789398193, + "learning_rate": 5.941538541528258e-07, + "loss": 0.9863, + "step": 14243 + }, + { + "epoch": 0.8930967458774844, + "grad_norm": 3.1917998790740967, + "learning_rate": 5.93464482947217e-07, + "loss": 1.0919, + "step": 14244 + }, + { + "epoch": 0.8931594457332748, + "grad_norm": 3.42508864402771, + "learning_rate": 5.927754996675428e-07, + "loss": 0.9599, + "step": 14245 + }, + { + "epoch": 0.8932221455890651, + "grad_norm": 3.8218324184417725, + 
"learning_rate": 5.920869043422172e-07, + "loss": 0.9875, + "step": 14246 + }, + { + "epoch": 0.8932848454448554, + "grad_norm": 3.1399238109588623, + "learning_rate": 5.913986969996399e-07, + "loss": 0.9405, + "step": 14247 + }, + { + "epoch": 0.8933475453006459, + "grad_norm": 3.533066987991333, + "learning_rate": 5.907108776681913e-07, + "loss": 1.042, + "step": 14248 + }, + { + "epoch": 0.8934102451564362, + "grad_norm": 3.354403018951416, + "learning_rate": 5.900234463762367e-07, + "loss": 1.0464, + "step": 14249 + }, + { + "epoch": 0.8934729450122265, + "grad_norm": 3.9263744354248047, + "learning_rate": 5.893364031521254e-07, + "loss": 1.1163, + "step": 14250 + }, + { + "epoch": 0.8935356448680168, + "grad_norm": 3.4037835597991943, + "learning_rate": 5.886497480241926e-07, + "loss": 0.8531, + "step": 14251 + }, + { + "epoch": 0.8935983447238072, + "grad_norm": 3.7609915733337402, + "learning_rate": 5.879634810207557e-07, + "loss": 0.9974, + "step": 14252 + }, + { + "epoch": 0.8936610445795975, + "grad_norm": 3.4855780601501465, + "learning_rate": 5.872776021701154e-07, + "loss": 0.9809, + "step": 14253 + }, + { + "epoch": 0.8937237444353878, + "grad_norm": 3.308877468109131, + "learning_rate": 5.865921115005557e-07, + "loss": 1.1152, + "step": 14254 + }, + { + "epoch": 0.8937864442911782, + "grad_norm": 3.6140060424804688, + "learning_rate": 5.859070090403507e-07, + "loss": 1.1237, + "step": 14255 + }, + { + "epoch": 0.8938491441469685, + "grad_norm": 3.5803415775299072, + "learning_rate": 5.852222948177522e-07, + "loss": 1.084, + "step": 14256 + }, + { + "epoch": 0.8939118440027588, + "grad_norm": 3.1130189895629883, + "learning_rate": 5.845379688609954e-07, + "loss": 1.2406, + "step": 14257 + }, + { + "epoch": 0.8939745438585491, + "grad_norm": 3.0329132080078125, + "learning_rate": 5.838540311983043e-07, + "loss": 1.0573, + "step": 14258 + }, + { + "epoch": 0.8940372437143395, + "grad_norm": 3.4396286010742188, + "learning_rate": 5.831704818578842e-07, + "loss": 1.1216, + "step": 14259 + }, + { + "epoch": 0.8940999435701298, + "grad_norm": 3.8590147495269775, + "learning_rate": 5.824873208679261e-07, + "loss": 1.1636, + "step": 14260 + }, + { + "epoch": 0.8941626434259201, + "grad_norm": 3.5005006790161133, + "learning_rate": 5.818045482566015e-07, + "loss": 1.1199, + "step": 14261 + }, + { + "epoch": 0.8942253432817104, + "grad_norm": 3.4257826805114746, + "learning_rate": 5.811221640520681e-07, + "loss": 1.1689, + "step": 14262 + }, + { + "epoch": 0.8942880431375008, + "grad_norm": 2.9207096099853516, + "learning_rate": 5.80440168282469e-07, + "loss": 1.0743, + "step": 14263 + }, + { + "epoch": 0.8943507429932911, + "grad_norm": 3.4775424003601074, + "learning_rate": 5.797585609759282e-07, + "loss": 1.1087, + "step": 14264 + }, + { + "epoch": 0.8944134428490814, + "grad_norm": 3.361865520477295, + "learning_rate": 5.790773421605544e-07, + "loss": 1.1476, + "step": 14265 + }, + { + "epoch": 0.8944761427048717, + "grad_norm": 4.123421669006348, + "learning_rate": 5.783965118644441e-07, + "loss": 0.9326, + "step": 14266 + }, + { + "epoch": 0.8945388425606621, + "grad_norm": 2.9853668212890625, + "learning_rate": 5.777160701156736e-07, + "loss": 1.2062, + "step": 14267 + }, + { + "epoch": 0.8946015424164524, + "grad_norm": 3.0377895832061768, + "learning_rate": 5.770360169423028e-07, + "loss": 1.0332, + "step": 14268 + }, + { + "epoch": 0.8946642422722427, + "grad_norm": 3.4823033809661865, + "learning_rate": 5.763563523723769e-07, + "loss": 1.1639, + "step": 14269 + }, + { + 
"epoch": 0.894726942128033, + "grad_norm": 3.20991587638855, + "learning_rate": 5.756770764339293e-07, + "loss": 1.0755, + "step": 14270 + }, + { + "epoch": 0.8947896419838235, + "grad_norm": 3.275437831878662, + "learning_rate": 5.749981891549694e-07, + "loss": 1.0611, + "step": 14271 + }, + { + "epoch": 0.8948523418396138, + "grad_norm": 3.2029643058776855, + "learning_rate": 5.743196905634951e-07, + "loss": 1.0448, + "step": 14272 + }, + { + "epoch": 0.8949150416954041, + "grad_norm": 3.637375593185425, + "learning_rate": 5.736415806874884e-07, + "loss": 1.2029, + "step": 14273 + }, + { + "epoch": 0.8949777415511945, + "grad_norm": 3.261613130569458, + "learning_rate": 5.729638595549159e-07, + "loss": 0.9747, + "step": 14274 + }, + { + "epoch": 0.8950404414069848, + "grad_norm": 3.3345999717712402, + "learning_rate": 5.722865271937251e-07, + "loss": 0.9978, + "step": 14275 + }, + { + "epoch": 0.8951031412627751, + "grad_norm": 3.6091442108154297, + "learning_rate": 5.71609583631848e-07, + "loss": 1.0562, + "step": 14276 + }, + { + "epoch": 0.8951658411185655, + "grad_norm": 3.7698631286621094, + "learning_rate": 5.709330288972048e-07, + "loss": 1.0633, + "step": 14277 + }, + { + "epoch": 0.8952285409743558, + "grad_norm": 3.341508150100708, + "learning_rate": 5.702568630176952e-07, + "loss": 1.0319, + "step": 14278 + }, + { + "epoch": 0.8952912408301461, + "grad_norm": 2.989527940750122, + "learning_rate": 5.695810860212047e-07, + "loss": 1.0574, + "step": 14279 + }, + { + "epoch": 0.8953539406859364, + "grad_norm": 3.3395915031433105, + "learning_rate": 5.689056979356011e-07, + "loss": 1.0953, + "step": 14280 + }, + { + "epoch": 0.8954166405417268, + "grad_norm": 3.200977087020874, + "learning_rate": 5.682306987887388e-07, + "loss": 1.1004, + "step": 14281 + }, + { + "epoch": 0.8954793403975171, + "grad_norm": 3.695795774459839, + "learning_rate": 5.675560886084552e-07, + "loss": 1.2297, + "step": 14282 + }, + { + "epoch": 0.8955420402533074, + "grad_norm": 3.909757137298584, + "learning_rate": 5.668818674225684e-07, + "loss": 1.018, + "step": 14283 + }, + { + "epoch": 0.8956047401090977, + "grad_norm": 3.6529042720794678, + "learning_rate": 5.662080352588873e-07, + "loss": 1.1061, + "step": 14284 + }, + { + "epoch": 0.8956674399648881, + "grad_norm": 2.995690107345581, + "learning_rate": 5.655345921451982e-07, + "loss": 1.1528, + "step": 14285 + }, + { + "epoch": 0.8957301398206784, + "grad_norm": 3.6514487266540527, + "learning_rate": 5.648615381092748e-07, + "loss": 1.0925, + "step": 14286 + }, + { + "epoch": 0.8957928396764687, + "grad_norm": 3.383979082107544, + "learning_rate": 5.641888731788713e-07, + "loss": 1.0988, + "step": 14287 + }, + { + "epoch": 0.895855539532259, + "grad_norm": 3.626304864883423, + "learning_rate": 5.635165973817336e-07, + "loss": 1.1629, + "step": 14288 + }, + { + "epoch": 0.8959182393880494, + "grad_norm": 3.7662947177886963, + "learning_rate": 5.628447107455826e-07, + "loss": 0.9354, + "step": 14289 + }, + { + "epoch": 0.8959809392438397, + "grad_norm": 3.6825058460235596, + "learning_rate": 5.621732132981283e-07, + "loss": 0.9941, + "step": 14290 + }, + { + "epoch": 0.89604363909963, + "grad_norm": 3.7065112590789795, + "learning_rate": 5.61502105067061e-07, + "loss": 1.0651, + "step": 14291 + }, + { + "epoch": 0.8961063389554204, + "grad_norm": 3.7854297161102295, + "learning_rate": 5.608313860800618e-07, + "loss": 1.1411, + "step": 14292 + }, + { + "epoch": 0.8961690388112107, + "grad_norm": 3.2989730834960938, + "learning_rate": 
5.601610563647875e-07, + "loss": 1.1268, + "step": 14293 + }, + { + "epoch": 0.8962317386670011, + "grad_norm": 3.315917730331421, + "learning_rate": 5.594911159488814e-07, + "loss": 1.2799, + "step": 14294 + }, + { + "epoch": 0.8962944385227914, + "grad_norm": 3.188538074493408, + "learning_rate": 5.58821564859976e-07, + "loss": 0.9258, + "step": 14295 + }, + { + "epoch": 0.8963571383785818, + "grad_norm": 3.666895866394043, + "learning_rate": 5.581524031256813e-07, + "loss": 0.9798, + "step": 14296 + }, + { + "epoch": 0.8964198382343721, + "grad_norm": 3.5402159690856934, + "learning_rate": 5.574836307735942e-07, + "loss": 1.1217, + "step": 14297 + }, + { + "epoch": 0.8964825380901624, + "grad_norm": 3.2849979400634766, + "learning_rate": 5.568152478312927e-07, + "loss": 1.1238, + "step": 14298 + }, + { + "epoch": 0.8965452379459528, + "grad_norm": 3.56451153755188, + "learning_rate": 5.561472543263446e-07, + "loss": 1.1635, + "step": 14299 + }, + { + "epoch": 0.8966079378017431, + "grad_norm": 3.1713407039642334, + "learning_rate": 5.554796502862958e-07, + "loss": 0.9709, + "step": 14300 + }, + { + "epoch": 0.8966706376575334, + "grad_norm": 3.9167585372924805, + "learning_rate": 5.548124357386786e-07, + "loss": 1.2359, + "step": 14301 + }, + { + "epoch": 0.8967333375133237, + "grad_norm": 3.4999184608459473, + "learning_rate": 5.541456107110089e-07, + "loss": 1.0464, + "step": 14302 + }, + { + "epoch": 0.8967960373691141, + "grad_norm": 3.8405075073242188, + "learning_rate": 5.534791752307866e-07, + "loss": 0.9583, + "step": 14303 + }, + { + "epoch": 0.8968587372249044, + "grad_norm": 3.2984557151794434, + "learning_rate": 5.528131293254957e-07, + "loss": 1.1039, + "step": 14304 + }, + { + "epoch": 0.8969214370806947, + "grad_norm": 3.2300033569335938, + "learning_rate": 5.521474730226029e-07, + "loss": 1.1417, + "step": 14305 + }, + { + "epoch": 0.896984136936485, + "grad_norm": 3.215170383453369, + "learning_rate": 5.514822063495628e-07, + "loss": 0.8878, + "step": 14306 + }, + { + "epoch": 0.8970468367922754, + "grad_norm": 3.990687370300293, + "learning_rate": 5.508173293338081e-07, + "loss": 1.2466, + "step": 14307 + }, + { + "epoch": 0.8971095366480657, + "grad_norm": 3.728304624557495, + "learning_rate": 5.501528420027602e-07, + "loss": 1.0502, + "step": 14308 + }, + { + "epoch": 0.897172236503856, + "grad_norm": 3.6403768062591553, + "learning_rate": 5.494887443838204e-07, + "loss": 1.1548, + "step": 14309 + }, + { + "epoch": 0.8972349363596464, + "grad_norm": 3.6528265476226807, + "learning_rate": 5.48825036504379e-07, + "loss": 1.0481, + "step": 14310 + }, + { + "epoch": 0.8972976362154367, + "grad_norm": 3.021496295928955, + "learning_rate": 5.481617183918053e-07, + "loss": 1.0449, + "step": 14311 + }, + { + "epoch": 0.897360336071227, + "grad_norm": 3.462172269821167, + "learning_rate": 5.474987900734552e-07, + "loss": 1.0522, + "step": 14312 + }, + { + "epoch": 0.8974230359270173, + "grad_norm": 3.65384840965271, + "learning_rate": 5.468362515766656e-07, + "loss": 1.1123, + "step": 14313 + }, + { + "epoch": 0.8974857357828077, + "grad_norm": 3.6024930477142334, + "learning_rate": 5.461741029287648e-07, + "loss": 1.0079, + "step": 14314 + }, + { + "epoch": 0.897548435638598, + "grad_norm": 3.2774837017059326, + "learning_rate": 5.455123441570554e-07, + "loss": 1.0499, + "step": 14315 + }, + { + "epoch": 0.8976111354943883, + "grad_norm": 3.6567440032958984, + "learning_rate": 5.448509752888298e-07, + "loss": 1.0119, + "step": 14316 + }, + { + "epoch": 0.8976738353501786, + 
"grad_norm": 2.932476282119751, + "learning_rate": 5.441899963513631e-07, + "loss": 1.169, + "step": 14317 + }, + { + "epoch": 0.8977365352059691, + "grad_norm": 3.631153106689453, + "learning_rate": 5.435294073719144e-07, + "loss": 1.0615, + "step": 14318 + }, + { + "epoch": 0.8977992350617594, + "grad_norm": 3.4029550552368164, + "learning_rate": 5.428692083777254e-07, + "loss": 1.0516, + "step": 14319 + }, + { + "epoch": 0.8978619349175497, + "grad_norm": 3.6533455848693848, + "learning_rate": 5.422093993960231e-07, + "loss": 1.0903, + "step": 14320 + }, + { + "epoch": 0.8979246347733401, + "grad_norm": 3.737687587738037, + "learning_rate": 5.415499804540181e-07, + "loss": 1.0597, + "step": 14321 + }, + { + "epoch": 0.8979873346291304, + "grad_norm": 3.7317616939544678, + "learning_rate": 5.408909515789062e-07, + "loss": 1.1269, + "step": 14322 + }, + { + "epoch": 0.8980500344849207, + "grad_norm": 3.783475637435913, + "learning_rate": 5.402323127978637e-07, + "loss": 1.0772, + "step": 14323 + }, + { + "epoch": 0.898112734340711, + "grad_norm": 3.563751220703125, + "learning_rate": 5.395740641380532e-07, + "loss": 0.9444, + "step": 14324 + }, + { + "epoch": 0.8981754341965014, + "grad_norm": 3.38421368598938, + "learning_rate": 5.389162056266217e-07, + "loss": 1.0513, + "step": 14325 + }, + { + "epoch": 0.8982381340522917, + "grad_norm": 3.354578733444214, + "learning_rate": 5.382587372907e-07, + "loss": 1.0146, + "step": 14326 + }, + { + "epoch": 0.898300833908082, + "grad_norm": 3.2316555976867676, + "learning_rate": 5.376016591573984e-07, + "loss": 1.0197, + "step": 14327 + }, + { + "epoch": 0.8983635337638723, + "grad_norm": 3.6066248416900635, + "learning_rate": 5.3694497125382e-07, + "loss": 1.1024, + "step": 14328 + }, + { + "epoch": 0.8984262336196627, + "grad_norm": 3.288407802581787, + "learning_rate": 5.36288673607045e-07, + "loss": 1.0886, + "step": 14329 + }, + { + "epoch": 0.898488933475453, + "grad_norm": 3.5038511753082275, + "learning_rate": 5.356327662441374e-07, + "loss": 1.1949, + "step": 14330 + }, + { + "epoch": 0.8985516333312433, + "grad_norm": 3.358351230621338, + "learning_rate": 5.349772491921468e-07, + "loss": 1.0365, + "step": 14331 + }, + { + "epoch": 0.8986143331870337, + "grad_norm": 3.4560394287109375, + "learning_rate": 5.343221224781081e-07, + "loss": 1.0784, + "step": 14332 + }, + { + "epoch": 0.898677033042824, + "grad_norm": 3.6792917251586914, + "learning_rate": 5.336673861290398e-07, + "loss": 1.0921, + "step": 14333 + }, + { + "epoch": 0.8987397328986143, + "grad_norm": 3.7436623573303223, + "learning_rate": 5.330130401719413e-07, + "loss": 1.0805, + "step": 14334 + }, + { + "epoch": 0.8988024327544046, + "grad_norm": 3.310495615005493, + "learning_rate": 5.323590846337967e-07, + "loss": 1.0962, + "step": 14335 + }, + { + "epoch": 0.898865132610195, + "grad_norm": 3.7607147693634033, + "learning_rate": 5.317055195415788e-07, + "loss": 1.0101, + "step": 14336 + }, + { + "epoch": 0.8989278324659853, + "grad_norm": 3.595071792602539, + "learning_rate": 5.310523449222393e-07, + "loss": 1.223, + "step": 14337 + }, + { + "epoch": 0.8989905323217756, + "grad_norm": 3.3440017700195312, + "learning_rate": 5.303995608027124e-07, + "loss": 1.0691, + "step": 14338 + }, + { + "epoch": 0.8990532321775659, + "grad_norm": 3.1132965087890625, + "learning_rate": 5.29747167209923e-07, + "loss": 1.1513, + "step": 14339 + }, + { + "epoch": 0.8991159320333563, + "grad_norm": 3.716381072998047, + "learning_rate": 5.290951641707731e-07, + "loss": 1.0527, + "step": 14340 
+ }, + { + "epoch": 0.8991786318891467, + "grad_norm": 3.3375837802886963, + "learning_rate": 5.284435517121533e-07, + "loss": 1.1578, + "step": 14341 + }, + { + "epoch": 0.899241331744937, + "grad_norm": 3.8986082077026367, + "learning_rate": 5.277923298609334e-07, + "loss": 1.0403, + "step": 14342 + }, + { + "epoch": 0.8993040316007274, + "grad_norm": 3.6966958045959473, + "learning_rate": 5.271414986439727e-07, + "loss": 1.0999, + "step": 14343 + }, + { + "epoch": 0.8993667314565177, + "grad_norm": 3.22924542427063, + "learning_rate": 5.2649105808811e-07, + "loss": 1.0789, + "step": 14344 + }, + { + "epoch": 0.899429431312308, + "grad_norm": 3.051654577255249, + "learning_rate": 5.258410082201693e-07, + "loss": 1.12, + "step": 14345 + }, + { + "epoch": 0.8994921311680983, + "grad_norm": 3.1521425247192383, + "learning_rate": 5.251913490669591e-07, + "loss": 1.0262, + "step": 14346 + }, + { + "epoch": 0.8995548310238887, + "grad_norm": 3.62282395362854, + "learning_rate": 5.245420806552726e-07, + "loss": 1.1522, + "step": 14347 + }, + { + "epoch": 0.899617530879679, + "grad_norm": 3.224055767059326, + "learning_rate": 5.238932030118838e-07, + "loss": 1.2807, + "step": 14348 + }, + { + "epoch": 0.8996802307354693, + "grad_norm": 3.669191360473633, + "learning_rate": 5.232447161635523e-07, + "loss": 1.2527, + "step": 14349 + }, + { + "epoch": 0.8997429305912596, + "grad_norm": 3.3659772872924805, + "learning_rate": 5.225966201370236e-07, + "loss": 1.1875, + "step": 14350 + }, + { + "epoch": 0.89980563044705, + "grad_norm": 3.7072701454162598, + "learning_rate": 5.219489149590251e-07, + "loss": 1.0913, + "step": 14351 + }, + { + "epoch": 0.8998683303028403, + "grad_norm": 3.434981107711792, + "learning_rate": 5.213016006562654e-07, + "loss": 0.9936, + "step": 14352 + }, + { + "epoch": 0.8999310301586306, + "grad_norm": 3.523669481277466, + "learning_rate": 5.206546772554421e-07, + "loss": 1.0897, + "step": 14353 + }, + { + "epoch": 0.899993730014421, + "grad_norm": 3.4225504398345947, + "learning_rate": 5.200081447832339e-07, + "loss": 0.9379, + "step": 14354 + }, + { + "epoch": 0.9000564298702113, + "grad_norm": 3.887390375137329, + "learning_rate": 5.193620032663049e-07, + "loss": 1.1468, + "step": 14355 + }, + { + "epoch": 0.9001191297260016, + "grad_norm": 3.387462854385376, + "learning_rate": 5.187162527312984e-07, + "loss": 1.0728, + "step": 14356 + }, + { + "epoch": 0.9001818295817919, + "grad_norm": 3.4261865615844727, + "learning_rate": 5.180708932048495e-07, + "loss": 1.1691, + "step": 14357 + }, + { + "epoch": 0.9002445294375823, + "grad_norm": 3.4315268993377686, + "learning_rate": 5.174259247135716e-07, + "loss": 1.0345, + "step": 14358 + }, + { + "epoch": 0.9003072292933726, + "grad_norm": 3.588470458984375, + "learning_rate": 5.167813472840622e-07, + "loss": 1.0107, + "step": 14359 + }, + { + "epoch": 0.9003699291491629, + "grad_norm": 3.568477153778076, + "learning_rate": 5.161371609429033e-07, + "loss": 1.0088, + "step": 14360 + }, + { + "epoch": 0.9004326290049532, + "grad_norm": 3.237224817276001, + "learning_rate": 5.154933657166628e-07, + "loss": 1.1532, + "step": 14361 + }, + { + "epoch": 0.9004953288607436, + "grad_norm": 3.633355140686035, + "learning_rate": 5.148499616318903e-07, + "loss": 0.9403, + "step": 14362 + }, + { + "epoch": 0.9005580287165339, + "grad_norm": 3.3174262046813965, + "learning_rate": 5.142069487151191e-07, + "loss": 1.0126, + "step": 14363 + }, + { + "epoch": 0.9006207285723243, + "grad_norm": 3.232408046722412, + "learning_rate": 
5.13564326992867e-07, + "loss": 1.0648, + "step": 14364 + }, + { + "epoch": 0.9006834284281147, + "grad_norm": 3.6117470264434814, + "learning_rate": 5.129220964916371e-07, + "loss": 0.9991, + "step": 14365 + }, + { + "epoch": 0.900746128283905, + "grad_norm": 3.279147148132324, + "learning_rate": 5.122802572379148e-07, + "loss": 1.0465, + "step": 14366 + }, + { + "epoch": 0.9008088281396953, + "grad_norm": 3.455509901046753, + "learning_rate": 5.11638809258167e-07, + "loss": 1.0902, + "step": 14367 + }, + { + "epoch": 0.9008715279954856, + "grad_norm": 3.2351722717285156, + "learning_rate": 5.109977525788512e-07, + "loss": 1.0922, + "step": 14368 + }, + { + "epoch": 0.900934227851276, + "grad_norm": 3.355221748352051, + "learning_rate": 5.103570872264018e-07, + "loss": 1.1073, + "step": 14369 + }, + { + "epoch": 0.9009969277070663, + "grad_norm": 3.187854766845703, + "learning_rate": 5.097168132272401e-07, + "loss": 1.1388, + "step": 14370 + }, + { + "epoch": 0.9010596275628566, + "grad_norm": 3.798107147216797, + "learning_rate": 5.090769306077714e-07, + "loss": 1.0167, + "step": 14371 + }, + { + "epoch": 0.901122327418647, + "grad_norm": 3.389697313308716, + "learning_rate": 5.084374393943847e-07, + "loss": 1.2014, + "step": 14372 + }, + { + "epoch": 0.9011850272744373, + "grad_norm": 3.9635512828826904, + "learning_rate": 5.077983396134534e-07, + "loss": 1.0047, + "step": 14373 + }, + { + "epoch": 0.9012477271302276, + "grad_norm": 3.262720823287964, + "learning_rate": 5.071596312913329e-07, + "loss": 0.991, + "step": 14374 + }, + { + "epoch": 0.9013104269860179, + "grad_norm": 3.6815714836120605, + "learning_rate": 5.065213144543624e-07, + "loss": 1.1931, + "step": 14375 + }, + { + "epoch": 0.9013731268418083, + "grad_norm": 3.659099817276001, + "learning_rate": 5.058833891288683e-07, + "loss": 0.8614, + "step": 14376 + }, + { + "epoch": 0.9014358266975986, + "grad_norm": 3.1306114196777344, + "learning_rate": 5.052458553411588e-07, + "loss": 1.0921, + "step": 14377 + }, + { + "epoch": 0.9014985265533889, + "grad_norm": 3.0488133430480957, + "learning_rate": 5.046087131175226e-07, + "loss": 1.0017, + "step": 14378 + }, + { + "epoch": 0.9015612264091792, + "grad_norm": 3.27401065826416, + "learning_rate": 5.039719624842398e-07, + "loss": 1.1075, + "step": 14379 + }, + { + "epoch": 0.9016239262649696, + "grad_norm": 3.500593900680542, + "learning_rate": 5.033356034675674e-07, + "loss": 1.1106, + "step": 14380 + }, + { + "epoch": 0.9016866261207599, + "grad_norm": 3.6191155910491943, + "learning_rate": 5.026996360937497e-07, + "loss": 1.0908, + "step": 14381 + }, + { + "epoch": 0.9017493259765502, + "grad_norm": 3.6809985637664795, + "learning_rate": 5.020640603890126e-07, + "loss": 1.1011, + "step": 14382 + }, + { + "epoch": 0.9018120258323405, + "grad_norm": 3.5408384799957275, + "learning_rate": 5.014288763795693e-07, + "loss": 1.1781, + "step": 14383 + }, + { + "epoch": 0.9018747256881309, + "grad_norm": 3.5927486419677734, + "learning_rate": 5.007940840916148e-07, + "loss": 1.045, + "step": 14384 + }, + { + "epoch": 0.9019374255439212, + "grad_norm": 3.250302314758301, + "learning_rate": 5.001596835513256e-07, + "loss": 1.1988, + "step": 14385 + }, + { + "epoch": 0.9020001253997115, + "grad_norm": 3.2284598350524902, + "learning_rate": 4.995256747848654e-07, + "loss": 1.2202, + "step": 14386 + }, + { + "epoch": 0.902062825255502, + "grad_norm": 3.833491086959839, + "learning_rate": 4.988920578183831e-07, + "loss": 1.1154, + "step": 14387 + }, + { + "epoch": 0.9021255251112923, + 
"grad_norm": 3.489950656890869, + "learning_rate": 4.982588326780069e-07, + "loss": 1.0251, + "step": 14388 + }, + { + "epoch": 0.9021882249670826, + "grad_norm": 3.311711311340332, + "learning_rate": 4.976259993898503e-07, + "loss": 1.1471, + "step": 14389 + }, + { + "epoch": 0.902250924822873, + "grad_norm": 3.07535457611084, + "learning_rate": 4.969935579800134e-07, + "loss": 1.1378, + "step": 14390 + }, + { + "epoch": 0.9023136246786633, + "grad_norm": 3.4639031887054443, + "learning_rate": 4.963615084745765e-07, + "loss": 1.1997, + "step": 14391 + }, + { + "epoch": 0.9023763245344536, + "grad_norm": 3.0429046154022217, + "learning_rate": 4.957298508996066e-07, + "loss": 1.1121, + "step": 14392 + }, + { + "epoch": 0.9024390243902439, + "grad_norm": 3.215902328491211, + "learning_rate": 4.950985852811518e-07, + "loss": 1.1328, + "step": 14393 + }, + { + "epoch": 0.9025017242460343, + "grad_norm": 3.394914150238037, + "learning_rate": 4.944677116452478e-07, + "loss": 1.044, + "step": 14394 + }, + { + "epoch": 0.9025644241018246, + "grad_norm": 3.6405134201049805, + "learning_rate": 4.938372300179095e-07, + "loss": 1.1, + "step": 14395 + }, + { + "epoch": 0.9026271239576149, + "grad_norm": 3.609908103942871, + "learning_rate": 4.932071404251382e-07, + "loss": 1.2358, + "step": 14396 + }, + { + "epoch": 0.9026898238134052, + "grad_norm": 3.521150827407837, + "learning_rate": 4.925774428929208e-07, + "loss": 1.0191, + "step": 14397 + }, + { + "epoch": 0.9027525236691956, + "grad_norm": 3.4305360317230225, + "learning_rate": 4.919481374472257e-07, + "loss": 1.0227, + "step": 14398 + }, + { + "epoch": 0.9028152235249859, + "grad_norm": 3.2883992195129395, + "learning_rate": 4.913192241140041e-07, + "loss": 1.177, + "step": 14399 + }, + { + "epoch": 0.9028779233807762, + "grad_norm": 3.490011692047119, + "learning_rate": 4.90690702919191e-07, + "loss": 1.0754, + "step": 14400 + }, + { + "epoch": 0.9029406232365665, + "grad_norm": 3.4838452339172363, + "learning_rate": 4.90062573888711e-07, + "loss": 1.0576, + "step": 14401 + }, + { + "epoch": 0.9030033230923569, + "grad_norm": 3.708928108215332, + "learning_rate": 4.894348370484648e-07, + "loss": 1.0812, + "step": 14402 + }, + { + "epoch": 0.9030660229481472, + "grad_norm": 3.2046549320220947, + "learning_rate": 4.888074924243413e-07, + "loss": 1.152, + "step": 14403 + }, + { + "epoch": 0.9031287228039375, + "grad_norm": 3.5145554542541504, + "learning_rate": 4.881805400422112e-07, + "loss": 1.1407, + "step": 14404 + }, + { + "epoch": 0.9031914226597278, + "grad_norm": 3.185311794281006, + "learning_rate": 4.875539799279328e-07, + "loss": 1.1868, + "step": 14405 + }, + { + "epoch": 0.9032541225155182, + "grad_norm": 3.5246872901916504, + "learning_rate": 4.869278121073428e-07, + "loss": 1.2206, + "step": 14406 + }, + { + "epoch": 0.9033168223713085, + "grad_norm": 3.4356470108032227, + "learning_rate": 4.863020366062643e-07, + "loss": 1.0212, + "step": 14407 + }, + { + "epoch": 0.9033795222270988, + "grad_norm": 3.359093189239502, + "learning_rate": 4.856766534505064e-07, + "loss": 0.9563, + "step": 14408 + }, + { + "epoch": 0.9034422220828892, + "grad_norm": 3.501159906387329, + "learning_rate": 4.850516626658585e-07, + "loss": 0.9698, + "step": 14409 + }, + { + "epoch": 0.9035049219386796, + "grad_norm": 3.4127070903778076, + "learning_rate": 4.844270642780968e-07, + "loss": 1.0808, + "step": 14410 + }, + { + "epoch": 0.9035676217944699, + "grad_norm": 3.627967119216919, + "learning_rate": 4.838028583129762e-07, + "loss": 1.0293, + "step": 
14411 + }, + { + "epoch": 0.9036303216502602, + "grad_norm": 3.3730573654174805, + "learning_rate": 4.831790447962425e-07, + "loss": 0.9089, + "step": 14412 + }, + { + "epoch": 0.9036930215060506, + "grad_norm": 3.4535396099090576, + "learning_rate": 4.825556237536211e-07, + "loss": 1.1059, + "step": 14413 + }, + { + "epoch": 0.9037557213618409, + "grad_norm": 3.6055943965911865, + "learning_rate": 4.819325952108212e-07, + "loss": 1.025, + "step": 14414 + }, + { + "epoch": 0.9038184212176312, + "grad_norm": 3.1262595653533936, + "learning_rate": 4.813099591935356e-07, + "loss": 1.0683, + "step": 14415 + }, + { + "epoch": 0.9038811210734216, + "grad_norm": 3.4075775146484375, + "learning_rate": 4.806877157274437e-07, + "loss": 1.1984, + "step": 14416 + }, + { + "epoch": 0.9039438209292119, + "grad_norm": 3.3121140003204346, + "learning_rate": 4.800658648382073e-07, + "loss": 1.0558, + "step": 14417 + }, + { + "epoch": 0.9040065207850022, + "grad_norm": 3.231618642807007, + "learning_rate": 4.794444065514681e-07, + "loss": 1.172, + "step": 14418 + }, + { + "epoch": 0.9040692206407925, + "grad_norm": 3.496494770050049, + "learning_rate": 4.788233408928588e-07, + "loss": 0.951, + "step": 14419 + }, + { + "epoch": 0.9041319204965829, + "grad_norm": 3.3825275897979736, + "learning_rate": 4.782026678879903e-07, + "loss": 1.0473, + "step": 14420 + }, + { + "epoch": 0.9041946203523732, + "grad_norm": 3.7591190338134766, + "learning_rate": 4.775823875624607e-07, + "loss": 0.9659, + "step": 14421 + }, + { + "epoch": 0.9042573202081635, + "grad_norm": 3.370344400405884, + "learning_rate": 4.769624999418465e-07, + "loss": 1.1107, + "step": 14422 + }, + { + "epoch": 0.9043200200639538, + "grad_norm": 3.799173355102539, + "learning_rate": 4.7634300505171706e-07, + "loss": 1.1523, + "step": 14423 + }, + { + "epoch": 0.9043827199197442, + "grad_norm": 3.3367416858673096, + "learning_rate": 4.757239029176175e-07, + "loss": 1.1423, + "step": 14424 + }, + { + "epoch": 0.9044454197755345, + "grad_norm": 3.450531005859375, + "learning_rate": 4.751051935650808e-07, + "loss": 1.1291, + "step": 14425 + }, + { + "epoch": 0.9045081196313248, + "grad_norm": 3.666236400604248, + "learning_rate": 4.7448687701961983e-07, + "loss": 1.2398, + "step": 14426 + }, + { + "epoch": 0.9045708194871152, + "grad_norm": 3.2909045219421387, + "learning_rate": 4.7386895330673753e-07, + "loss": 1.0086, + "step": 14427 + }, + { + "epoch": 0.9046335193429055, + "grad_norm": 3.21520733833313, + "learning_rate": 4.732514224519158e-07, + "loss": 1.1216, + "step": 14428 + }, + { + "epoch": 0.9046962191986958, + "grad_norm": 3.8775839805603027, + "learning_rate": 4.726342844806209e-07, + "loss": 1.2344, + "step": 14429 + }, + { + "epoch": 0.9047589190544861, + "grad_norm": 3.3566787242889404, + "learning_rate": 4.7201753941830466e-07, + "loss": 1.1082, + "step": 14430 + }, + { + "epoch": 0.9048216189102765, + "grad_norm": 4.112123012542725, + "learning_rate": 4.714011872904001e-07, + "loss": 0.9849, + "step": 14431 + }, + { + "epoch": 0.9048843187660668, + "grad_norm": 3.2783145904541016, + "learning_rate": 4.7078522812232797e-07, + "loss": 1.0748, + "step": 14432 + }, + { + "epoch": 0.9049470186218572, + "grad_norm": 3.3472671508789062, + "learning_rate": 4.70169661939488e-07, + "loss": 1.0516, + "step": 14433 + }, + { + "epoch": 0.9050097184776476, + "grad_norm": 3.3586947917938232, + "learning_rate": 4.6955448876726764e-07, + "loss": 0.9836, + "step": 14434 + }, + { + "epoch": 0.9050724183334379, + "grad_norm": 3.6860506534576416, + 
"learning_rate": 4.6893970863103766e-07, + "loss": 1.2, + "step": 14435 + }, + { + "epoch": 0.9051351181892282, + "grad_norm": 3.4776148796081543, + "learning_rate": 4.6832532155614895e-07, + "loss": 0.9228, + "step": 14436 + }, + { + "epoch": 0.9051978180450185, + "grad_norm": 3.630284309387207, + "learning_rate": 4.6771132756794014e-07, + "loss": 1.0765, + "step": 14437 + }, + { + "epoch": 0.9052605179008089, + "grad_norm": 3.1803135871887207, + "learning_rate": 4.6709772669173317e-07, + "loss": 1.0179, + "step": 14438 + }, + { + "epoch": 0.9053232177565992, + "grad_norm": 3.3425240516662598, + "learning_rate": 4.664845189528322e-07, + "loss": 1.1048, + "step": 14439 + }, + { + "epoch": 0.9053859176123895, + "grad_norm": 3.3005428314208984, + "learning_rate": 4.6587170437652485e-07, + "loss": 0.9936, + "step": 14440 + }, + { + "epoch": 0.9054486174681798, + "grad_norm": 3.3141846656799316, + "learning_rate": 4.6525928298808645e-07, + "loss": 1.0173, + "step": 14441 + }, + { + "epoch": 0.9055113173239702, + "grad_norm": 3.483116388320923, + "learning_rate": 4.6464725481277117e-07, + "loss": 1.1543, + "step": 14442 + }, + { + "epoch": 0.9055740171797605, + "grad_norm": 3.6382718086242676, + "learning_rate": 4.640356198758189e-07, + "loss": 1.1926, + "step": 14443 + }, + { + "epoch": 0.9056367170355508, + "grad_norm": 3.505671262741089, + "learning_rate": 4.634243782024539e-07, + "loss": 1.0804, + "step": 14444 + }, + { + "epoch": 0.9056994168913411, + "grad_norm": 3.722381830215454, + "learning_rate": 4.6281352981788484e-07, + "loss": 1.1864, + "step": 14445 + }, + { + "epoch": 0.9057621167471315, + "grad_norm": 3.1961028575897217, + "learning_rate": 4.6220307474730274e-07, + "loss": 1.1119, + "step": 14446 + }, + { + "epoch": 0.9058248166029218, + "grad_norm": 3.4882285594940186, + "learning_rate": 4.6159301301587966e-07, + "loss": 1.1832, + "step": 14447 + }, + { + "epoch": 0.9058875164587121, + "grad_norm": 3.367220401763916, + "learning_rate": 4.609833446487799e-07, + "loss": 1.1081, + "step": 14448 + }, + { + "epoch": 0.9059502163145025, + "grad_norm": 4.052770137786865, + "learning_rate": 4.603740696711434e-07, + "loss": 1.0296, + "step": 14449 + }, + { + "epoch": 0.9060129161702928, + "grad_norm": 3.4293971061706543, + "learning_rate": 4.5976518810809666e-07, + "loss": 0.9763, + "step": 14450 + }, + { + "epoch": 0.9060756160260831, + "grad_norm": 3.4709975719451904, + "learning_rate": 4.591566999847485e-07, + "loss": 1.0286, + "step": 14451 + }, + { + "epoch": 0.9061383158818734, + "grad_norm": 4.00352144241333, + "learning_rate": 4.5854860532619674e-07, + "loss": 1.101, + "step": 14452 + }, + { + "epoch": 0.9062010157376638, + "grad_norm": 3.461818218231201, + "learning_rate": 4.5794090415751666e-07, + "loss": 0.9504, + "step": 14453 + }, + { + "epoch": 0.9062637155934541, + "grad_norm": 3.295318841934204, + "learning_rate": 4.573335965037706e-07, + "loss": 1.2179, + "step": 14454 + }, + { + "epoch": 0.9063264154492444, + "grad_norm": 3.651986598968506, + "learning_rate": 4.5672668239000184e-07, + "loss": 0.8584, + "step": 14455 + }, + { + "epoch": 0.9063891153050349, + "grad_norm": 3.512819290161133, + "learning_rate": 4.561201618412436e-07, + "loss": 1.1379, + "step": 14456 + }, + { + "epoch": 0.9064518151608252, + "grad_norm": 3.9576468467712402, + "learning_rate": 4.555140348825071e-07, + "loss": 0.8401, + "step": 14457 + }, + { + "epoch": 0.9065145150166155, + "grad_norm": 3.4453587532043457, + "learning_rate": 4.549083015387856e-07, + "loss": 1.2852, + "step": 14458 + }, + { 
+ "epoch": 0.9065772148724058, + "grad_norm": 3.303769588470459, + "learning_rate": 4.5430296183506583e-07, + "loss": 1.0689, + "step": 14459 + }, + { + "epoch": 0.9066399147281962, + "grad_norm": 3.821962594985962, + "learning_rate": 4.536980157963078e-07, + "loss": 0.7899, + "step": 14460 + }, + { + "epoch": 0.9067026145839865, + "grad_norm": 3.046253204345703, + "learning_rate": 4.530934634474604e-07, + "loss": 1.1864, + "step": 14461 + }, + { + "epoch": 0.9067653144397768, + "grad_norm": 3.6725449562072754, + "learning_rate": 4.5248930481345377e-07, + "loss": 1.1241, + "step": 14462 + }, + { + "epoch": 0.9068280142955671, + "grad_norm": 3.544595956802368, + "learning_rate": 4.518855399192068e-07, + "loss": 1.0498, + "step": 14463 + }, + { + "epoch": 0.9068907141513575, + "grad_norm": 3.3234927654266357, + "learning_rate": 4.512821687896174e-07, + "loss": 1.0927, + "step": 14464 + }, + { + "epoch": 0.9069534140071478, + "grad_norm": 3.7628042697906494, + "learning_rate": 4.5067919144956786e-07, + "loss": 0.9806, + "step": 14465 + }, + { + "epoch": 0.9070161138629381, + "grad_norm": 3.4140753746032715, + "learning_rate": 4.5007660792392495e-07, + "loss": 1.3863, + "step": 14466 + }, + { + "epoch": 0.9070788137187284, + "grad_norm": 3.5649967193603516, + "learning_rate": 4.49474418237541e-07, + "loss": 0.9205, + "step": 14467 + }, + { + "epoch": 0.9071415135745188, + "grad_norm": 3.4098894596099854, + "learning_rate": 4.4887262241524843e-07, + "loss": 1.1693, + "step": 14468 + }, + { + "epoch": 0.9072042134303091, + "grad_norm": 3.356112480163574, + "learning_rate": 4.482712204818651e-07, + "loss": 1.0351, + "step": 14469 + }, + { + "epoch": 0.9072669132860994, + "grad_norm": 3.8271193504333496, + "learning_rate": 4.4767021246219566e-07, + "loss": 0.8562, + "step": 14470 + }, + { + "epoch": 0.9073296131418898, + "grad_norm": 3.1294519901275635, + "learning_rate": 4.470695983810225e-07, + "loss": 1.0687, + "step": 14471 + }, + { + "epoch": 0.9073923129976801, + "grad_norm": 3.229398727416992, + "learning_rate": 4.464693782631169e-07, + "loss": 1.0309, + "step": 14472 + }, + { + "epoch": 0.9074550128534704, + "grad_norm": 3.337008237838745, + "learning_rate": 4.4586955213323015e-07, + "loss": 1.1122, + "step": 14473 + }, + { + "epoch": 0.9075177127092607, + "grad_norm": 3.6125879287719727, + "learning_rate": 4.452701200161014e-07, + "loss": 0.8996, + "step": 14474 + }, + { + "epoch": 0.9075804125650511, + "grad_norm": 3.208955764770508, + "learning_rate": 4.446710819364508e-07, + "loss": 1.0162, + "step": 14475 + }, + { + "epoch": 0.9076431124208414, + "grad_norm": 3.3407373428344727, + "learning_rate": 4.44072437918982e-07, + "loss": 1.107, + "step": 14476 + }, + { + "epoch": 0.9077058122766317, + "grad_norm": 3.520972967147827, + "learning_rate": 4.434741879883808e-07, + "loss": 1.0921, + "step": 14477 + }, + { + "epoch": 0.907768512132422, + "grad_norm": 3.5855188369750977, + "learning_rate": 4.4287633216932415e-07, + "loss": 1.1973, + "step": 14478 + }, + { + "epoch": 0.9078312119882124, + "grad_norm": 3.950249195098877, + "learning_rate": 4.4227887048646335e-07, + "loss": 1.0344, + "step": 14479 + }, + { + "epoch": 0.9078939118440028, + "grad_norm": 3.537121534347534, + "learning_rate": 4.4168180296443875e-07, + "loss": 1.0916, + "step": 14480 + }, + { + "epoch": 0.9079566116997931, + "grad_norm": 3.3241641521453857, + "learning_rate": 4.410851296278762e-07, + "loss": 1.1148, + "step": 14481 + }, + { + "epoch": 0.9080193115555835, + "grad_norm": 3.8792121410369873, + "learning_rate": 
4.4048885050137935e-07, + "loss": 1.2894, + "step": 14482 + }, + { + "epoch": 0.9080820114113738, + "grad_norm": 3.43345046043396, + "learning_rate": 4.3989296560953964e-07, + "loss": 1.0876, + "step": 14483 + }, + { + "epoch": 0.9081447112671641, + "grad_norm": 3.43645977973938, + "learning_rate": 4.3929747497693076e-07, + "loss": 1.1741, + "step": 14484 + }, + { + "epoch": 0.9082074111229544, + "grad_norm": 3.1538608074188232, + "learning_rate": 4.387023786281119e-07, + "loss": 1.1174, + "step": 14485 + }, + { + "epoch": 0.9082701109787448, + "grad_norm": 3.8712363243103027, + "learning_rate": 4.3810767658762576e-07, + "loss": 1.0463, + "step": 14486 + }, + { + "epoch": 0.9083328108345351, + "grad_norm": 2.915879964828491, + "learning_rate": 4.3751336887999597e-07, + "loss": 1.0073, + "step": 14487 + }, + { + "epoch": 0.9083955106903254, + "grad_norm": 3.4744818210601807, + "learning_rate": 4.369194555297307e-07, + "loss": 1.0573, + "step": 14488 + }, + { + "epoch": 0.9084582105461158, + "grad_norm": 3.4317612648010254, + "learning_rate": 4.3632593656132706e-07, + "loss": 1.2604, + "step": 14489 + }, + { + "epoch": 0.9085209104019061, + "grad_norm": 3.5961756706237793, + "learning_rate": 4.357328119992588e-07, + "loss": 1.0982, + "step": 14490 + }, + { + "epoch": 0.9085836102576964, + "grad_norm": 3.2267019748687744, + "learning_rate": 4.3514008186798626e-07, + "loss": 1.1015, + "step": 14491 + }, + { + "epoch": 0.9086463101134867, + "grad_norm": 3.3837099075317383, + "learning_rate": 4.345477461919545e-07, + "loss": 1.098, + "step": 14492 + }, + { + "epoch": 0.9087090099692771, + "grad_norm": 3.727262020111084, + "learning_rate": 4.3395580499559276e-07, + "loss": 1.1118, + "step": 14493 + }, + { + "epoch": 0.9087717098250674, + "grad_norm": 3.272312641143799, + "learning_rate": 4.333642583033104e-07, + "loss": 1.0512, + "step": 14494 + }, + { + "epoch": 0.9088344096808577, + "grad_norm": 3.2941694259643555, + "learning_rate": 4.3277310613950354e-07, + "loss": 1.1929, + "step": 14495 + }, + { + "epoch": 0.908897109536648, + "grad_norm": 3.444403886795044, + "learning_rate": 4.3218234852855146e-07, + "loss": 1.083, + "step": 14496 + }, + { + "epoch": 0.9089598093924384, + "grad_norm": 3.411515951156616, + "learning_rate": 4.315919854948181e-07, + "loss": 0.9643, + "step": 14497 + }, + { + "epoch": 0.9090225092482287, + "grad_norm": 3.679090976715088, + "learning_rate": 4.3100201706264946e-07, + "loss": 0.9275, + "step": 14498 + }, + { + "epoch": 0.909085209104019, + "grad_norm": 3.0508408546447754, + "learning_rate": 4.3041244325637387e-07, + "loss": 0.9757, + "step": 14499 + }, + { + "epoch": 0.9091479089598093, + "grad_norm": 3.454246997833252, + "learning_rate": 4.298232641003075e-07, + "loss": 1.0946, + "step": 14500 + }, + { + "epoch": 0.9092106088155997, + "grad_norm": 3.4845685958862305, + "learning_rate": 4.2923447961874866e-07, + "loss": 1.076, + "step": 14501 + }, + { + "epoch": 0.90927330867139, + "grad_norm": 3.327927589416504, + "learning_rate": 4.2864608983597675e-07, + "loss": 0.9473, + "step": 14502 + }, + { + "epoch": 0.9093360085271804, + "grad_norm": 3.4905455112457275, + "learning_rate": 4.2805809477625805e-07, + "loss": 1.062, + "step": 14503 + }, + { + "epoch": 0.9093987083829708, + "grad_norm": 3.256087064743042, + "learning_rate": 4.27470494463843e-07, + "loss": 1.0177, + "step": 14504 + }, + { + "epoch": 0.9094614082387611, + "grad_norm": 3.2722747325897217, + "learning_rate": 4.268832889229624e-07, + "loss": 1.1615, + "step": 14505 + }, + { + "epoch": 
0.9095241080945514, + "grad_norm": 3.4027156829833984, + "learning_rate": 4.2629647817783116e-07, + "loss": 1.1792, + "step": 14506 + }, + { + "epoch": 0.9095868079503417, + "grad_norm": 3.3918845653533936, + "learning_rate": 4.257100622526522e-07, + "loss": 1.1063, + "step": 14507 + }, + { + "epoch": 0.9096495078061321, + "grad_norm": 3.5913455486297607, + "learning_rate": 4.2512404117160954e-07, + "loss": 0.986, + "step": 14508 + }, + { + "epoch": 0.9097122076619224, + "grad_norm": 3.694856643676758, + "learning_rate": 4.2453841495886716e-07, + "loss": 1.0149, + "step": 14509 + }, + { + "epoch": 0.9097749075177127, + "grad_norm": 3.323190450668335, + "learning_rate": 4.239531836385813e-07, + "loss": 1.1348, + "step": 14510 + }, + { + "epoch": 0.909837607373503, + "grad_norm": 3.4355924129486084, + "learning_rate": 4.233683472348837e-07, + "loss": 1.1088, + "step": 14511 + }, + { + "epoch": 0.9099003072292934, + "grad_norm": 3.4133949279785156, + "learning_rate": 4.22783905771893e-07, + "loss": 0.9859, + "step": 14512 + }, + { + "epoch": 0.9099630070850837, + "grad_norm": 3.3245649337768555, + "learning_rate": 4.22199859273712e-07, + "loss": 1.1169, + "step": 14513 + }, + { + "epoch": 0.910025706940874, + "grad_norm": 3.311845302581787, + "learning_rate": 4.216162077644281e-07, + "loss": 1.1658, + "step": 14514 + }, + { + "epoch": 0.9100884067966644, + "grad_norm": 3.8288068771362305, + "learning_rate": 4.210329512681099e-07, + "loss": 1.1016, + "step": 14515 + }, + { + "epoch": 0.9101511066524547, + "grad_norm": 3.388892889022827, + "learning_rate": 4.204500898088104e-07, + "loss": 0.9858, + "step": 14516 + }, + { + "epoch": 0.910213806508245, + "grad_norm": 3.616637945175171, + "learning_rate": 4.1986762341056695e-07, + "loss": 1.1456, + "step": 14517 + }, + { + "epoch": 0.9102765063640353, + "grad_norm": 3.3986411094665527, + "learning_rate": 4.1928555209740265e-07, + "loss": 0.9905, + "step": 14518 + }, + { + "epoch": 0.9103392062198257, + "grad_norm": 3.592400550842285, + "learning_rate": 4.187038758933204e-07, + "loss": 0.8814, + "step": 14519 + }, + { + "epoch": 0.910401906075616, + "grad_norm": 2.952040672302246, + "learning_rate": 4.1812259482230665e-07, + "loss": 1.1318, + "step": 14520 + }, + { + "epoch": 0.9104646059314063, + "grad_norm": 3.587735414505005, + "learning_rate": 4.1754170890833777e-07, + "loss": 1.0203, + "step": 14521 + }, + { + "epoch": 0.9105273057871967, + "grad_norm": 3.3828563690185547, + "learning_rate": 4.169612181753668e-07, + "loss": 1.2047, + "step": 14522 + }, + { + "epoch": 0.910590005642987, + "grad_norm": 3.443241834640503, + "learning_rate": 4.163811226473347e-07, + "loss": 1.0851, + "step": 14523 + }, + { + "epoch": 0.9106527054987773, + "grad_norm": 3.4772419929504395, + "learning_rate": 4.15801422348161e-07, + "loss": 0.9804, + "step": 14524 + }, + { + "epoch": 0.9107154053545676, + "grad_norm": 3.4790704250335693, + "learning_rate": 4.1522211730175674e-07, + "loss": 1.0291, + "step": 14525 + }, + { + "epoch": 0.9107781052103581, + "grad_norm": 3.6911449432373047, + "learning_rate": 4.146432075320117e-07, + "loss": 1.154, + "step": 14526 + }, + { + "epoch": 0.9108408050661484, + "grad_norm": 3.424713373184204, + "learning_rate": 4.1406469306279894e-07, + "loss": 1.0727, + "step": 14527 + }, + { + "epoch": 0.9109035049219387, + "grad_norm": 3.7704272270202637, + "learning_rate": 4.1348657391797496e-07, + "loss": 1.2451, + "step": 14528 + }, + { + "epoch": 0.910966204777729, + "grad_norm": 3.133807420730591, + "learning_rate": 
4.1290885012138514e-07, + "loss": 1.27, + "step": 14529 + }, + { + "epoch": 0.9110289046335194, + "grad_norm": 2.9583375453948975, + "learning_rate": 4.1233152169685374e-07, + "loss": 1.044, + "step": 14530 + }, + { + "epoch": 0.9110916044893097, + "grad_norm": 3.5290281772613525, + "learning_rate": 4.1175458866818616e-07, + "loss": 1.0517, + "step": 14531 + }, + { + "epoch": 0.9111543043451, + "grad_norm": 3.3600142002105713, + "learning_rate": 4.1117805105918116e-07, + "loss": 1.1561, + "step": 14532 + }, + { + "epoch": 0.9112170042008904, + "grad_norm": 3.4577741622924805, + "learning_rate": 4.1060190889361084e-07, + "loss": 1.02, + "step": 14533 + }, + { + "epoch": 0.9112797040566807, + "grad_norm": 3.8710196018218994, + "learning_rate": 4.1002616219523726e-07, + "loss": 0.9594, + "step": 14534 + }, + { + "epoch": 0.911342403912471, + "grad_norm": 3.7830560207366943, + "learning_rate": 4.094508109878015e-07, + "loss": 1.0302, + "step": 14535 + }, + { + "epoch": 0.9114051037682613, + "grad_norm": 3.6029751300811768, + "learning_rate": 4.088758552950345e-07, + "loss": 1.0224, + "step": 14536 + }, + { + "epoch": 0.9114678036240517, + "grad_norm": 3.465776205062866, + "learning_rate": 4.083012951406462e-07, + "loss": 1.0279, + "step": 14537 + }, + { + "epoch": 0.911530503479842, + "grad_norm": 3.060717821121216, + "learning_rate": 4.077271305483321e-07, + "loss": 1.0532, + "step": 14538 + }, + { + "epoch": 0.9115932033356323, + "grad_norm": 3.7185440063476562, + "learning_rate": 4.071533615417678e-07, + "loss": 1.014, + "step": 14539 + }, + { + "epoch": 0.9116559031914226, + "grad_norm": 3.5739686489105225, + "learning_rate": 4.0657998814461975e-07, + "loss": 1.0598, + "step": 14540 + }, + { + "epoch": 0.911718603047213, + "grad_norm": 3.733513832092285, + "learning_rate": 4.060070103805314e-07, + "loss": 1.0817, + "step": 14541 + }, + { + "epoch": 0.9117813029030033, + "grad_norm": 3.4562816619873047, + "learning_rate": 4.054344282731315e-07, + "loss": 1.0906, + "step": 14542 + }, + { + "epoch": 0.9118440027587936, + "grad_norm": 3.2510480880737305, + "learning_rate": 4.0486224184603685e-07, + "loss": 1.0645, + "step": 14543 + }, + { + "epoch": 0.911906702614584, + "grad_norm": 3.445502519607544, + "learning_rate": 4.042904511228418e-07, + "loss": 1.0553, + "step": 14544 + }, + { + "epoch": 0.9119694024703743, + "grad_norm": 3.1072940826416016, + "learning_rate": 4.037190561271276e-07, + "loss": 1.0146, + "step": 14545 + }, + { + "epoch": 0.9120321023261646, + "grad_norm": 3.680482864379883, + "learning_rate": 4.031480568824575e-07, + "loss": 1.0984, + "step": 14546 + }, + { + "epoch": 0.9120948021819549, + "grad_norm": 3.248053550720215, + "learning_rate": 4.025774534123816e-07, + "loss": 1.0723, + "step": 14547 + }, + { + "epoch": 0.9121575020377453, + "grad_norm": 3.678431510925293, + "learning_rate": 4.0200724574043115e-07, + "loss": 0.9005, + "step": 14548 + }, + { + "epoch": 0.9122202018935357, + "grad_norm": 3.6168971061706543, + "learning_rate": 4.014374338901206e-07, + "loss": 1.0257, + "step": 14549 + }, + { + "epoch": 0.912282901749326, + "grad_norm": 3.941624164581299, + "learning_rate": 4.0086801788494886e-07, + "loss": 1.0561, + "step": 14550 + }, + { + "epoch": 0.9123456016051164, + "grad_norm": 3.5884764194488525, + "learning_rate": 4.002989977483995e-07, + "loss": 1.2664, + "step": 14551 + }, + { + "epoch": 0.9124083014609067, + "grad_norm": 3.2968523502349854, + "learning_rate": 3.997303735039393e-07, + "loss": 1.086, + "step": 14552 + }, + { + "epoch": 
0.912471001316697, + "grad_norm": 3.2609684467315674, + "learning_rate": 3.9916214517501716e-07, + "loss": 1.0975, + "step": 14553 + }, + { + "epoch": 0.9125337011724873, + "grad_norm": 3.3523190021514893, + "learning_rate": 3.9859431278506777e-07, + "loss": 1.0249, + "step": 14554 + }, + { + "epoch": 0.9125964010282777, + "grad_norm": 3.392965793609619, + "learning_rate": 3.980268763575079e-07, + "loss": 1.2153, + "step": 14555 + }, + { + "epoch": 0.912659100884068, + "grad_norm": 3.265131950378418, + "learning_rate": 3.9745983591574e-07, + "loss": 1.1158, + "step": 14556 + }, + { + "epoch": 0.9127218007398583, + "grad_norm": 3.4419703483581543, + "learning_rate": 3.968931914831453e-07, + "loss": 0.989, + "step": 14557 + }, + { + "epoch": 0.9127845005956486, + "grad_norm": 3.2599310874938965, + "learning_rate": 3.963269430830974e-07, + "loss": 1.1474, + "step": 14558 + }, + { + "epoch": 0.912847200451439, + "grad_norm": 4.045736312866211, + "learning_rate": 3.957610907389442e-07, + "loss": 1.0128, + "step": 14559 + }, + { + "epoch": 0.9129099003072293, + "grad_norm": 3.045090436935425, + "learning_rate": 3.9519563447402265e-07, + "loss": 1.1172, + "step": 14560 + }, + { + "epoch": 0.9129726001630196, + "grad_norm": 3.053525924682617, + "learning_rate": 3.9463057431165407e-07, + "loss": 0.9799, + "step": 14561 + }, + { + "epoch": 0.91303530001881, + "grad_norm": 3.127028703689575, + "learning_rate": 3.9406591027513983e-07, + "loss": 1.0571, + "step": 14562 + }, + { + "epoch": 0.9130979998746003, + "grad_norm": 3.2378928661346436, + "learning_rate": 3.935016423877669e-07, + "loss": 1.1463, + "step": 14563 + }, + { + "epoch": 0.9131606997303906, + "grad_norm": 3.1173255443573, + "learning_rate": 3.9293777067280434e-07, + "loss": 1.0792, + "step": 14564 + }, + { + "epoch": 0.9132233995861809, + "grad_norm": 3.908456802368164, + "learning_rate": 3.9237429515350924e-07, + "loss": 1.0516, + "step": 14565 + }, + { + "epoch": 0.9132860994419713, + "grad_norm": 3.2395734786987305, + "learning_rate": 3.918112158531173e-07, + "loss": 1.0843, + "step": 14566 + }, + { + "epoch": 0.9133487992977616, + "grad_norm": 3.736769914627075, + "learning_rate": 3.912485327948512e-07, + "loss": 1.1245, + "step": 14567 + }, + { + "epoch": 0.9134114991535519, + "grad_norm": 3.6961631774902344, + "learning_rate": 3.906862460019134e-07, + "loss": 0.9483, + "step": 14568 + }, + { + "epoch": 0.9134741990093422, + "grad_norm": 3.6284804344177246, + "learning_rate": 3.901243554974965e-07, + "loss": 0.8604, + "step": 14569 + }, + { + "epoch": 0.9135368988651326, + "grad_norm": 3.313481092453003, + "learning_rate": 3.895628613047697e-07, + "loss": 0.9476, + "step": 14570 + }, + { + "epoch": 0.9135995987209229, + "grad_norm": 3.1542110443115234, + "learning_rate": 3.89001763446889e-07, + "loss": 1.1256, + "step": 14571 + }, + { + "epoch": 0.9136622985767133, + "grad_norm": 3.1869969367980957, + "learning_rate": 3.8844106194699696e-07, + "loss": 0.9575, + "step": 14572 + }, + { + "epoch": 0.9137249984325037, + "grad_norm": 3.453422784805298, + "learning_rate": 3.8788075682821616e-07, + "loss": 1.0889, + "step": 14573 + }, + { + "epoch": 0.913787698288294, + "grad_norm": 3.855394124984741, + "learning_rate": 3.8732084811365145e-07, + "loss": 1.0879, + "step": 14574 + }, + { + "epoch": 0.9138503981440843, + "grad_norm": 3.50838303565979, + "learning_rate": 3.867613358263933e-07, + "loss": 1.1634, + "step": 14575 + }, + { + "epoch": 0.9139130979998746, + "grad_norm": 3.3268542289733887, + "learning_rate": 3.862022199895199e-07, 
+ "loss": 1.3112, + "step": 14576 + }, + { + "epoch": 0.913975797855665, + "grad_norm": 3.252993583679199, + "learning_rate": 3.8564350062608614e-07, + "loss": 1.1291, + "step": 14577 + }, + { + "epoch": 0.9140384977114553, + "grad_norm": 3.837040901184082, + "learning_rate": 3.850851777591336e-07, + "loss": 1.0545, + "step": 14578 + }, + { + "epoch": 0.9141011975672456, + "grad_norm": 3.307260274887085, + "learning_rate": 3.8452725141168824e-07, + "loss": 1.2507, + "step": 14579 + }, + { + "epoch": 0.9141638974230359, + "grad_norm": 2.9983160495758057, + "learning_rate": 3.839697216067595e-07, + "loss": 1.2129, + "step": 14580 + }, + { + "epoch": 0.9142265972788263, + "grad_norm": 3.2966020107269287, + "learning_rate": 3.8341258836733895e-07, + "loss": 1.1358, + "step": 14581 + }, + { + "epoch": 0.9142892971346166, + "grad_norm": 3.281698226928711, + "learning_rate": 3.828558517164027e-07, + "loss": 1.044, + "step": 14582 + }, + { + "epoch": 0.9143519969904069, + "grad_norm": 3.631061315536499, + "learning_rate": 3.8229951167691236e-07, + "loss": 1.005, + "step": 14583 + }, + { + "epoch": 0.9144146968461973, + "grad_norm": 3.621575117111206, + "learning_rate": 3.817435682718096e-07, + "loss": 0.9805, + "step": 14584 + }, + { + "epoch": 0.9144773967019876, + "grad_norm": 3.6233208179473877, + "learning_rate": 3.8118802152402266e-07, + "loss": 0.9881, + "step": 14585 + }, + { + "epoch": 0.9145400965577779, + "grad_norm": 3.3824543952941895, + "learning_rate": 3.8063287145646e-07, + "loss": 0.9698, + "step": 14586 + }, + { + "epoch": 0.9146027964135682, + "grad_norm": 3.2987005710601807, + "learning_rate": 3.800781180920199e-07, + "loss": 1.225, + "step": 14587 + }, + { + "epoch": 0.9146654962693586, + "grad_norm": 2.9604334831237793, + "learning_rate": 3.795237614535774e-07, + "loss": 0.9422, + "step": 14588 + }, + { + "epoch": 0.9147281961251489, + "grad_norm": 3.9206488132476807, + "learning_rate": 3.7896980156399533e-07, + "loss": 1.0161, + "step": 14589 + }, + { + "epoch": 0.9147908959809392, + "grad_norm": 3.3463947772979736, + "learning_rate": 3.784162384461176e-07, + "loss": 1.2069, + "step": 14590 + }, + { + "epoch": 0.9148535958367295, + "grad_norm": 3.3979275226593018, + "learning_rate": 3.7786307212277605e-07, + "loss": 1.0666, + "step": 14591 + }, + { + "epoch": 0.9149162956925199, + "grad_norm": 3.942847967147827, + "learning_rate": 3.7731030261678125e-07, + "loss": 1.2262, + "step": 14592 + }, + { + "epoch": 0.9149789955483102, + "grad_norm": 3.33188533782959, + "learning_rate": 3.767579299509283e-07, + "loss": 1.085, + "step": 14593 + }, + { + "epoch": 0.9150416954041005, + "grad_norm": 3.2148687839508057, + "learning_rate": 3.762059541480001e-07, + "loss": 1.0643, + "step": 14594 + }, + { + "epoch": 0.915104395259891, + "grad_norm": 3.276921510696411, + "learning_rate": 3.7565437523075956e-07, + "loss": 1.1164, + "step": 14595 + }, + { + "epoch": 0.9151670951156813, + "grad_norm": 3.7781951427459717, + "learning_rate": 3.7510319322195175e-07, + "loss": 1.1636, + "step": 14596 + }, + { + "epoch": 0.9152297949714716, + "grad_norm": 3.121305465698242, + "learning_rate": 3.745524081443075e-07, + "loss": 1.0699, + "step": 14597 + }, + { + "epoch": 0.9152924948272619, + "grad_norm": 3.3727505207061768, + "learning_rate": 3.74002020020543e-07, + "loss": 1.0212, + "step": 14598 + }, + { + "epoch": 0.9153551946830523, + "grad_norm": 3.077042818069458, + "learning_rate": 3.734520288733567e-07, + "loss": 1.0679, + "step": 14599 + }, + { + "epoch": 0.9154178945388426, + "grad_norm": 
3.10182785987854, + "learning_rate": 3.729024347254284e-07, + "loss": 1.1018, + "step": 14600 + }, + { + "epoch": 0.9154805943946329, + "grad_norm": 3.1756296157836914, + "learning_rate": 3.723532375994232e-07, + "loss": 1.154, + "step": 14601 + }, + { + "epoch": 0.9155432942504232, + "grad_norm": 3.2548959255218506, + "learning_rate": 3.7180443751799186e-07, + "loss": 1.227, + "step": 14602 + }, + { + "epoch": 0.9156059941062136, + "grad_norm": 3.307175636291504, + "learning_rate": 3.7125603450376524e-07, + "loss": 1.1046, + "step": 14603 + }, + { + "epoch": 0.9156686939620039, + "grad_norm": 3.6897239685058594, + "learning_rate": 3.707080285793607e-07, + "loss": 0.9447, + "step": 14604 + }, + { + "epoch": 0.9157313938177942, + "grad_norm": 3.833224058151245, + "learning_rate": 3.7016041976737803e-07, + "loss": 1.0347, + "step": 14605 + }, + { + "epoch": 0.9157940936735846, + "grad_norm": 3.426488161087036, + "learning_rate": 3.6961320809039914e-07, + "loss": 1.0894, + "step": 14606 + }, + { + "epoch": 0.9158567935293749, + "grad_norm": 3.5917205810546875, + "learning_rate": 3.690663935709937e-07, + "loss": 1.1777, + "step": 14607 + }, + { + "epoch": 0.9159194933851652, + "grad_norm": 3.439993381500244, + "learning_rate": 3.6851997623170934e-07, + "loss": 1.1741, + "step": 14608 + }, + { + "epoch": 0.9159821932409555, + "grad_norm": 4.340750217437744, + "learning_rate": 3.6797395609508234e-07, + "loss": 1.1329, + "step": 14609 + }, + { + "epoch": 0.9160448930967459, + "grad_norm": 3.4461655616760254, + "learning_rate": 3.674283331836304e-07, + "loss": 1.002, + "step": 14610 + }, + { + "epoch": 0.9161075929525362, + "grad_norm": 3.566005229949951, + "learning_rate": 3.6688310751985314e-07, + "loss": 1.0304, + "step": 14611 + }, + { + "epoch": 0.9161702928083265, + "grad_norm": 3.036193370819092, + "learning_rate": 3.6633827912623823e-07, + "loss": 1.1415, + "step": 14612 + }, + { + "epoch": 0.9162329926641168, + "grad_norm": 3.393994092941284, + "learning_rate": 3.657938480252543e-07, + "loss": 0.9218, + "step": 14613 + }, + { + "epoch": 0.9162956925199072, + "grad_norm": 3.5484402179718018, + "learning_rate": 3.6524981423935127e-07, + "loss": 0.9854, + "step": 14614 + }, + { + "epoch": 0.9163583923756975, + "grad_norm": 3.4720635414123535, + "learning_rate": 3.647061777909666e-07, + "loss": 1.0764, + "step": 14615 + }, + { + "epoch": 0.9164210922314878, + "grad_norm": 3.5439038276672363, + "learning_rate": 3.641629387025214e-07, + "loss": 1.1171, + "step": 14616 + }, + { + "epoch": 0.9164837920872781, + "grad_norm": 3.32179594039917, + "learning_rate": 3.636200969964165e-07, + "loss": 1.0172, + "step": 14617 + }, + { + "epoch": 0.9165464919430686, + "grad_norm": 3.3052196502685547, + "learning_rate": 3.630776526950408e-07, + "loss": 1.1249, + "step": 14618 + }, + { + "epoch": 0.9166091917988589, + "grad_norm": 3.7402420043945312, + "learning_rate": 3.6253560582076075e-07, + "loss": 1.1483, + "step": 14619 + }, + { + "epoch": 0.9166718916546492, + "grad_norm": 3.507518768310547, + "learning_rate": 3.6199395639593515e-07, + "loss": 1.1854, + "step": 14620 + }, + { + "epoch": 0.9167345915104396, + "grad_norm": 3.6304848194122314, + "learning_rate": 3.6145270444290057e-07, + "loss": 1.1817, + "step": 14621 + }, + { + "epoch": 0.9167972913662299, + "grad_norm": 3.9551942348480225, + "learning_rate": 3.609118499839748e-07, + "loss": 0.9859, + "step": 14622 + }, + { + "epoch": 0.9168599912220202, + "grad_norm": 3.4513440132141113, + "learning_rate": 3.603713930414676e-07, + "loss": 0.9698, + 
"step": 14623 + }, + { + "epoch": 0.9169226910778105, + "grad_norm": 3.5748515129089355, + "learning_rate": 3.5983133363766465e-07, + "loss": 0.9754, + "step": 14624 + }, + { + "epoch": 0.9169853909336009, + "grad_norm": 3.548499822616577, + "learning_rate": 3.5929167179483805e-07, + "loss": 1.0851, + "step": 14625 + }, + { + "epoch": 0.9170480907893912, + "grad_norm": 3.1373353004455566, + "learning_rate": 3.587524075352433e-07, + "loss": 1.0361, + "step": 14626 + }, + { + "epoch": 0.9171107906451815, + "grad_norm": 3.161831855773926, + "learning_rate": 3.582135408811216e-07, + "loss": 1.0961, + "step": 14627 + }, + { + "epoch": 0.9171734905009719, + "grad_norm": 4.328183650970459, + "learning_rate": 3.5767507185469396e-07, + "loss": 1.1205, + "step": 14628 + }, + { + "epoch": 0.9172361903567622, + "grad_norm": 3.456774950027466, + "learning_rate": 3.5713700047816715e-07, + "loss": 1.0833, + "step": 14629 + }, + { + "epoch": 0.9172988902125525, + "grad_norm": 3.429117202758789, + "learning_rate": 3.565993267737311e-07, + "loss": 1.1031, + "step": 14630 + }, + { + "epoch": 0.9173615900683428, + "grad_norm": 4.03260612487793, + "learning_rate": 3.560620507635615e-07, + "loss": 1.0163, + "step": 14631 + }, + { + "epoch": 0.9174242899241332, + "grad_norm": 3.2156472206115723, + "learning_rate": 3.5552517246981277e-07, + "loss": 0.9637, + "step": 14632 + }, + { + "epoch": 0.9174869897799235, + "grad_norm": 3.50469708442688, + "learning_rate": 3.549886919146273e-07, + "loss": 0.8788, + "step": 14633 + }, + { + "epoch": 0.9175496896357138, + "grad_norm": 3.5978167057037354, + "learning_rate": 3.544526091201295e-07, + "loss": 0.9738, + "step": 14634 + }, + { + "epoch": 0.9176123894915041, + "grad_norm": 3.0502736568450928, + "learning_rate": 3.5391692410842836e-07, + "loss": 1.0049, + "step": 14635 + }, + { + "epoch": 0.9176750893472945, + "grad_norm": 3.4359283447265625, + "learning_rate": 3.5338163690161296e-07, + "loss": 1.1757, + "step": 14636 + }, + { + "epoch": 0.9177377892030848, + "grad_norm": 3.528785467147827, + "learning_rate": 3.5284674752176006e-07, + "loss": 0.8666, + "step": 14637 + }, + { + "epoch": 0.9178004890588751, + "grad_norm": 3.064028739929199, + "learning_rate": 3.523122559909298e-07, + "loss": 0.9619, + "step": 14638 + }, + { + "epoch": 0.9178631889146655, + "grad_norm": 3.706953525543213, + "learning_rate": 3.5177816233116335e-07, + "loss": 1.0957, + "step": 14639 + }, + { + "epoch": 0.9179258887704558, + "grad_norm": 3.1206815242767334, + "learning_rate": 3.5124446656448654e-07, + "loss": 1.1198, + "step": 14640 + }, + { + "epoch": 0.9179885886262461, + "grad_norm": 3.379124641418457, + "learning_rate": 3.5071116871290835e-07, + "loss": 0.9879, + "step": 14641 + }, + { + "epoch": 0.9180512884820365, + "grad_norm": 3.6209022998809814, + "learning_rate": 3.501782687984234e-07, + "loss": 0.9978, + "step": 14642 + }, + { + "epoch": 0.9181139883378269, + "grad_norm": 3.544029951095581, + "learning_rate": 3.4964576684300865e-07, + "loss": 1.0906, + "step": 14643 + }, + { + "epoch": 0.9181766881936172, + "grad_norm": 3.5892250537872314, + "learning_rate": 3.4911366286862316e-07, + "loss": 1.2808, + "step": 14644 + }, + { + "epoch": 0.9182393880494075, + "grad_norm": 3.6835126876831055, + "learning_rate": 3.485819568972126e-07, + "loss": 1.1653, + "step": 14645 + }, + { + "epoch": 0.9183020879051979, + "grad_norm": 3.601439952850342, + "learning_rate": 3.4805064895070297e-07, + "loss": 1.0208, + "step": 14646 + }, + { + "epoch": 0.9183647877609882, + "grad_norm": 
3.8445160388946533, + "learning_rate": 3.4751973905100656e-07, + "loss": 1.0211, + "step": 14647 + }, + { + "epoch": 0.9184274876167785, + "grad_norm": 3.3851253986358643, + "learning_rate": 3.4698922722001703e-07, + "loss": 1.108, + "step": 14648 + }, + { + "epoch": 0.9184901874725688, + "grad_norm": 3.8473196029663086, + "learning_rate": 3.4645911347961357e-07, + "loss": 1.0701, + "step": 14649 + }, + { + "epoch": 0.9185528873283592, + "grad_norm": 3.399336099624634, + "learning_rate": 3.459293978516587e-07, + "loss": 1.0721, + "step": 14650 + }, + { + "epoch": 0.9186155871841495, + "grad_norm": 3.458381175994873, + "learning_rate": 3.4540008035799597e-07, + "loss": 1.092, + "step": 14651 + }, + { + "epoch": 0.9186782870399398, + "grad_norm": 3.2936229705810547, + "learning_rate": 3.448711610204558e-07, + "loss": 1.1293, + "step": 14652 + }, + { + "epoch": 0.9187409868957301, + "grad_norm": 3.1704859733581543, + "learning_rate": 3.443426398608507e-07, + "loss": 1.0095, + "step": 14653 + }, + { + "epoch": 0.9188036867515205, + "grad_norm": 3.068636178970337, + "learning_rate": 3.4381451690097653e-07, + "loss": 1.0519, + "step": 14654 + }, + { + "epoch": 0.9188663866073108, + "grad_norm": 3.6192209720611572, + "learning_rate": 3.432867921626126e-07, + "loss": 1.0925, + "step": 14655 + }, + { + "epoch": 0.9189290864631011, + "grad_norm": 3.7750208377838135, + "learning_rate": 3.4275946566752485e-07, + "loss": 1.1508, + "step": 14656 + }, + { + "epoch": 0.9189917863188914, + "grad_norm": 3.806755781173706, + "learning_rate": 3.42232537437458e-07, + "loss": 1.1762, + "step": 14657 + }, + { + "epoch": 0.9190544861746818, + "grad_norm": 3.0800282955169678, + "learning_rate": 3.4170600749414253e-07, + "loss": 1.1539, + "step": 14658 + }, + { + "epoch": 0.9191171860304721, + "grad_norm": 3.8060452938079834, + "learning_rate": 3.411798758592921e-07, + "loss": 0.9615, + "step": 14659 + }, + { + "epoch": 0.9191798858862624, + "grad_norm": 3.677821159362793, + "learning_rate": 3.406541425546073e-07, + "loss": 1.0402, + "step": 14660 + }, + { + "epoch": 0.9192425857420528, + "grad_norm": 3.074620246887207, + "learning_rate": 3.4012880760176723e-07, + "loss": 1.1129, + "step": 14661 + }, + { + "epoch": 0.9193052855978431, + "grad_norm": 3.6164984703063965, + "learning_rate": 3.3960387102243696e-07, + "loss": 0.8643, + "step": 14662 + }, + { + "epoch": 0.9193679854536334, + "grad_norm": 3.2786080837249756, + "learning_rate": 3.3907933283826356e-07, + "loss": 1.0895, + "step": 14663 + }, + { + "epoch": 0.9194306853094237, + "grad_norm": 3.533578395843506, + "learning_rate": 3.385551930708808e-07, + "loss": 1.0114, + "step": 14664 + }, + { + "epoch": 0.9194933851652142, + "grad_norm": 3.4454116821289062, + "learning_rate": 3.3803145174190476e-07, + "loss": 1.1543, + "step": 14665 + }, + { + "epoch": 0.9195560850210045, + "grad_norm": 3.5014307498931885, + "learning_rate": 3.375081088729326e-07, + "loss": 1.0441, + "step": 14666 + }, + { + "epoch": 0.9196187848767948, + "grad_norm": 3.467778444290161, + "learning_rate": 3.3698516448554817e-07, + "loss": 1.0323, + "step": 14667 + }, + { + "epoch": 0.9196814847325852, + "grad_norm": 3.684227705001831, + "learning_rate": 3.364626186013187e-07, + "loss": 1.1742, + "step": 14668 + }, + { + "epoch": 0.9197441845883755, + "grad_norm": 3.6506857872009277, + "learning_rate": 3.3594047124179243e-07, + "loss": 1.0011, + "step": 14669 + }, + { + "epoch": 0.9198068844441658, + "grad_norm": 3.242988348007202, + "learning_rate": 3.3541872242850215e-07, + "loss": 1.0826, 
+ "step": 14670 + }, + { + "epoch": 0.9198695842999561, + "grad_norm": 3.4369819164276123, + "learning_rate": 3.348973721829663e-07, + "loss": 0.9708, + "step": 14671 + }, + { + "epoch": 0.9199322841557465, + "grad_norm": 3.3685483932495117, + "learning_rate": 3.343764205266853e-07, + "loss": 0.9893, + "step": 14672 + }, + { + "epoch": 0.9199949840115368, + "grad_norm": 3.2507832050323486, + "learning_rate": 3.3385586748114096e-07, + "loss": 1.0129, + "step": 14673 + }, + { + "epoch": 0.9200576838673271, + "grad_norm": 3.3989202976226807, + "learning_rate": 3.33335713067805e-07, + "loss": 1.0524, + "step": 14674 + }, + { + "epoch": 0.9201203837231174, + "grad_norm": 2.897657632827759, + "learning_rate": 3.328159573081258e-07, + "loss": 1.1129, + "step": 14675 + }, + { + "epoch": 0.9201830835789078, + "grad_norm": 3.4645965099334717, + "learning_rate": 3.322966002235395e-07, + "loss": 1.039, + "step": 14676 + }, + { + "epoch": 0.9202457834346981, + "grad_norm": 3.6798183917999268, + "learning_rate": 3.3177764183546125e-07, + "loss": 1.0939, + "step": 14677 + }, + { + "epoch": 0.9203084832904884, + "grad_norm": 3.435645341873169, + "learning_rate": 3.3125908216529723e-07, + "loss": 0.9791, + "step": 14678 + }, + { + "epoch": 0.9203711831462787, + "grad_norm": 3.56583833694458, + "learning_rate": 3.3074092123443036e-07, + "loss": 0.9718, + "step": 14679 + }, + { + "epoch": 0.9204338830020691, + "grad_norm": 3.4526572227478027, + "learning_rate": 3.302231590642302e-07, + "loss": 1.0136, + "step": 14680 + }, + { + "epoch": 0.9204965828578594, + "grad_norm": 3.3562192916870117, + "learning_rate": 3.2970579567604855e-07, + "loss": 0.871, + "step": 14681 + }, + { + "epoch": 0.9205592827136497, + "grad_norm": 3.6522750854492188, + "learning_rate": 3.291888310912239e-07, + "loss": 1.058, + "step": 14682 + }, + { + "epoch": 0.9206219825694401, + "grad_norm": 3.5723066329956055, + "learning_rate": 3.2867226533107257e-07, + "loss": 1.1697, + "step": 14683 + }, + { + "epoch": 0.9206846824252304, + "grad_norm": 3.189147710800171, + "learning_rate": 3.2815609841689964e-07, + "loss": 1.1785, + "step": 14684 + }, + { + "epoch": 0.9207473822810207, + "grad_norm": 3.4745357036590576, + "learning_rate": 3.2764033036999153e-07, + "loss": 1.1287, + "step": 14685 + }, + { + "epoch": 0.920810082136811, + "grad_norm": 3.5552690029144287, + "learning_rate": 3.27124961211619e-07, + "loss": 1.0306, + "step": 14686 + }, + { + "epoch": 0.9208727819926014, + "grad_norm": 2.8549437522888184, + "learning_rate": 3.26609990963036e-07, + "loss": 1.1059, + "step": 14687 + }, + { + "epoch": 0.9209354818483918, + "grad_norm": 3.623163938522339, + "learning_rate": 3.2609541964547796e-07, + "loss": 1.1262, + "step": 14688 + }, + { + "epoch": 0.9209981817041821, + "grad_norm": 3.2228384017944336, + "learning_rate": 3.255812472801689e-07, + "loss": 1.1812, + "step": 14689 + }, + { + "epoch": 0.9210608815599725, + "grad_norm": 3.198563575744629, + "learning_rate": 3.2506747388831085e-07, + "loss": 1.0305, + "step": 14690 + }, + { + "epoch": 0.9211235814157628, + "grad_norm": 3.895564556121826, + "learning_rate": 3.245540994910934e-07, + "loss": 1.0075, + "step": 14691 + }, + { + "epoch": 0.9211862812715531, + "grad_norm": 3.3571670055389404, + "learning_rate": 3.2404112410968524e-07, + "loss": 1.015, + "step": 14692 + }, + { + "epoch": 0.9212489811273434, + "grad_norm": 3.2431044578552246, + "learning_rate": 3.235285477652461e-07, + "loss": 0.9744, + "step": 14693 + }, + { + "epoch": 0.9213116809831338, + "grad_norm": 
3.476562976837158, + "learning_rate": 3.230163704789113e-07, + "loss": 1.0946, + "step": 14694 + }, + { + "epoch": 0.9213743808389241, + "grad_norm": 3.3320674896240234, + "learning_rate": 3.225045922718029e-07, + "loss": 1.081, + "step": 14695 + }, + { + "epoch": 0.9214370806947144, + "grad_norm": 3.6655046939849854, + "learning_rate": 3.219932131650294e-07, + "loss": 1.0328, + "step": 14696 + }, + { + "epoch": 0.9214997805505047, + "grad_norm": 3.096719264984131, + "learning_rate": 3.2148223317967744e-07, + "loss": 1.0111, + "step": 14697 + }, + { + "epoch": 0.9215624804062951, + "grad_norm": 3.242731809616089, + "learning_rate": 3.2097165233682223e-07, + "loss": 1.1344, + "step": 14698 + }, + { + "epoch": 0.9216251802620854, + "grad_norm": 3.2299790382385254, + "learning_rate": 3.204614706575171e-07, + "loss": 1.0623, + "step": 14699 + }, + { + "epoch": 0.9216878801178757, + "grad_norm": 3.7825725078582764, + "learning_rate": 3.1995168816280395e-07, + "loss": 0.9729, + "step": 14700 + }, + { + "epoch": 0.921750579973666, + "grad_norm": 3.3184564113616943, + "learning_rate": 3.194423048737072e-07, + "loss": 1.1693, + "step": 14701 + }, + { + "epoch": 0.9218132798294564, + "grad_norm": 3.851288318634033, + "learning_rate": 3.1893332081123107e-07, + "loss": 1.0775, + "step": 14702 + }, + { + "epoch": 0.9218759796852467, + "grad_norm": 3.4689276218414307, + "learning_rate": 3.184247359963677e-07, + "loss": 1.0561, + "step": 14703 + }, + { + "epoch": 0.921938679541037, + "grad_norm": 3.8478012084960938, + "learning_rate": 3.179165504500914e-07, + "loss": 1.1686, + "step": 14704 + }, + { + "epoch": 0.9220013793968274, + "grad_norm": 3.316551446914673, + "learning_rate": 3.174087641933599e-07, + "loss": 1.2473, + "step": 14705 + }, + { + "epoch": 0.9220640792526177, + "grad_norm": 3.818598985671997, + "learning_rate": 3.16901377247113e-07, + "loss": 1.0923, + "step": 14706 + }, + { + "epoch": 0.922126779108408, + "grad_norm": 3.340440511703491, + "learning_rate": 3.163943896322763e-07, + "loss": 1.0775, + "step": 14707 + }, + { + "epoch": 0.9221894789641983, + "grad_norm": 3.471360206604004, + "learning_rate": 3.158878013697586e-07, + "loss": 1.0545, + "step": 14708 + }, + { + "epoch": 0.9222521788199887, + "grad_norm": 3.4068398475646973, + "learning_rate": 3.153816124804498e-07, + "loss": 1.1337, + "step": 14709 + }, + { + "epoch": 0.922314878675779, + "grad_norm": 3.4016940593719482, + "learning_rate": 3.148758229852267e-07, + "loss": 1.1038, + "step": 14710 + }, + { + "epoch": 0.9223775785315694, + "grad_norm": 3.548103094100952, + "learning_rate": 3.1437043290494793e-07, + "loss": 1.2082, + "step": 14711 + }, + { + "epoch": 0.9224402783873598, + "grad_norm": 3.2327325344085693, + "learning_rate": 3.1386544226045477e-07, + "loss": 0.9917, + "step": 14712 + }, + { + "epoch": 0.9225029782431501, + "grad_norm": 3.2848403453826904, + "learning_rate": 3.1336085107257496e-07, + "loss": 1.3254, + "step": 14713 + }, + { + "epoch": 0.9225656780989404, + "grad_norm": 3.390768051147461, + "learning_rate": 3.128566593621152e-07, + "loss": 0.9663, + "step": 14714 + }, + { + "epoch": 0.9226283779547307, + "grad_norm": 3.5043439865112305, + "learning_rate": 3.123528671498699e-07, + "loss": 1.0568, + "step": 14715 + }, + { + "epoch": 0.9226910778105211, + "grad_norm": 3.615680456161499, + "learning_rate": 3.1184947445661696e-07, + "loss": 1.0778, + "step": 14716 + }, + { + "epoch": 0.9227537776663114, + "grad_norm": 3.1898081302642822, + "learning_rate": 3.1134648130311305e-07, + "loss": 1.0623, + "step": 
14717 + }, + { + "epoch": 0.9228164775221017, + "grad_norm": 2.9372637271881104, + "learning_rate": 3.108438877101039e-07, + "loss": 1.0201, + "step": 14718 + }, + { + "epoch": 0.922879177377892, + "grad_norm": 3.3042612075805664, + "learning_rate": 3.103416936983172e-07, + "loss": 1.0224, + "step": 14719 + }, + { + "epoch": 0.9229418772336824, + "grad_norm": 3.9371590614318848, + "learning_rate": 3.09839899288461e-07, + "loss": 1.1558, + "step": 14720 + }, + { + "epoch": 0.9230045770894727, + "grad_norm": 3.045271158218384, + "learning_rate": 3.093385045012298e-07, + "loss": 1.0764, + "step": 14721 + }, + { + "epoch": 0.923067276945263, + "grad_norm": 3.283461570739746, + "learning_rate": 3.088375093573037e-07, + "loss": 1.0511, + "step": 14722 + }, + { + "epoch": 0.9231299768010534, + "grad_norm": 3.9234631061553955, + "learning_rate": 3.083369138773407e-07, + "loss": 1.0023, + "step": 14723 + }, + { + "epoch": 0.9231926766568437, + "grad_norm": 3.2734742164611816, + "learning_rate": 3.078367180819863e-07, + "loss": 1.0497, + "step": 14724 + }, + { + "epoch": 0.923255376512634, + "grad_norm": 3.536924362182617, + "learning_rate": 3.073369219918698e-07, + "loss": 1.3487, + "step": 14725 + }, + { + "epoch": 0.9233180763684243, + "grad_norm": 3.356498956680298, + "learning_rate": 3.068375256276024e-07, + "loss": 1.1527, + "step": 14726 + }, + { + "epoch": 0.9233807762242147, + "grad_norm": 3.3121542930603027, + "learning_rate": 3.0633852900977755e-07, + "loss": 1.0715, + "step": 14727 + }, + { + "epoch": 0.923443476080005, + "grad_norm": 3.4568707942962646, + "learning_rate": 3.0583993215897555e-07, + "loss": 0.9691, + "step": 14728 + }, + { + "epoch": 0.9235061759357953, + "grad_norm": 4.0493011474609375, + "learning_rate": 3.053417350957577e-07, + "loss": 1.1554, + "step": 14729 + }, + { + "epoch": 0.9235688757915856, + "grad_norm": 3.609839677810669, + "learning_rate": 3.048439378406709e-07, + "loss": 1.0824, + "step": 14730 + }, + { + "epoch": 0.923631575647376, + "grad_norm": 3.3382575511932373, + "learning_rate": 3.0434654041424315e-07, + "loss": 0.8998, + "step": 14731 + }, + { + "epoch": 0.9236942755031663, + "grad_norm": 3.551832675933838, + "learning_rate": 3.0384954283698697e-07, + "loss": 1.0026, + "step": 14732 + }, + { + "epoch": 0.9237569753589566, + "grad_norm": 3.771453857421875, + "learning_rate": 3.033529451293993e-07, + "loss": 1.0964, + "step": 14733 + }, + { + "epoch": 0.9238196752147471, + "grad_norm": 3.4273507595062256, + "learning_rate": 3.0285674731196033e-07, + "loss": 1.1132, + "step": 14734 + }, + { + "epoch": 0.9238823750705374, + "grad_norm": 3.4913532733917236, + "learning_rate": 3.023609494051316e-07, + "loss": 1.0824, + "step": 14735 + }, + { + "epoch": 0.9239450749263277, + "grad_norm": 3.110999584197998, + "learning_rate": 3.0186555142936116e-07, + "loss": 1.0059, + "step": 14736 + }, + { + "epoch": 0.924007774782118, + "grad_norm": 3.20033860206604, + "learning_rate": 3.0137055340507927e-07, + "loss": 1.1383, + "step": 14737 + }, + { + "epoch": 0.9240704746379084, + "grad_norm": 3.510699510574341, + "learning_rate": 3.008759553526996e-07, + "loss": 1.0008, + "step": 14738 + }, + { + "epoch": 0.9241331744936987, + "grad_norm": 3.218859910964966, + "learning_rate": 3.003817572926171e-07, + "loss": 1.1236, + "step": 14739 + }, + { + "epoch": 0.924195874349489, + "grad_norm": 3.399404525756836, + "learning_rate": 2.998879592452164e-07, + "loss": 1.1122, + "step": 14740 + }, + { + "epoch": 0.9242585742052793, + "grad_norm": 3.732961654663086, + 
"learning_rate": 2.993945612308591e-07, + "loss": 0.9513, + "step": 14741 + }, + { + "epoch": 0.9243212740610697, + "grad_norm": 3.897552013397217, + "learning_rate": 2.989015632698944e-07, + "loss": 1.1098, + "step": 14742 + }, + { + "epoch": 0.92438397391686, + "grad_norm": 3.315135955810547, + "learning_rate": 2.984089653826516e-07, + "loss": 1.0554, + "step": 14743 + }, + { + "epoch": 0.9244466737726503, + "grad_norm": 3.5773909091949463, + "learning_rate": 2.979167675894479e-07, + "loss": 0.9941, + "step": 14744 + }, + { + "epoch": 0.9245093736284407, + "grad_norm": 3.723658561706543, + "learning_rate": 2.974249699105802e-07, + "loss": 0.9496, + "step": 14745 + }, + { + "epoch": 0.924572073484231, + "grad_norm": 3.625481605529785, + "learning_rate": 2.9693357236633025e-07, + "loss": 1.1653, + "step": 14746 + }, + { + "epoch": 0.9246347733400213, + "grad_norm": 3.210786819458008, + "learning_rate": 2.964425749769628e-07, + "loss": 1.2327, + "step": 14747 + }, + { + "epoch": 0.9246974731958116, + "grad_norm": 4.016934394836426, + "learning_rate": 2.959519777627284e-07, + "loss": 1.0444, + "step": 14748 + }, + { + "epoch": 0.924760173051602, + "grad_norm": 3.489818811416626, + "learning_rate": 2.954617807438587e-07, + "loss": 1.1265, + "step": 14749 + }, + { + "epoch": 0.9248228729073923, + "grad_norm": 3.514275074005127, + "learning_rate": 2.9497198394056846e-07, + "loss": 1.0577, + "step": 14750 + }, + { + "epoch": 0.9248855727631826, + "grad_norm": 3.5810883045196533, + "learning_rate": 2.944825873730561e-07, + "loss": 1.0781, + "step": 14751 + }, + { + "epoch": 0.9249482726189729, + "grad_norm": 3.1567842960357666, + "learning_rate": 2.939935910615077e-07, + "loss": 1.1744, + "step": 14752 + }, + { + "epoch": 0.9250109724747633, + "grad_norm": 3.537292718887329, + "learning_rate": 2.9350499502608707e-07, + "loss": 1.0675, + "step": 14753 + }, + { + "epoch": 0.9250736723305536, + "grad_norm": 3.684541702270508, + "learning_rate": 2.930167992869426e-07, + "loss": 1.0288, + "step": 14754 + }, + { + "epoch": 0.9251363721863439, + "grad_norm": 4.263824462890625, + "learning_rate": 2.925290038642103e-07, + "loss": 1.1371, + "step": 14755 + }, + { + "epoch": 0.9251990720421343, + "grad_norm": 3.603472948074341, + "learning_rate": 2.9204160877800646e-07, + "loss": 1.0454, + "step": 14756 + }, + { + "epoch": 0.9252617718979247, + "grad_norm": 3.7533414363861084, + "learning_rate": 2.9155461404843043e-07, + "loss": 1.1288, + "step": 14757 + }, + { + "epoch": 0.925324471753715, + "grad_norm": 3.6499087810516357, + "learning_rate": 2.9106801969556507e-07, + "loss": 1.1356, + "step": 14758 + }, + { + "epoch": 0.9253871716095053, + "grad_norm": 3.2866878509521484, + "learning_rate": 2.905818257394799e-07, + "loss": 1.1474, + "step": 14759 + }, + { + "epoch": 0.9254498714652957, + "grad_norm": 3.185267210006714, + "learning_rate": 2.9009603220022333e-07, + "loss": 1.2318, + "step": 14760 + }, + { + "epoch": 0.925512571321086, + "grad_norm": 3.339128255844116, + "learning_rate": 2.896106390978315e-07, + "loss": 1.051, + "step": 14761 + }, + { + "epoch": 0.9255752711768763, + "grad_norm": 3.6881580352783203, + "learning_rate": 2.8912564645231954e-07, + "loss": 1.0088, + "step": 14762 + }, + { + "epoch": 0.9256379710326667, + "grad_norm": 2.726001024246216, + "learning_rate": 2.8864105428369037e-07, + "loss": 0.9927, + "step": 14763 + }, + { + "epoch": 0.925700670888457, + "grad_norm": 3.393388509750366, + "learning_rate": 2.881568626119291e-07, + "loss": 0.9259, + "step": 14764 + }, + { + "epoch": 
0.9257633707442473, + "grad_norm": 3.7627005577087402, + "learning_rate": 2.8767307145700085e-07, + "loss": 1.0425, + "step": 14765 + }, + { + "epoch": 0.9258260706000376, + "grad_norm": 3.3018791675567627, + "learning_rate": 2.871896808388608e-07, + "loss": 1.2051, + "step": 14766 + }, + { + "epoch": 0.925888770455828, + "grad_norm": 3.709223985671997, + "learning_rate": 2.8670669077744296e-07, + "loss": 1.0552, + "step": 14767 + }, + { + "epoch": 0.9259514703116183, + "grad_norm": 3.7577626705169678, + "learning_rate": 2.8622410129266474e-07, + "loss": 1.1209, + "step": 14768 + }, + { + "epoch": 0.9260141701674086, + "grad_norm": 3.6046853065490723, + "learning_rate": 2.857419124044292e-07, + "loss": 1.1005, + "step": 14769 + }, + { + "epoch": 0.9260768700231989, + "grad_norm": 3.42031192779541, + "learning_rate": 2.852601241326203e-07, + "loss": 1.2079, + "step": 14770 + }, + { + "epoch": 0.9261395698789893, + "grad_norm": 3.817284345626831, + "learning_rate": 2.8477873649711e-07, + "loss": 1.0591, + "step": 14771 + }, + { + "epoch": 0.9262022697347796, + "grad_norm": 3.592552661895752, + "learning_rate": 2.842977495177479e-07, + "loss": 1.0251, + "step": 14772 + }, + { + "epoch": 0.9262649695905699, + "grad_norm": 3.404703140258789, + "learning_rate": 2.8381716321436936e-07, + "loss": 1.1376, + "step": 14773 + }, + { + "epoch": 0.9263276694463602, + "grad_norm": 3.561640739440918, + "learning_rate": 2.8333697760679734e-07, + "loss": 1.045, + "step": 14774 + }, + { + "epoch": 0.9263903693021506, + "grad_norm": 3.49210786819458, + "learning_rate": 2.828571927148327e-07, + "loss": 1.2079, + "step": 14775 + }, + { + "epoch": 0.9264530691579409, + "grad_norm": 3.7973246574401855, + "learning_rate": 2.8237780855825957e-07, + "loss": 1.1854, + "step": 14776 + }, + { + "epoch": 0.9265157690137312, + "grad_norm": 3.51084041595459, + "learning_rate": 2.8189882515685216e-07, + "loss": 1.1132, + "step": 14777 + }, + { + "epoch": 0.9265784688695216, + "grad_norm": 3.493988037109375, + "learning_rate": 2.814202425303614e-07, + "loss": 1.1251, + "step": 14778 + }, + { + "epoch": 0.9266411687253119, + "grad_norm": 3.406982660293579, + "learning_rate": 2.809420606985236e-07, + "loss": 1.1991, + "step": 14779 + }, + { + "epoch": 0.9267038685811022, + "grad_norm": 3.359525680541992, + "learning_rate": 2.804642796810597e-07, + "loss": 1.1167, + "step": 14780 + }, + { + "epoch": 0.9267665684368926, + "grad_norm": 3.4427525997161865, + "learning_rate": 2.79986899497674e-07, + "loss": 1.0731, + "step": 14781 + }, + { + "epoch": 0.926829268292683, + "grad_norm": 3.4208717346191406, + "learning_rate": 2.795099201680529e-07, + "loss": 1.1941, + "step": 14782 + }, + { + "epoch": 0.9268919681484733, + "grad_norm": 3.205200433731079, + "learning_rate": 2.790333417118674e-07, + "loss": 1.1063, + "step": 14783 + }, + { + "epoch": 0.9269546680042636, + "grad_norm": 3.347123384475708, + "learning_rate": 2.7855716414876944e-07, + "loss": 1.1697, + "step": 14784 + }, + { + "epoch": 0.927017367860054, + "grad_norm": 2.874932289123535, + "learning_rate": 2.780813874984001e-07, + "loss": 1.1509, + "step": 14785 + }, + { + "epoch": 0.9270800677158443, + "grad_norm": 3.5395450592041016, + "learning_rate": 2.776060117803792e-07, + "loss": 1.0557, + "step": 14786 + }, + { + "epoch": 0.9271427675716346, + "grad_norm": 3.745417594909668, + "learning_rate": 2.771310370143099e-07, + "loss": 1.2898, + "step": 14787 + }, + { + "epoch": 0.9272054674274249, + "grad_norm": 3.3659629821777344, + "learning_rate": 2.766564632197821e-07, + 
"loss": 0.9985, + "step": 14788 + }, + { + "epoch": 0.9272681672832153, + "grad_norm": 3.422475814819336, + "learning_rate": 2.761822904163669e-07, + "loss": 1.0966, + "step": 14789 + }, + { + "epoch": 0.9273308671390056, + "grad_norm": 3.429534673690796, + "learning_rate": 2.7570851862361747e-07, + "loss": 0.9705, + "step": 14790 + }, + { + "epoch": 0.9273935669947959, + "grad_norm": 3.5062553882598877, + "learning_rate": 2.7523514786107373e-07, + "loss": 1.1118, + "step": 14791 + }, + { + "epoch": 0.9274562668505862, + "grad_norm": 3.3904190063476562, + "learning_rate": 2.747621781482568e-07, + "loss": 1.0722, + "step": 14792 + }, + { + "epoch": 0.9275189667063766, + "grad_norm": 3.1082873344421387, + "learning_rate": 2.742896095046732e-07, + "loss": 1.1181, + "step": 14793 + }, + { + "epoch": 0.9275816665621669, + "grad_norm": 2.9452908039093018, + "learning_rate": 2.7381744194980963e-07, + "loss": 1.1707, + "step": 14794 + }, + { + "epoch": 0.9276443664179572, + "grad_norm": 3.3909895420074463, + "learning_rate": 2.733456755031416e-07, + "loss": 1.1342, + "step": 14795 + }, + { + "epoch": 0.9277070662737475, + "grad_norm": 3.353316307067871, + "learning_rate": 2.7287431018412135e-07, + "loss": 1.1043, + "step": 14796 + }, + { + "epoch": 0.9277697661295379, + "grad_norm": 3.371201753616333, + "learning_rate": 2.7240334601218996e-07, + "loss": 1.1111, + "step": 14797 + }, + { + "epoch": 0.9278324659853282, + "grad_norm": 3.6537883281707764, + "learning_rate": 2.719327830067675e-07, + "loss": 0.9144, + "step": 14798 + }, + { + "epoch": 0.9278951658411185, + "grad_norm": 3.7747697830200195, + "learning_rate": 2.7146262118726395e-07, + "loss": 0.974, + "step": 14799 + }, + { + "epoch": 0.9279578656969089, + "grad_norm": 3.492544412612915, + "learning_rate": 2.709928605730661e-07, + "loss": 1.1445, + "step": 14800 + }, + { + "epoch": 0.9280205655526992, + "grad_norm": 3.8724753856658936, + "learning_rate": 2.705235011835472e-07, + "loss": 0.9582, + "step": 14801 + }, + { + "epoch": 0.9280832654084895, + "grad_norm": 3.339519739151001, + "learning_rate": 2.700545430380641e-07, + "loss": 0.9441, + "step": 14802 + }, + { + "epoch": 0.9281459652642798, + "grad_norm": 3.5354228019714355, + "learning_rate": 2.695859861559569e-07, + "loss": 1.0547, + "step": 14803 + }, + { + "epoch": 0.9282086651200703, + "grad_norm": 3.6392862796783447, + "learning_rate": 2.6911783055654896e-07, + "loss": 1.0347, + "step": 14804 + }, + { + "epoch": 0.9282713649758606, + "grad_norm": 3.4991843700408936, + "learning_rate": 2.6865007625914483e-07, + "loss": 1.0486, + "step": 14805 + }, + { + "epoch": 0.9283340648316509, + "grad_norm": 3.6300840377807617, + "learning_rate": 2.6818272328303806e-07, + "loss": 0.9877, + "step": 14806 + }, + { + "epoch": 0.9283967646874413, + "grad_norm": 3.260369300842285, + "learning_rate": 2.6771577164750097e-07, + "loss": 1.1522, + "step": 14807 + }, + { + "epoch": 0.9284594645432316, + "grad_norm": 3.39923357963562, + "learning_rate": 2.6724922137179034e-07, + "loss": 1.0597, + "step": 14808 + }, + { + "epoch": 0.9285221643990219, + "grad_norm": 3.4181103706359863, + "learning_rate": 2.6678307247514523e-07, + "loss": 1.0772, + "step": 14809 + }, + { + "epoch": 0.9285848642548122, + "grad_norm": 3.318297863006592, + "learning_rate": 2.6631732497679363e-07, + "loss": 1.1134, + "step": 14810 + }, + { + "epoch": 0.9286475641106026, + "grad_norm": 3.246382713317871, + "learning_rate": 2.6585197889593903e-07, + "loss": 1.0472, + "step": 14811 + }, + { + "epoch": 0.9287102639663929, + 
"grad_norm": 3.5362741947174072, + "learning_rate": 2.6538703425177505e-07, + "loss": 1.2695, + "step": 14812 + }, + { + "epoch": 0.9287729638221832, + "grad_norm": 3.7302348613739014, + "learning_rate": 2.649224910634729e-07, + "loss": 1.042, + "step": 14813 + }, + { + "epoch": 0.9288356636779735, + "grad_norm": 3.6199183464050293, + "learning_rate": 2.64458349350194e-07, + "loss": 0.9421, + "step": 14814 + }, + { + "epoch": 0.9288983635337639, + "grad_norm": 3.427168130874634, + "learning_rate": 2.639946091310786e-07, + "loss": 1.1539, + "step": 14815 + }, + { + "epoch": 0.9289610633895542, + "grad_norm": 4.043952465057373, + "learning_rate": 2.6353127042524795e-07, + "loss": 1.1363, + "step": 14816 + }, + { + "epoch": 0.9290237632453445, + "grad_norm": 3.8452577590942383, + "learning_rate": 2.630683332518158e-07, + "loss": 0.8494, + "step": 14817 + }, + { + "epoch": 0.9290864631011349, + "grad_norm": 3.2974820137023926, + "learning_rate": 2.6260579762986905e-07, + "loss": 1.2574, + "step": 14818 + }, + { + "epoch": 0.9291491629569252, + "grad_norm": 3.519914150238037, + "learning_rate": 2.6214366357848575e-07, + "loss": 1.133, + "step": 14819 + }, + { + "epoch": 0.9292118628127155, + "grad_norm": 3.2415788173675537, + "learning_rate": 2.616819311167218e-07, + "loss": 1.1054, + "step": 14820 + }, + { + "epoch": 0.9292745626685058, + "grad_norm": 3.1477043628692627, + "learning_rate": 2.6122060026362086e-07, + "loss": 1.1599, + "step": 14821 + }, + { + "epoch": 0.9293372625242962, + "grad_norm": 3.571658134460449, + "learning_rate": 2.6075967103820763e-07, + "loss": 1.0105, + "step": 14822 + }, + { + "epoch": 0.9293999623800865, + "grad_norm": 3.4458606243133545, + "learning_rate": 2.602991434594915e-07, + "loss": 0.9469, + "step": 14823 + }, + { + "epoch": 0.9294626622358768, + "grad_norm": 3.371685743331909, + "learning_rate": 2.598390175464627e-07, + "loss": 1.2007, + "step": 14824 + }, + { + "epoch": 0.9295253620916671, + "grad_norm": 3.7806155681610107, + "learning_rate": 2.593792933180983e-07, + "loss": 1.0265, + "step": 14825 + }, + { + "epoch": 0.9295880619474575, + "grad_norm": 3.3341333866119385, + "learning_rate": 2.5891997079335873e-07, + "loss": 1.1316, + "step": 14826 + }, + { + "epoch": 0.9296507618032479, + "grad_norm": 3.8742733001708984, + "learning_rate": 2.584610499911833e-07, + "loss": 1.1857, + "step": 14827 + }, + { + "epoch": 0.9297134616590382, + "grad_norm": 3.2734997272491455, + "learning_rate": 2.580025309305001e-07, + "loss": 1.0991, + "step": 14828 + }, + { + "epoch": 0.9297761615148286, + "grad_norm": 3.195796012878418, + "learning_rate": 2.5754441363021854e-07, + "loss": 1.1865, + "step": 14829 + }, + { + "epoch": 0.9298388613706189, + "grad_norm": 3.4848434925079346, + "learning_rate": 2.5708669810923016e-07, + "loss": 1.1976, + "step": 14830 + }, + { + "epoch": 0.9299015612264092, + "grad_norm": 3.3013429641723633, + "learning_rate": 2.5662938438641207e-07, + "loss": 0.9413, + "step": 14831 + }, + { + "epoch": 0.9299642610821995, + "grad_norm": 3.1467366218566895, + "learning_rate": 2.561724724806236e-07, + "loss": 1.2756, + "step": 14832 + }, + { + "epoch": 0.9300269609379899, + "grad_norm": 3.379002094268799, + "learning_rate": 2.5571596241070863e-07, + "loss": 1.2125, + "step": 14833 + }, + { + "epoch": 0.9300896607937802, + "grad_norm": 3.2007312774658203, + "learning_rate": 2.5525985419549205e-07, + "loss": 1.0614, + "step": 14834 + }, + { + "epoch": 0.9301523606495705, + "grad_norm": 3.706984758377075, + "learning_rate": 2.5480414785378437e-07, + 
"loss": 1.1976, + "step": 14835 + }, + { + "epoch": 0.9302150605053608, + "grad_norm": 3.562257766723633, + "learning_rate": 2.543488434043795e-07, + "loss": 1.1488, + "step": 14836 + }, + { + "epoch": 0.9302777603611512, + "grad_norm": 3.2565622329711914, + "learning_rate": 2.5389394086605456e-07, + "loss": 1.1054, + "step": 14837 + }, + { + "epoch": 0.9303404602169415, + "grad_norm": 3.579254627227783, + "learning_rate": 2.5343944025756795e-07, + "loss": 1.0036, + "step": 14838 + }, + { + "epoch": 0.9304031600727318, + "grad_norm": 3.4975059032440186, + "learning_rate": 2.529853415976646e-07, + "loss": 0.9303, + "step": 14839 + }, + { + "epoch": 0.9304658599285222, + "grad_norm": 3.4654672145843506, + "learning_rate": 2.52531644905073e-07, + "loss": 1.066, + "step": 14840 + }, + { + "epoch": 0.9305285597843125, + "grad_norm": 3.4239649772644043, + "learning_rate": 2.5207835019850023e-07, + "loss": 0.957, + "step": 14841 + }, + { + "epoch": 0.9305912596401028, + "grad_norm": 3.40706205368042, + "learning_rate": 2.516254574966426e-07, + "loss": 1.3129, + "step": 14842 + }, + { + "epoch": 0.9306539594958931, + "grad_norm": 3.616631507873535, + "learning_rate": 2.511729668181773e-07, + "loss": 1.0042, + "step": 14843 + }, + { + "epoch": 0.9307166593516835, + "grad_norm": 3.4108223915100098, + "learning_rate": 2.507208781817638e-07, + "loss": 1.1385, + "step": 14844 + }, + { + "epoch": 0.9307793592074738, + "grad_norm": 3.4123075008392334, + "learning_rate": 2.502691916060485e-07, + "loss": 1.0921, + "step": 14845 + }, + { + "epoch": 0.9308420590632641, + "grad_norm": 3.309405565261841, + "learning_rate": 2.4981790710965517e-07, + "loss": 0.9387, + "step": 14846 + }, + { + "epoch": 0.9309047589190544, + "grad_norm": 3.3783154487609863, + "learning_rate": 2.49367024711199e-07, + "loss": 0.9609, + "step": 14847 + }, + { + "epoch": 0.9309674587748448, + "grad_norm": 3.515336036682129, + "learning_rate": 2.4891654442927184e-07, + "loss": 1.0778, + "step": 14848 + }, + { + "epoch": 0.9310301586306351, + "grad_norm": 3.4859068393707275, + "learning_rate": 2.484664662824521e-07, + "loss": 0.9383, + "step": 14849 + }, + { + "epoch": 0.9310928584864255, + "grad_norm": 3.1351888179779053, + "learning_rate": 2.480167902893016e-07, + "loss": 1.0403, + "step": 14850 + }, + { + "epoch": 0.9311555583422159, + "grad_norm": 3.815262794494629, + "learning_rate": 2.475675164683644e-07, + "loss": 1.0278, + "step": 14851 + }, + { + "epoch": 0.9312182581980062, + "grad_norm": 3.4669628143310547, + "learning_rate": 2.471186448381691e-07, + "loss": 1.1721, + "step": 14852 + }, + { + "epoch": 0.9312809580537965, + "grad_norm": 3.942563056945801, + "learning_rate": 2.4667017541722516e-07, + "loss": 0.9883, + "step": 14853 + }, + { + "epoch": 0.9313436579095868, + "grad_norm": 3.599395275115967, + "learning_rate": 2.4622210822403123e-07, + "loss": 1.0292, + "step": 14854 + }, + { + "epoch": 0.9314063577653772, + "grad_norm": 3.559643507003784, + "learning_rate": 2.4577444327706255e-07, + "loss": 1.0849, + "step": 14855 + }, + { + "epoch": 0.9314690576211675, + "grad_norm": 3.5396904945373535, + "learning_rate": 2.45327180594781e-07, + "loss": 1.03, + "step": 14856 + }, + { + "epoch": 0.9315317574769578, + "grad_norm": 3.351133346557617, + "learning_rate": 2.44880320195634e-07, + "loss": 1.0813, + "step": 14857 + }, + { + "epoch": 0.9315944573327481, + "grad_norm": 3.4382293224334717, + "learning_rate": 2.44433862098048e-07, + "loss": 1.0682, + "step": 14858 + }, + { + "epoch": 0.9316571571885385, + "grad_norm": 
3.2873504161834717, + "learning_rate": 2.439878063204348e-07, + "loss": 1.1082, + "step": 14859 + }, + { + "epoch": 0.9317198570443288, + "grad_norm": 3.4645135402679443, + "learning_rate": 2.43542152881191e-07, + "loss": 0.9131, + "step": 14860 + }, + { + "epoch": 0.9317825569001191, + "grad_norm": 3.642071008682251, + "learning_rate": 2.4309690179869503e-07, + "loss": 1.0728, + "step": 14861 + }, + { + "epoch": 0.9318452567559095, + "grad_norm": 3.504133462905884, + "learning_rate": 2.42652053091309e-07, + "loss": 1.2131, + "step": 14862 + }, + { + "epoch": 0.9319079566116998, + "grad_norm": 3.372994899749756, + "learning_rate": 2.422076067773793e-07, + "loss": 1.0678, + "step": 14863 + }, + { + "epoch": 0.9319706564674901, + "grad_norm": 3.362250804901123, + "learning_rate": 2.417635628752324e-07, + "loss": 1.1031, + "step": 14864 + }, + { + "epoch": 0.9320333563232804, + "grad_norm": 3.2269792556762695, + "learning_rate": 2.4131992140318363e-07, + "loss": 1.1766, + "step": 14865 + }, + { + "epoch": 0.9320960561790708, + "grad_norm": 3.362757682800293, + "learning_rate": 2.408766823795261e-07, + "loss": 1.2114, + "step": 14866 + }, + { + "epoch": 0.9321587560348611, + "grad_norm": 3.259978771209717, + "learning_rate": 2.404338458225408e-07, + "loss": 1.0874, + "step": 14867 + }, + { + "epoch": 0.9322214558906514, + "grad_norm": 3.5802695751190186, + "learning_rate": 2.399914117504909e-07, + "loss": 1.1431, + "step": 14868 + }, + { + "epoch": 0.9322841557464417, + "grad_norm": 3.155430555343628, + "learning_rate": 2.3954938018162067e-07, + "loss": 1.0369, + "step": 14869 + }, + { + "epoch": 0.9323468556022321, + "grad_norm": 3.5082287788391113, + "learning_rate": 2.3910775113416e-07, + "loss": 1.0517, + "step": 14870 + }, + { + "epoch": 0.9324095554580224, + "grad_norm": 3.272951602935791, + "learning_rate": 2.38666524626322e-07, + "loss": 1.0823, + "step": 14871 + }, + { + "epoch": 0.9324722553138127, + "grad_norm": 3.7600114345550537, + "learning_rate": 2.3822570067630335e-07, + "loss": 1.057, + "step": 14872 + }, + { + "epoch": 0.9325349551696032, + "grad_norm": 3.5491180419921875, + "learning_rate": 2.3778527930228278e-07, + "loss": 1.019, + "step": 14873 + }, + { + "epoch": 0.9325976550253935, + "grad_norm": 3.0232431888580322, + "learning_rate": 2.3734526052242356e-07, + "loss": 1.186, + "step": 14874 + }, + { + "epoch": 0.9326603548811838, + "grad_norm": 3.158738374710083, + "learning_rate": 2.3690564435487117e-07, + "loss": 1.1927, + "step": 14875 + }, + { + "epoch": 0.9327230547369741, + "grad_norm": 3.29183030128479, + "learning_rate": 2.364664308177578e-07, + "loss": 1.1611, + "step": 14876 + }, + { + "epoch": 0.9327857545927645, + "grad_norm": 3.3450143337249756, + "learning_rate": 2.3602761992919554e-07, + "loss": 1.0549, + "step": 14877 + }, + { + "epoch": 0.9328484544485548, + "grad_norm": 3.4970128536224365, + "learning_rate": 2.355892117072789e-07, + "loss": 1.1579, + "step": 14878 + }, + { + "epoch": 0.9329111543043451, + "grad_norm": 3.2782742977142334, + "learning_rate": 2.3515120617009112e-07, + "loss": 1.0927, + "step": 14879 + }, + { + "epoch": 0.9329738541601355, + "grad_norm": 3.616786003112793, + "learning_rate": 2.3471360333569338e-07, + "loss": 0.9595, + "step": 14880 + }, + { + "epoch": 0.9330365540159258, + "grad_norm": 3.599532127380371, + "learning_rate": 2.3427640322213342e-07, + "loss": 1.2056, + "step": 14881 + }, + { + "epoch": 0.9330992538717161, + "grad_norm": 3.4774293899536133, + "learning_rate": 2.3383960584744014e-07, + "loss": 1.0214, + "step": 
14882 + }, + { + "epoch": 0.9331619537275064, + "grad_norm": 3.389697313308716, + "learning_rate": 2.3340321122962916e-07, + "loss": 1.1074, + "step": 14883 + }, + { + "epoch": 0.9332246535832968, + "grad_norm": 3.6862690448760986, + "learning_rate": 2.3296721938669608e-07, + "loss": 1.2091, + "step": 14884 + }, + { + "epoch": 0.9332873534390871, + "grad_norm": 3.2154793739318848, + "learning_rate": 2.3253163033662097e-07, + "loss": 0.9941, + "step": 14885 + }, + { + "epoch": 0.9333500532948774, + "grad_norm": 3.7464418411254883, + "learning_rate": 2.320964440973672e-07, + "loss": 1.0614, + "step": 14886 + }, + { + "epoch": 0.9334127531506677, + "grad_norm": 3.293915033340454, + "learning_rate": 2.3166166068688378e-07, + "loss": 1.1275, + "step": 14887 + }, + { + "epoch": 0.9334754530064581, + "grad_norm": 3.458153486251831, + "learning_rate": 2.312272801230997e-07, + "loss": 1.1412, + "step": 14888 + }, + { + "epoch": 0.9335381528622484, + "grad_norm": 3.5169525146484375, + "learning_rate": 2.307933024239284e-07, + "loss": 1.0709, + "step": 14889 + }, + { + "epoch": 0.9336008527180387, + "grad_norm": 3.5879955291748047, + "learning_rate": 2.3035972760727e-07, + "loss": 1.227, + "step": 14890 + }, + { + "epoch": 0.933663552573829, + "grad_norm": 3.6709558963775635, + "learning_rate": 2.2992655569100132e-07, + "loss": 1.0854, + "step": 14891 + }, + { + "epoch": 0.9337262524296194, + "grad_norm": 3.2557873725891113, + "learning_rate": 2.2949378669298917e-07, + "loss": 1.2052, + "step": 14892 + }, + { + "epoch": 0.9337889522854097, + "grad_norm": 3.2250020503997803, + "learning_rate": 2.2906142063107928e-07, + "loss": 1.1936, + "step": 14893 + }, + { + "epoch": 0.9338516521412, + "grad_norm": 3.5073344707489014, + "learning_rate": 2.2862945752310295e-07, + "loss": 1.0599, + "step": 14894 + }, + { + "epoch": 0.9339143519969904, + "grad_norm": 3.797926187515259, + "learning_rate": 2.2819789738687482e-07, + "loss": 1.0625, + "step": 14895 + }, + { + "epoch": 0.9339770518527808, + "grad_norm": 3.8600451946258545, + "learning_rate": 2.2776674024019286e-07, + "loss": 1.1245, + "step": 14896 + }, + { + "epoch": 0.9340397517085711, + "grad_norm": 3.3276960849761963, + "learning_rate": 2.2733598610083617e-07, + "loss": 1.0454, + "step": 14897 + }, + { + "epoch": 0.9341024515643614, + "grad_norm": 3.8019161224365234, + "learning_rate": 2.269056349865706e-07, + "loss": 1.0469, + "step": 14898 + }, + { + "epoch": 0.9341651514201518, + "grad_norm": 3.55945086479187, + "learning_rate": 2.264756869151441e-07, + "loss": 1.1593, + "step": 14899 + }, + { + "epoch": 0.9342278512759421, + "grad_norm": 3.584559440612793, + "learning_rate": 2.2604614190428474e-07, + "loss": 0.8413, + "step": 14900 + }, + { + "epoch": 0.9342905511317324, + "grad_norm": 3.6402688026428223, + "learning_rate": 2.2561699997171172e-07, + "loss": 1.0555, + "step": 14901 + }, + { + "epoch": 0.9343532509875228, + "grad_norm": 3.6982948780059814, + "learning_rate": 2.251882611351197e-07, + "loss": 1.0348, + "step": 14902 + }, + { + "epoch": 0.9344159508433131, + "grad_norm": 3.194010019302368, + "learning_rate": 2.2475992541219127e-07, + "loss": 1.0746, + "step": 14903 + }, + { + "epoch": 0.9344786506991034, + "grad_norm": 4.018040657043457, + "learning_rate": 2.2433199282058892e-07, + "loss": 1.1522, + "step": 14904 + }, + { + "epoch": 0.9345413505548937, + "grad_norm": 3.231950044631958, + "learning_rate": 2.2390446337796302e-07, + "loss": 1.1811, + "step": 14905 + }, + { + "epoch": 0.9346040504106841, + "grad_norm": 3.120347738265991, + 
"learning_rate": 2.2347733710194496e-07, + "loss": 1.3123, + "step": 14906 + }, + { + "epoch": 0.9346667502664744, + "grad_norm": 3.3445892333984375, + "learning_rate": 2.2305061401014627e-07, + "loss": 1.0794, + "step": 14907 + }, + { + "epoch": 0.9347294501222647, + "grad_norm": 3.484116315841675, + "learning_rate": 2.2262429412016838e-07, + "loss": 0.9684, + "step": 14908 + }, + { + "epoch": 0.934792149978055, + "grad_norm": 3.164264678955078, + "learning_rate": 2.2219837744959284e-07, + "loss": 1.0712, + "step": 14909 + }, + { + "epoch": 0.9348548498338454, + "grad_norm": 3.079310655593872, + "learning_rate": 2.2177286401598218e-07, + "loss": 1.1659, + "step": 14910 + }, + { + "epoch": 0.9349175496896357, + "grad_norm": 3.692728281021118, + "learning_rate": 2.2134775383688468e-07, + "loss": 0.9951, + "step": 14911 + }, + { + "epoch": 0.934980249545426, + "grad_norm": 3.6101441383361816, + "learning_rate": 2.2092304692983402e-07, + "loss": 0.9884, + "step": 14912 + }, + { + "epoch": 0.9350429494012164, + "grad_norm": 3.4565374851226807, + "learning_rate": 2.2049874331234289e-07, + "loss": 0.8158, + "step": 14913 + }, + { + "epoch": 0.9351056492570067, + "grad_norm": 2.966348171234131, + "learning_rate": 2.2007484300191173e-07, + "loss": 1.0197, + "step": 14914 + }, + { + "epoch": 0.935168349112797, + "grad_norm": 3.463097333908081, + "learning_rate": 2.196513460160199e-07, + "loss": 1.1078, + "step": 14915 + }, + { + "epoch": 0.9352310489685873, + "grad_norm": 3.3710381984710693, + "learning_rate": 2.1922825237213453e-07, + "loss": 1.1751, + "step": 14916 + }, + { + "epoch": 0.9352937488243777, + "grad_norm": 3.4711992740631104, + "learning_rate": 2.1880556208770386e-07, + "loss": 1.0432, + "step": 14917 + }, + { + "epoch": 0.935356448680168, + "grad_norm": 3.183377981185913, + "learning_rate": 2.1838327518015734e-07, + "loss": 1.194, + "step": 14918 + }, + { + "epoch": 0.9354191485359584, + "grad_norm": 3.1702263355255127, + "learning_rate": 2.179613916669132e-07, + "loss": 1.1031, + "step": 14919 + }, + { + "epoch": 0.9354818483917487, + "grad_norm": 3.576462745666504, + "learning_rate": 2.1753991156536757e-07, + "loss": 1.0377, + "step": 14920 + }, + { + "epoch": 0.9355445482475391, + "grad_norm": 3.6981699466705322, + "learning_rate": 2.1711883489290431e-07, + "loss": 1.0811, + "step": 14921 + }, + { + "epoch": 0.9356072481033294, + "grad_norm": 3.2848758697509766, + "learning_rate": 2.1669816166688505e-07, + "loss": 1.0427, + "step": 14922 + }, + { + "epoch": 0.9356699479591197, + "grad_norm": 3.3358590602874756, + "learning_rate": 2.1627789190466263e-07, + "loss": 1.2342, + "step": 14923 + }, + { + "epoch": 0.9357326478149101, + "grad_norm": 3.3167593479156494, + "learning_rate": 2.1585802562356761e-07, + "loss": 0.9529, + "step": 14924 + }, + { + "epoch": 0.9357953476707004, + "grad_norm": 3.1789324283599854, + "learning_rate": 2.1543856284091501e-07, + "loss": 0.9916, + "step": 14925 + }, + { + "epoch": 0.9358580475264907, + "grad_norm": 3.515812397003174, + "learning_rate": 2.1501950357400215e-07, + "loss": 1.0731, + "step": 14926 + }, + { + "epoch": 0.935920747382281, + "grad_norm": 3.3942673206329346, + "learning_rate": 2.1460084784011293e-07, + "loss": 1.1386, + "step": 14927 + }, + { + "epoch": 0.9359834472380714, + "grad_norm": 3.2318108081817627, + "learning_rate": 2.1418259565651133e-07, + "loss": 1.1279, + "step": 14928 + }, + { + "epoch": 0.9360461470938617, + "grad_norm": 3.2007999420166016, + "learning_rate": 2.1376474704044693e-07, + "loss": 0.9115, + "step": 14929 
+ }, + { + "epoch": 0.936108846949652, + "grad_norm": 3.6449592113494873, + "learning_rate": 2.1334730200915256e-07, + "loss": 1.119, + "step": 14930 + }, + { + "epoch": 0.9361715468054423, + "grad_norm": 3.2952306270599365, + "learning_rate": 2.1293026057984334e-07, + "loss": 0.9736, + "step": 14931 + }, + { + "epoch": 0.9362342466612327, + "grad_norm": 3.4811580181121826, + "learning_rate": 2.1251362276971665e-07, + "loss": 1.0113, + "step": 14932 + }, + { + "epoch": 0.936296946517023, + "grad_norm": 3.218053102493286, + "learning_rate": 2.1209738859595542e-07, + "loss": 0.9478, + "step": 14933 + }, + { + "epoch": 0.9363596463728133, + "grad_norm": 3.7867414951324463, + "learning_rate": 2.1168155807572476e-07, + "loss": 1.0165, + "step": 14934 + }, + { + "epoch": 0.9364223462286037, + "grad_norm": 3.8223485946655273, + "learning_rate": 2.1126613122617546e-07, + "loss": 1.1281, + "step": 14935 + }, + { + "epoch": 0.936485046084394, + "grad_norm": 3.4508907794952393, + "learning_rate": 2.108511080644382e-07, + "loss": 1.003, + "step": 14936 + }, + { + "epoch": 0.9365477459401843, + "grad_norm": 3.668450355529785, + "learning_rate": 2.1043648860762823e-07, + "loss": 0.9874, + "step": 14937 + }, + { + "epoch": 0.9366104457959746, + "grad_norm": 3.3579752445220947, + "learning_rate": 2.1002227287284514e-07, + "loss": 1.0246, + "step": 14938 + }, + { + "epoch": 0.936673145651765, + "grad_norm": 3.0465877056121826, + "learning_rate": 2.0960846087717092e-07, + "loss": 1.1782, + "step": 14939 + }, + { + "epoch": 0.9367358455075553, + "grad_norm": 3.2067599296569824, + "learning_rate": 2.091950526376707e-07, + "loss": 1.0767, + "step": 14940 + }, + { + "epoch": 0.9367985453633456, + "grad_norm": 3.597139835357666, + "learning_rate": 2.0878204817139536e-07, + "loss": 1.1493, + "step": 14941 + }, + { + "epoch": 0.9368612452191359, + "grad_norm": 3.401686191558838, + "learning_rate": 2.083694474953757e-07, + "loss": 1.0399, + "step": 14942 + }, + { + "epoch": 0.9369239450749264, + "grad_norm": 3.3270444869995117, + "learning_rate": 2.0795725062662697e-07, + "loss": 1.1187, + "step": 14943 + }, + { + "epoch": 0.9369866449307167, + "grad_norm": 3.9363224506378174, + "learning_rate": 2.0754545758214895e-07, + "loss": 1.221, + "step": 14944 + }, + { + "epoch": 0.937049344786507, + "grad_norm": 3.374192476272583, + "learning_rate": 2.0713406837892468e-07, + "loss": 1.118, + "step": 14945 + }, + { + "epoch": 0.9371120446422974, + "grad_norm": 3.420715093612671, + "learning_rate": 2.067230830339184e-07, + "loss": 1.0686, + "step": 14946 + }, + { + "epoch": 0.9371747444980877, + "grad_norm": 3.359309673309326, + "learning_rate": 2.063125015640799e-07, + "loss": 1.0408, + "step": 14947 + }, + { + "epoch": 0.937237444353878, + "grad_norm": 4.795127868652344, + "learning_rate": 2.0590232398634114e-07, + "loss": 1.1219, + "step": 14948 + }, + { + "epoch": 0.9373001442096683, + "grad_norm": 3.5024731159210205, + "learning_rate": 2.0549255031761862e-07, + "loss": 1.2217, + "step": 14949 + }, + { + "epoch": 0.9373628440654587, + "grad_norm": 3.2966818809509277, + "learning_rate": 2.0508318057481212e-07, + "loss": 1.0547, + "step": 14950 + }, + { + "epoch": 0.937425543921249, + "grad_norm": 3.957502603530884, + "learning_rate": 2.0467421477480153e-07, + "loss": 1.0598, + "step": 14951 + }, + { + "epoch": 0.9374882437770393, + "grad_norm": 3.424565553665161, + "learning_rate": 2.0426565293445444e-07, + "loss": 1.0511, + "step": 14952 + }, + { + "epoch": 0.9375509436328296, + "grad_norm": 3.0905721187591553, + 
"learning_rate": 2.038574950706196e-07, + "loss": 1.0317, + "step": 14953 + }, + { + "epoch": 0.93761364348862, + "grad_norm": 3.5126492977142334, + "learning_rate": 2.0344974120013017e-07, + "loss": 1.0877, + "step": 14954 + }, + { + "epoch": 0.9376763433444103, + "grad_norm": 3.256514072418213, + "learning_rate": 2.0304239133980052e-07, + "loss": 1.0619, + "step": 14955 + }, + { + "epoch": 0.9377390432002006, + "grad_norm": 3.179793357849121, + "learning_rate": 2.0263544550643056e-07, + "loss": 0.9504, + "step": 14956 + }, + { + "epoch": 0.937801743055991, + "grad_norm": 4.0252299308776855, + "learning_rate": 2.0222890371680237e-07, + "loss": 1.0382, + "step": 14957 + }, + { + "epoch": 0.9378644429117813, + "grad_norm": 3.876267194747925, + "learning_rate": 2.0182276598768257e-07, + "loss": 1.0758, + "step": 14958 + }, + { + "epoch": 0.9379271427675716, + "grad_norm": 3.268785238265991, + "learning_rate": 2.0141703233582e-07, + "loss": 1.0295, + "step": 14959 + }, + { + "epoch": 0.9379898426233619, + "grad_norm": 3.318519115447998, + "learning_rate": 2.010117027779468e-07, + "loss": 1.1505, + "step": 14960 + }, + { + "epoch": 0.9380525424791523, + "grad_norm": 3.5588481426239014, + "learning_rate": 2.006067773307785e-07, + "loss": 1.2017, + "step": 14961 + }, + { + "epoch": 0.9381152423349426, + "grad_norm": 3.3140783309936523, + "learning_rate": 2.0020225601101395e-07, + "loss": 1.0983, + "step": 14962 + }, + { + "epoch": 0.9381779421907329, + "grad_norm": 3.2149405479431152, + "learning_rate": 1.9979813883533762e-07, + "loss": 1.081, + "step": 14963 + }, + { + "epoch": 0.9382406420465232, + "grad_norm": 3.954921007156372, + "learning_rate": 1.993944258204139e-07, + "loss": 1.0186, + "step": 14964 + }, + { + "epoch": 0.9383033419023136, + "grad_norm": 3.3215203285217285, + "learning_rate": 1.9899111698289175e-07, + "loss": 1.1505, + "step": 14965 + }, + { + "epoch": 0.938366041758104, + "grad_norm": 3.547116279602051, + "learning_rate": 1.985882123394034e-07, + "loss": 1.0661, + "step": 14966 + }, + { + "epoch": 0.9384287416138943, + "grad_norm": 3.3550825119018555, + "learning_rate": 1.9818571190656556e-07, + "loss": 0.921, + "step": 14967 + }, + { + "epoch": 0.9384914414696847, + "grad_norm": 4.009061813354492, + "learning_rate": 1.9778361570097826e-07, + "loss": 1.1339, + "step": 14968 + }, + { + "epoch": 0.938554141325475, + "grad_norm": 3.2850496768951416, + "learning_rate": 1.973819237392205e-07, + "loss": 1.0136, + "step": 14969 + }, + { + "epoch": 0.9386168411812653, + "grad_norm": 3.2722129821777344, + "learning_rate": 1.9698063603786233e-07, + "loss": 1.1071, + "step": 14970 + }, + { + "epoch": 0.9386795410370556, + "grad_norm": 3.419149398803711, + "learning_rate": 1.965797526134494e-07, + "loss": 1.0142, + "step": 14971 + }, + { + "epoch": 0.938742240892846, + "grad_norm": 3.697927713394165, + "learning_rate": 1.961792734825163e-07, + "loss": 0.965, + "step": 14972 + }, + { + "epoch": 0.9388049407486363, + "grad_norm": 3.742479085922241, + "learning_rate": 1.9577919866157757e-07, + "loss": 1.0479, + "step": 14973 + }, + { + "epoch": 0.9388676406044266, + "grad_norm": 3.9885077476501465, + "learning_rate": 1.9537952816713334e-07, + "loss": 1.0244, + "step": 14974 + }, + { + "epoch": 0.938930340460217, + "grad_norm": 3.497570753097534, + "learning_rate": 1.94980262015666e-07, + "loss": 1.1779, + "step": 14975 + }, + { + "epoch": 0.9389930403160073, + "grad_norm": 3.0306479930877686, + "learning_rate": 1.9458140022364013e-07, + "loss": 1.0871, + "step": 14976 + }, + { + 
"epoch": 0.9390557401717976, + "grad_norm": 3.1532461643218994, + "learning_rate": 1.9418294280750482e-07, + "loss": 1.0909, + "step": 14977 + }, + { + "epoch": 0.9391184400275879, + "grad_norm": 3.86362886428833, + "learning_rate": 1.937848897836947e-07, + "loss": 1.0403, + "step": 14978 + }, + { + "epoch": 0.9391811398833783, + "grad_norm": 3.451899290084839, + "learning_rate": 1.9338724116862328e-07, + "loss": 1.0605, + "step": 14979 + }, + { + "epoch": 0.9392438397391686, + "grad_norm": 3.254377841949463, + "learning_rate": 1.929899969786897e-07, + "loss": 1.2676, + "step": 14980 + }, + { + "epoch": 0.9393065395949589, + "grad_norm": 3.5210156440734863, + "learning_rate": 1.925931572302775e-07, + "loss": 1.2113, + "step": 14981 + }, + { + "epoch": 0.9393692394507492, + "grad_norm": 3.3899455070495605, + "learning_rate": 1.9219672193975246e-07, + "loss": 1.1375, + "step": 14982 + }, + { + "epoch": 0.9394319393065396, + "grad_norm": 4.14723014831543, + "learning_rate": 1.9180069112346157e-07, + "loss": 1.0821, + "step": 14983 + }, + { + "epoch": 0.9394946391623299, + "grad_norm": 3.3816921710968018, + "learning_rate": 1.9140506479773836e-07, + "loss": 1.0528, + "step": 14984 + }, + { + "epoch": 0.9395573390181202, + "grad_norm": 3.6914243698120117, + "learning_rate": 1.910098429788998e-07, + "loss": 1.0159, + "step": 14985 + }, + { + "epoch": 0.9396200388739105, + "grad_norm": 3.701117753982544, + "learning_rate": 1.9061502568324396e-07, + "loss": 1.0389, + "step": 14986 + }, + { + "epoch": 0.9396827387297009, + "grad_norm": 3.8375155925750732, + "learning_rate": 1.902206129270523e-07, + "loss": 0.9713, + "step": 14987 + }, + { + "epoch": 0.9397454385854912, + "grad_norm": 3.046344041824341, + "learning_rate": 1.8982660472658954e-07, + "loss": 1.1537, + "step": 14988 + }, + { + "epoch": 0.9398081384412816, + "grad_norm": 3.861152172088623, + "learning_rate": 1.8943300109810826e-07, + "loss": 1.1195, + "step": 14989 + }, + { + "epoch": 0.939870838297072, + "grad_norm": 3.349022150039673, + "learning_rate": 1.8903980205783769e-07, + "loss": 1.1775, + "step": 14990 + }, + { + "epoch": 0.9399335381528623, + "grad_norm": 3.107670783996582, + "learning_rate": 1.886470076219926e-07, + "loss": 1.1079, + "step": 14991 + }, + { + "epoch": 0.9399962380086526, + "grad_norm": 3.088291883468628, + "learning_rate": 1.8825461780677567e-07, + "loss": 0.9598, + "step": 14992 + }, + { + "epoch": 0.940058937864443, + "grad_norm": 3.0848913192749023, + "learning_rate": 1.8786263262836613e-07, + "loss": 1.0297, + "step": 14993 + }, + { + "epoch": 0.9401216377202333, + "grad_norm": 3.896242618560791, + "learning_rate": 1.8747105210292994e-07, + "loss": 0.9661, + "step": 14994 + }, + { + "epoch": 0.9401843375760236, + "grad_norm": 3.4985404014587402, + "learning_rate": 1.870798762466153e-07, + "loss": 1.0002, + "step": 14995 + }, + { + "epoch": 0.9402470374318139, + "grad_norm": 3.666383981704712, + "learning_rate": 1.8668910507555594e-07, + "loss": 1.0027, + "step": 14996 + }, + { + "epoch": 0.9403097372876043, + "grad_norm": 3.4239840507507324, + "learning_rate": 1.8629873860586567e-07, + "loss": 0.9976, + "step": 14997 + }, + { + "epoch": 0.9403724371433946, + "grad_norm": 2.9894673824310303, + "learning_rate": 1.8590877685364495e-07, + "loss": 1.1611, + "step": 14998 + }, + { + "epoch": 0.9404351369991849, + "grad_norm": 3.3729143142700195, + "learning_rate": 1.8551921983497422e-07, + "loss": 1.2034, + "step": 14999 + }, + { + "epoch": 0.9404978368549752, + "grad_norm": 3.334146499633789, + "learning_rate": 
1.8513006756591957e-07, + "loss": 0.9498, + "step": 15000 + }, + { + "epoch": 0.9404978368549752, + "eval_loss": 1.0942617654800415, + "eval_runtime": 144.0381, + "eval_samples_per_second": 4.374, + "eval_steps_per_second": 1.097, + "step": 15000 + }, + { + "epoch": 0.9405605367107656, + "grad_norm": 3.400826930999756, + "learning_rate": 1.8474132006253032e-07, + "loss": 1.0076, + "step": 15001 + }, + { + "epoch": 0.9406232365665559, + "grad_norm": 3.31484055519104, + "learning_rate": 1.843529773408359e-07, + "loss": 1.0786, + "step": 15002 + }, + { + "epoch": 0.9406859364223462, + "grad_norm": 3.5278024673461914, + "learning_rate": 1.8396503941685461e-07, + "loss": 1.082, + "step": 15003 + }, + { + "epoch": 0.9407486362781365, + "grad_norm": 3.6033129692077637, + "learning_rate": 1.8357750630658367e-07, + "loss": 0.9957, + "step": 15004 + }, + { + "epoch": 0.9408113361339269, + "grad_norm": 3.1517481803894043, + "learning_rate": 1.831903780260058e-07, + "loss": 1.0565, + "step": 15005 + }, + { + "epoch": 0.9408740359897172, + "grad_norm": 3.3254542350769043, + "learning_rate": 1.8280365459108385e-07, + "loss": 0.9944, + "step": 15006 + }, + { + "epoch": 0.9409367358455075, + "grad_norm": 3.4338736534118652, + "learning_rate": 1.8241733601776836e-07, + "loss": 1.0704, + "step": 15007 + }, + { + "epoch": 0.9409994357012978, + "grad_norm": 3.504026174545288, + "learning_rate": 1.82031422321991e-07, + "loss": 1.0534, + "step": 15008 + }, + { + "epoch": 0.9410621355570882, + "grad_norm": 3.1613359451293945, + "learning_rate": 1.8164591351966688e-07, + "loss": 1.1234, + "step": 15009 + }, + { + "epoch": 0.9411248354128785, + "grad_norm": 3.575471878051758, + "learning_rate": 1.8126080962669323e-07, + "loss": 1.1878, + "step": 15010 + }, + { + "epoch": 0.9411875352686688, + "grad_norm": 3.4016575813293457, + "learning_rate": 1.8087611065895295e-07, + "loss": 1.1244, + "step": 15011 + }, + { + "epoch": 0.9412502351244593, + "grad_norm": 3.324286460876465, + "learning_rate": 1.8049181663231107e-07, + "loss": 1.0708, + "step": 15012 + }, + { + "epoch": 0.9413129349802496, + "grad_norm": 3.2894725799560547, + "learning_rate": 1.8010792756261497e-07, + "loss": 1.0551, + "step": 15013 + }, + { + "epoch": 0.9413756348360399, + "grad_norm": 3.403911590576172, + "learning_rate": 1.7972444346569752e-07, + "loss": 1.0743, + "step": 15014 + }, + { + "epoch": 0.9414383346918302, + "grad_norm": 3.424740791320801, + "learning_rate": 1.793413643573716e-07, + "loss": 1.1272, + "step": 15015 + }, + { + "epoch": 0.9415010345476206, + "grad_norm": 3.4569427967071533, + "learning_rate": 1.7895869025343792e-07, + "loss": 1.0368, + "step": 15016 + }, + { + "epoch": 0.9415637344034109, + "grad_norm": 3.558885097503662, + "learning_rate": 1.785764211696761e-07, + "loss": 1.0347, + "step": 15017 + }, + { + "epoch": 0.9416264342592012, + "grad_norm": 3.5522353649139404, + "learning_rate": 1.781945571218513e-07, + "loss": 1.1238, + "step": 15018 + }, + { + "epoch": 0.9416891341149916, + "grad_norm": 3.557406187057495, + "learning_rate": 1.7781309812571313e-07, + "loss": 1.0609, + "step": 15019 + }, + { + "epoch": 0.9417518339707819, + "grad_norm": 3.1654531955718994, + "learning_rate": 1.774320441969901e-07, + "loss": 1.1465, + "step": 15020 + }, + { + "epoch": 0.9418145338265722, + "grad_norm": 2.947606086730957, + "learning_rate": 1.770513953513997e-07, + "loss": 1.1122, + "step": 15021 + }, + { + "epoch": 0.9418772336823625, + "grad_norm": 3.5112485885620117, + "learning_rate": 1.7667115160463822e-07, + "loss": 0.9541, 
+ "step": 15022 + }, + { + "epoch": 0.9419399335381529, + "grad_norm": 3.613935708999634, + "learning_rate": 1.7629131297238754e-07, + "loss": 1.0295, + "step": 15023 + }, + { + "epoch": 0.9420026333939432, + "grad_norm": 3.978383779525757, + "learning_rate": 1.7591187947031185e-07, + "loss": 1.099, + "step": 15024 + }, + { + "epoch": 0.9420653332497335, + "grad_norm": 3.4345498085021973, + "learning_rate": 1.755328511140597e-07, + "loss": 1.0171, + "step": 15025 + }, + { + "epoch": 0.9421280331055238, + "grad_norm": 3.609862804412842, + "learning_rate": 1.7515422791926195e-07, + "loss": 0.9964, + "step": 15026 + }, + { + "epoch": 0.9421907329613142, + "grad_norm": 3.7048728466033936, + "learning_rate": 1.7477600990153275e-07, + "loss": 1.1055, + "step": 15027 + }, + { + "epoch": 0.9422534328171045, + "grad_norm": 3.6351685523986816, + "learning_rate": 1.7439819707646966e-07, + "loss": 1.1672, + "step": 15028 + }, + { + "epoch": 0.9423161326728948, + "grad_norm": 3.811415195465088, + "learning_rate": 1.7402078945965352e-07, + "loss": 1.05, + "step": 15029 + }, + { + "epoch": 0.9423788325286852, + "grad_norm": 3.3970887660980225, + "learning_rate": 1.7364378706665076e-07, + "loss": 0.9639, + "step": 15030 + }, + { + "epoch": 0.9424415323844755, + "grad_norm": 3.4123377799987793, + "learning_rate": 1.7326718991300563e-07, + "loss": 1.1895, + "step": 15031 + }, + { + "epoch": 0.9425042322402658, + "grad_norm": 3.7884860038757324, + "learning_rate": 1.7289099801425125e-07, + "loss": 1.1799, + "step": 15032 + }, + { + "epoch": 0.9425669320960561, + "grad_norm": 3.4636497497558594, + "learning_rate": 1.725152113859019e-07, + "loss": 1.1551, + "step": 15033 + }, + { + "epoch": 0.9426296319518465, + "grad_norm": 3.4603586196899414, + "learning_rate": 1.7213983004345403e-07, + "loss": 1.058, + "step": 15034 + }, + { + "epoch": 0.9426923318076369, + "grad_norm": 3.4038941860198975, + "learning_rate": 1.7176485400238752e-07, + "loss": 1.0077, + "step": 15035 + }, + { + "epoch": 0.9427550316634272, + "grad_norm": 3.669585704803467, + "learning_rate": 1.7139028327816998e-07, + "loss": 1.047, + "step": 15036 + }, + { + "epoch": 0.9428177315192176, + "grad_norm": 3.3754053115844727, + "learning_rate": 1.7101611788624462e-07, + "loss": 1.1426, + "step": 15037 + }, + { + "epoch": 0.9428804313750079, + "grad_norm": 3.4986324310302734, + "learning_rate": 1.7064235784204464e-07, + "loss": 1.1549, + "step": 15038 + }, + { + "epoch": 0.9429431312307982, + "grad_norm": 3.799746513366699, + "learning_rate": 1.7026900316098217e-07, + "loss": 1.1244, + "step": 15039 + }, + { + "epoch": 0.9430058310865885, + "grad_norm": 3.726390838623047, + "learning_rate": 1.69896053858456e-07, + "loss": 0.9498, + "step": 15040 + }, + { + "epoch": 0.9430685309423789, + "grad_norm": 3.3466248512268066, + "learning_rate": 1.6952350994984601e-07, + "loss": 1.103, + "step": 15041 + }, + { + "epoch": 0.9431312307981692, + "grad_norm": 3.252396583557129, + "learning_rate": 1.6915137145051552e-07, + "loss": 1.2401, + "step": 15042 + }, + { + "epoch": 0.9431939306539595, + "grad_norm": 3.379995584487915, + "learning_rate": 1.6877963837581112e-07, + "loss": 0.965, + "step": 15043 + }, + { + "epoch": 0.9432566305097498, + "grad_norm": 3.6018002033233643, + "learning_rate": 1.6840831074106502e-07, + "loss": 1.073, + "step": 15044 + }, + { + "epoch": 0.9433193303655402, + "grad_norm": 3.5526509284973145, + "learning_rate": 1.6803738856158934e-07, + "loss": 0.9604, + "step": 15045 + }, + { + "epoch": 0.9433820302213305, + "grad_norm": 
3.6055104732513428, + "learning_rate": 1.6766687185267972e-07, + "loss": 1.0656, + "step": 15046 + }, + { + "epoch": 0.9434447300771208, + "grad_norm": 3.5918853282928467, + "learning_rate": 1.672967606296194e-07, + "loss": 1.1634, + "step": 15047 + }, + { + "epoch": 0.9435074299329111, + "grad_norm": 3.315800189971924, + "learning_rate": 1.6692705490766958e-07, + "loss": 1.1621, + "step": 15048 + }, + { + "epoch": 0.9435701297887015, + "grad_norm": 3.6574442386627197, + "learning_rate": 1.665577547020769e-07, + "loss": 0.9888, + "step": 15049 + }, + { + "epoch": 0.9436328296444918, + "grad_norm": 3.3126721382141113, + "learning_rate": 1.6618886002807144e-07, + "loss": 1.1659, + "step": 15050 + }, + { + "epoch": 0.9436955295002821, + "grad_norm": 3.647982120513916, + "learning_rate": 1.6582037090086767e-07, + "loss": 0.9547, + "step": 15051 + }, + { + "epoch": 0.9437582293560725, + "grad_norm": 2.9035277366638184, + "learning_rate": 1.654522873356612e-07, + "loss": 1.1741, + "step": 15052 + }, + { + "epoch": 0.9438209292118628, + "grad_norm": 3.3764853477478027, + "learning_rate": 1.6508460934763104e-07, + "loss": 1.2368, + "step": 15053 + }, + { + "epoch": 0.9438836290676531, + "grad_norm": 3.344425916671753, + "learning_rate": 1.647173369519428e-07, + "loss": 1.0939, + "step": 15054 + }, + { + "epoch": 0.9439463289234434, + "grad_norm": 3.37990403175354, + "learning_rate": 1.6435047016373996e-07, + "loss": 1.1993, + "step": 15055 + }, + { + "epoch": 0.9440090287792338, + "grad_norm": 3.257678747177124, + "learning_rate": 1.639840089981537e-07, + "loss": 1.0574, + "step": 15056 + }, + { + "epoch": 0.9440717286350241, + "grad_norm": 3.443324089050293, + "learning_rate": 1.636179534702953e-07, + "loss": 1.2051, + "step": 15057 + }, + { + "epoch": 0.9441344284908145, + "grad_norm": 3.487901210784912, + "learning_rate": 1.6325230359526378e-07, + "loss": 1.1158, + "step": 15058 + }, + { + "epoch": 0.9441971283466049, + "grad_norm": 3.4506585597991943, + "learning_rate": 1.6288705938813598e-07, + "loss": 1.0199, + "step": 15059 + }, + { + "epoch": 0.9442598282023952, + "grad_norm": 3.3863656520843506, + "learning_rate": 1.625222208639754e-07, + "loss": 1.2219, + "step": 15060 + }, + { + "epoch": 0.9443225280581855, + "grad_norm": 3.118319272994995, + "learning_rate": 1.621577880378278e-07, + "loss": 1.032, + "step": 15061 + }, + { + "epoch": 0.9443852279139758, + "grad_norm": 3.576772928237915, + "learning_rate": 1.617937609247222e-07, + "loss": 1.0772, + "step": 15062 + }, + { + "epoch": 0.9444479277697662, + "grad_norm": 3.83198881149292, + "learning_rate": 1.6143013953967334e-07, + "loss": 0.9873, + "step": 15063 + }, + { + "epoch": 0.9445106276255565, + "grad_norm": 3.4923946857452393, + "learning_rate": 1.6106692389767253e-07, + "loss": 1.0346, + "step": 15064 + }, + { + "epoch": 0.9445733274813468, + "grad_norm": 3.445279359817505, + "learning_rate": 1.6070411401370335e-07, + "loss": 0.9936, + "step": 15065 + }, + { + "epoch": 0.9446360273371371, + "grad_norm": 3.249285936355591, + "learning_rate": 1.6034170990272602e-07, + "loss": 1.0138, + "step": 15066 + }, + { + "epoch": 0.9446987271929275, + "grad_norm": 4.113312721252441, + "learning_rate": 1.599797115796864e-07, + "loss": 1.3218, + "step": 15067 + }, + { + "epoch": 0.9447614270487178, + "grad_norm": 4.030782222747803, + "learning_rate": 1.596181190595125e-07, + "loss": 0.9958, + "step": 15068 + }, + { + "epoch": 0.9448241269045081, + "grad_norm": 3.648390531539917, + "learning_rate": 1.5925693235711804e-07, + "loss": 0.9903, + 
"step": 15069 + }, + { + "epoch": 0.9448868267602984, + "grad_norm": 3.4091830253601074, + "learning_rate": 1.5889615148739656e-07, + "loss": 0.9583, + "step": 15070 + }, + { + "epoch": 0.9449495266160888, + "grad_norm": 3.7463932037353516, + "learning_rate": 1.5853577646522733e-07, + "loss": 1.0841, + "step": 15071 + }, + { + "epoch": 0.9450122264718791, + "grad_norm": 3.217775583267212, + "learning_rate": 1.581758073054729e-07, + "loss": 1.1542, + "step": 15072 + }, + { + "epoch": 0.9450749263276694, + "grad_norm": 3.5835094451904297, + "learning_rate": 1.5781624402297913e-07, + "loss": 1.1348, + "step": 15073 + }, + { + "epoch": 0.9451376261834598, + "grad_norm": 3.5416486263275146, + "learning_rate": 1.5745708663257199e-07, + "loss": 1.0769, + "step": 15074 + }, + { + "epoch": 0.9452003260392501, + "grad_norm": 3.217649459838867, + "learning_rate": 1.5709833514906403e-07, + "loss": 1.1521, + "step": 15075 + }, + { + "epoch": 0.9452630258950404, + "grad_norm": 3.33685564994812, + "learning_rate": 1.5673998958725122e-07, + "loss": 1.0714, + "step": 15076 + }, + { + "epoch": 0.9453257257508307, + "grad_norm": 3.5818305015563965, + "learning_rate": 1.5638204996191176e-07, + "loss": 1.0478, + "step": 15077 + }, + { + "epoch": 0.9453884256066211, + "grad_norm": 3.2161431312561035, + "learning_rate": 1.560245162878049e-07, + "loss": 0.9772, + "step": 15078 + }, + { + "epoch": 0.9454511254624114, + "grad_norm": 3.4922239780426025, + "learning_rate": 1.556673885796778e-07, + "loss": 1.0774, + "step": 15079 + }, + { + "epoch": 0.9455138253182017, + "grad_norm": 3.7051405906677246, + "learning_rate": 1.5531066685225749e-07, + "loss": 1.0531, + "step": 15080 + }, + { + "epoch": 0.9455765251739922, + "grad_norm": 3.8625783920288086, + "learning_rate": 1.549543511202556e-07, + "loss": 0.9496, + "step": 15081 + }, + { + "epoch": 0.9456392250297825, + "grad_norm": 3.898132085800171, + "learning_rate": 1.5459844139836476e-07, + "loss": 1.1966, + "step": 15082 + }, + { + "epoch": 0.9457019248855728, + "grad_norm": 3.2594385147094727, + "learning_rate": 1.5424293770126554e-07, + "loss": 1.1799, + "step": 15083 + }, + { + "epoch": 0.9457646247413631, + "grad_norm": 3.3955483436584473, + "learning_rate": 1.5388784004361723e-07, + "loss": 1.1448, + "step": 15084 + }, + { + "epoch": 0.9458273245971535, + "grad_norm": 3.163191318511963, + "learning_rate": 1.5353314844006372e-07, + "loss": 1.1249, + "step": 15085 + }, + { + "epoch": 0.9458900244529438, + "grad_norm": 3.2634236812591553, + "learning_rate": 1.5317886290523332e-07, + "loss": 1.1979, + "step": 15086 + }, + { + "epoch": 0.9459527243087341, + "grad_norm": 3.4830825328826904, + "learning_rate": 1.5282498345373764e-07, + "loss": 0.9905, + "step": 15087 + }, + { + "epoch": 0.9460154241645244, + "grad_norm": 3.3197498321533203, + "learning_rate": 1.5247151010016947e-07, + "loss": 1.0225, + "step": 15088 + }, + { + "epoch": 0.9460781240203148, + "grad_norm": 3.0802834033966064, + "learning_rate": 1.5211844285910605e-07, + "loss": 1.254, + "step": 15089 + }, + { + "epoch": 0.9461408238761051, + "grad_norm": 3.52634859085083, + "learning_rate": 1.517657817451068e-07, + "loss": 1.0015, + "step": 15090 + }, + { + "epoch": 0.9462035237318954, + "grad_norm": 3.5454530715942383, + "learning_rate": 1.514135267727179e-07, + "loss": 1.1465, + "step": 15091 + }, + { + "epoch": 0.9462662235876858, + "grad_norm": 3.481858491897583, + "learning_rate": 1.5106167795646553e-07, + "loss": 1.2967, + "step": 15092 + }, + { + "epoch": 0.9463289234434761, + "grad_norm": 
3.244314432144165, + "learning_rate": 1.5071023531085914e-07, + "loss": 1.2654, + "step": 15093 + }, + { + "epoch": 0.9463916232992664, + "grad_norm": 3.780566930770874, + "learning_rate": 1.5035919885039274e-07, + "loss": 0.9874, + "step": 15094 + }, + { + "epoch": 0.9464543231550567, + "grad_norm": 3.5093259811401367, + "learning_rate": 1.500085685895436e-07, + "loss": 0.9192, + "step": 15095 + }, + { + "epoch": 0.9465170230108471, + "grad_norm": 3.2729365825653076, + "learning_rate": 1.4965834454277128e-07, + "loss": 1.0075, + "step": 15096 + }, + { + "epoch": 0.9465797228666374, + "grad_norm": 3.327152967453003, + "learning_rate": 1.4930852672451758e-07, + "loss": 1.2453, + "step": 15097 + }, + { + "epoch": 0.9466424227224277, + "grad_norm": 3.917937755584717, + "learning_rate": 1.4895911514921202e-07, + "loss": 0.9238, + "step": 15098 + }, + { + "epoch": 0.946705122578218, + "grad_norm": 3.3870229721069336, + "learning_rate": 1.4861010983126202e-07, + "loss": 1.0971, + "step": 15099 + }, + { + "epoch": 0.9467678224340084, + "grad_norm": 3.667229652404785, + "learning_rate": 1.4826151078506156e-07, + "loss": 1.0535, + "step": 15100 + }, + { + "epoch": 0.9468305222897987, + "grad_norm": 3.433054208755493, + "learning_rate": 1.47913318024987e-07, + "loss": 1.1153, + "step": 15101 + }, + { + "epoch": 0.946893222145589, + "grad_norm": 3.4833483695983887, + "learning_rate": 1.4756553156539677e-07, + "loss": 1.0252, + "step": 15102 + }, + { + "epoch": 0.9469559220013793, + "grad_norm": 3.335742473602295, + "learning_rate": 1.47218151420635e-07, + "loss": 1.1229, + "step": 15103 + }, + { + "epoch": 0.9470186218571697, + "grad_norm": 3.4920055866241455, + "learning_rate": 1.4687117760502579e-07, + "loss": 1.1571, + "step": 15104 + }, + { + "epoch": 0.9470813217129601, + "grad_norm": 3.451115846633911, + "learning_rate": 1.465246101328799e-07, + "loss": 0.9618, + "step": 15105 + }, + { + "epoch": 0.9471440215687504, + "grad_norm": 3.4291646480560303, + "learning_rate": 1.4617844901849032e-07, + "loss": 1.0653, + "step": 15106 + }, + { + "epoch": 0.9472067214245408, + "grad_norm": 3.177621841430664, + "learning_rate": 1.4583269427613121e-07, + "loss": 1.1007, + "step": 15107 + }, + { + "epoch": 0.9472694212803311, + "grad_norm": 3.3090293407440186, + "learning_rate": 1.454873459200612e-07, + "loss": 0.9672, + "step": 15108 + }, + { + "epoch": 0.9473321211361214, + "grad_norm": 3.747659921646118, + "learning_rate": 1.4514240396452438e-07, + "loss": 0.8275, + "step": 15109 + }, + { + "epoch": 0.9473948209919117, + "grad_norm": 3.221844434738159, + "learning_rate": 1.447978684237461e-07, + "loss": 1.1246, + "step": 15110 + }, + { + "epoch": 0.9474575208477021, + "grad_norm": 3.3314592838287354, + "learning_rate": 1.4445373931193273e-07, + "loss": 0.9703, + "step": 15111 + }, + { + "epoch": 0.9475202207034924, + "grad_norm": 3.8679637908935547, + "learning_rate": 1.441100166432774e-07, + "loss": 1.1194, + "step": 15112 + }, + { + "epoch": 0.9475829205592827, + "grad_norm": 3.112004518508911, + "learning_rate": 1.437667004319565e-07, + "loss": 0.9882, + "step": 15113 + }, + { + "epoch": 0.947645620415073, + "grad_norm": 3.434248685836792, + "learning_rate": 1.4342379069212654e-07, + "loss": 1.0628, + "step": 15114 + }, + { + "epoch": 0.9477083202708634, + "grad_norm": 4.178025722503662, + "learning_rate": 1.4308128743792947e-07, + "loss": 0.9782, + "step": 15115 + }, + { + "epoch": 0.9477710201266537, + "grad_norm": 4.068634986877441, + "learning_rate": 1.4273919068349184e-07, + "loss": 1.0499, + 
"step": 15116 + }, + { + "epoch": 0.947833719982444, + "grad_norm": 3.460834264755249, + "learning_rate": 1.42397500442919e-07, + "loss": 1.0647, + "step": 15117 + }, + { + "epoch": 0.9478964198382344, + "grad_norm": 3.5416343212127686, + "learning_rate": 1.4205621673030522e-07, + "loss": 1.1159, + "step": 15118 + }, + { + "epoch": 0.9479591196940247, + "grad_norm": 3.502016544342041, + "learning_rate": 1.417153395597226e-07, + "loss": 1.2537, + "step": 15119 + }, + { + "epoch": 0.948021819549815, + "grad_norm": 3.757854700088501, + "learning_rate": 1.413748689452299e-07, + "loss": 1.1344, + "step": 15120 + }, + { + "epoch": 0.9480845194056053, + "grad_norm": 3.3775651454925537, + "learning_rate": 1.4103480490086808e-07, + "loss": 1.1182, + "step": 15121 + }, + { + "epoch": 0.9481472192613957, + "grad_norm": 3.8002870082855225, + "learning_rate": 1.4069514744066149e-07, + "loss": 0.9243, + "step": 15122 + }, + { + "epoch": 0.948209919117186, + "grad_norm": 3.376404285430908, + "learning_rate": 1.4035589657861782e-07, + "loss": 1.1552, + "step": 15123 + }, + { + "epoch": 0.9482726189729763, + "grad_norm": 3.7961108684539795, + "learning_rate": 1.400170523287281e-07, + "loss": 1.0715, + "step": 15124 + }, + { + "epoch": 0.9483353188287666, + "grad_norm": 3.6325812339782715, + "learning_rate": 1.3967861470496558e-07, + "loss": 1.0119, + "step": 15125 + }, + { + "epoch": 0.948398018684557, + "grad_norm": 3.326674222946167, + "learning_rate": 1.3934058372128577e-07, + "loss": 1.0036, + "step": 15126 + }, + { + "epoch": 0.9484607185403473, + "grad_norm": 3.8528342247009277, + "learning_rate": 1.390029593916331e-07, + "loss": 1.0816, + "step": 15127 + }, + { + "epoch": 0.9485234183961377, + "grad_norm": 3.4441168308258057, + "learning_rate": 1.3866574172992865e-07, + "loss": 1.1263, + "step": 15128 + }, + { + "epoch": 0.9485861182519281, + "grad_norm": 3.4823849201202393, + "learning_rate": 1.3832893075007902e-07, + "loss": 1.2337, + "step": 15129 + }, + { + "epoch": 0.9486488181077184, + "grad_norm": 3.665038824081421, + "learning_rate": 1.3799252646597428e-07, + "loss": 1.1852, + "step": 15130 + }, + { + "epoch": 0.9487115179635087, + "grad_norm": 3.2227485179901123, + "learning_rate": 1.3765652889148996e-07, + "loss": 0.9965, + "step": 15131 + }, + { + "epoch": 0.948774217819299, + "grad_norm": 3.3048343658447266, + "learning_rate": 1.3732093804048053e-07, + "loss": 1.0267, + "step": 15132 + }, + { + "epoch": 0.9488369176750894, + "grad_norm": 3.4517910480499268, + "learning_rate": 1.3698575392678492e-07, + "loss": 1.0617, + "step": 15133 + }, + { + "epoch": 0.9488996175308797, + "grad_norm": 3.460009813308716, + "learning_rate": 1.3665097656422986e-07, + "loss": 1.014, + "step": 15134 + }, + { + "epoch": 0.94896231738667, + "grad_norm": 3.4041194915771484, + "learning_rate": 1.3631660596661765e-07, + "loss": 1.1073, + "step": 15135 + }, + { + "epoch": 0.9490250172424604, + "grad_norm": 3.415764331817627, + "learning_rate": 1.359826421477406e-07, + "loss": 1.1299, + "step": 15136 + }, + { + "epoch": 0.9490877170982507, + "grad_norm": 3.4048655033111572, + "learning_rate": 1.3564908512136877e-07, + "loss": 0.971, + "step": 15137 + }, + { + "epoch": 0.949150416954041, + "grad_norm": 4.041733741760254, + "learning_rate": 1.3531593490126004e-07, + "loss": 0.8359, + "step": 15138 + }, + { + "epoch": 0.9492131168098313, + "grad_norm": 3.4893951416015625, + "learning_rate": 1.3498319150115347e-07, + "loss": 1.1829, + "step": 15139 + }, + { + "epoch": 0.9492758166656217, + "grad_norm": 
3.247810125350952, + "learning_rate": 1.3465085493477027e-07, + "loss": 1.1295, + "step": 15140 + }, + { + "epoch": 0.949338516521412, + "grad_norm": 3.398587703704834, + "learning_rate": 1.3431892521581613e-07, + "loss": 1.1554, + "step": 15141 + }, + { + "epoch": 0.9494012163772023, + "grad_norm": 3.2681021690368652, + "learning_rate": 1.3398740235798124e-07, + "loss": 1.046, + "step": 15142 + }, + { + "epoch": 0.9494639162329926, + "grad_norm": 3.366421937942505, + "learning_rate": 1.3365628637493688e-07, + "loss": 1.0052, + "step": 15143 + }, + { + "epoch": 0.949526616088783, + "grad_norm": 3.3347668647766113, + "learning_rate": 1.333255772803377e-07, + "loss": 1.0553, + "step": 15144 + }, + { + "epoch": 0.9495893159445733, + "grad_norm": 3.5800914764404297, + "learning_rate": 1.3299527508782273e-07, + "loss": 1.0867, + "step": 15145 + }, + { + "epoch": 0.9496520158003636, + "grad_norm": 3.4946751594543457, + "learning_rate": 1.3266537981101335e-07, + "loss": 1.2551, + "step": 15146 + }, + { + "epoch": 0.949714715656154, + "grad_norm": 3.3624813556671143, + "learning_rate": 1.3233589146351533e-07, + "loss": 1.0215, + "step": 15147 + }, + { + "epoch": 0.9497774155119443, + "grad_norm": 3.928112268447876, + "learning_rate": 1.3200681005891447e-07, + "loss": 1.0436, + "step": 15148 + }, + { + "epoch": 0.9498401153677346, + "grad_norm": 3.10012149810791, + "learning_rate": 1.3167813561078546e-07, + "loss": 1.2047, + "step": 15149 + }, + { + "epoch": 0.9499028152235249, + "grad_norm": 3.2840607166290283, + "learning_rate": 1.3134986813267968e-07, + "loss": 1.0164, + "step": 15150 + }, + { + "epoch": 0.9499655150793154, + "grad_norm": 3.4463953971862793, + "learning_rate": 1.3102200763813744e-07, + "loss": 1.1496, + "step": 15151 + }, + { + "epoch": 0.9500282149351057, + "grad_norm": 3.2733867168426514, + "learning_rate": 1.306945541406779e-07, + "loss": 1.2126, + "step": 15152 + }, + { + "epoch": 0.950090914790896, + "grad_norm": 3.521425724029541, + "learning_rate": 1.3036750765380578e-07, + "loss": 0.9661, + "step": 15153 + }, + { + "epoch": 0.9501536146466864, + "grad_norm": 3.537353277206421, + "learning_rate": 1.3004086819100926e-07, + "loss": 1.0591, + "step": 15154 + }, + { + "epoch": 0.9502163145024767, + "grad_norm": 3.5408029556274414, + "learning_rate": 1.2971463576575748e-07, + "loss": 1.0637, + "step": 15155 + }, + { + "epoch": 0.950279014358267, + "grad_norm": 2.913888692855835, + "learning_rate": 1.293888103915064e-07, + "loss": 1.36, + "step": 15156 + }, + { + "epoch": 0.9503417142140573, + "grad_norm": 3.623380661010742, + "learning_rate": 1.2906339208169084e-07, + "loss": 0.8753, + "step": 15157 + }, + { + "epoch": 0.9504044140698477, + "grad_norm": 3.530423879623413, + "learning_rate": 1.2873838084973334e-07, + "loss": 1.1346, + "step": 15158 + }, + { + "epoch": 0.950467113925638, + "grad_norm": 3.135420322418213, + "learning_rate": 1.2841377670903432e-07, + "loss": 1.027, + "step": 15159 + }, + { + "epoch": 0.9505298137814283, + "grad_norm": 3.121556282043457, + "learning_rate": 1.2808957967298307e-07, + "loss": 1.1391, + "step": 15160 + }, + { + "epoch": 0.9505925136372186, + "grad_norm": 3.5753493309020996, + "learning_rate": 1.277657897549489e-07, + "loss": 1.1191, + "step": 15161 + }, + { + "epoch": 0.950655213493009, + "grad_norm": 3.7755682468414307, + "learning_rate": 1.2744240696828447e-07, + "loss": 1.0723, + "step": 15162 + }, + { + "epoch": 0.9507179133487993, + "grad_norm": 3.2222886085510254, + "learning_rate": 1.2711943132632576e-07, + "loss": 1.0657, + 
"step": 15163 + }, + { + "epoch": 0.9507806132045896, + "grad_norm": 3.6347410678863525, + "learning_rate": 1.2679686284239435e-07, + "loss": 0.9581, + "step": 15164 + }, + { + "epoch": 0.95084331306038, + "grad_norm": 3.4017627239227295, + "learning_rate": 1.2647470152979068e-07, + "loss": 0.9906, + "step": 15165 + }, + { + "epoch": 0.9509060129161703, + "grad_norm": 3.2265169620513916, + "learning_rate": 1.2615294740180085e-07, + "loss": 1.1942, + "step": 15166 + }, + { + "epoch": 0.9509687127719606, + "grad_norm": 3.374142646789551, + "learning_rate": 1.258316004716953e-07, + "loss": 1.2061, + "step": 15167 + }, + { + "epoch": 0.9510314126277509, + "grad_norm": 3.561723232269287, + "learning_rate": 1.2551066075272568e-07, + "loss": 1.0275, + "step": 15168 + }, + { + "epoch": 0.9510941124835413, + "grad_norm": 3.121415138244629, + "learning_rate": 1.2519012825812804e-07, + "loss": 1.1331, + "step": 15169 + }, + { + "epoch": 0.9511568123393316, + "grad_norm": 3.370990037918091, + "learning_rate": 1.248700030011196e-07, + "loss": 1.002, + "step": 15170 + }, + { + "epoch": 0.9512195121951219, + "grad_norm": 3.0452451705932617, + "learning_rate": 1.245502849949043e-07, + "loss": 1.1516, + "step": 15171 + }, + { + "epoch": 0.9512822120509122, + "grad_norm": 3.5460548400878906, + "learning_rate": 1.2423097425266594e-07, + "loss": 1.0292, + "step": 15172 + }, + { + "epoch": 0.9513449119067026, + "grad_norm": 3.218895196914673, + "learning_rate": 1.2391207078757406e-07, + "loss": 1.1258, + "step": 15173 + }, + { + "epoch": 0.951407611762493, + "grad_norm": 3.4222195148468018, + "learning_rate": 1.2359357461277921e-07, + "loss": 1.145, + "step": 15174 + }, + { + "epoch": 0.9514703116182833, + "grad_norm": 3.0812933444976807, + "learning_rate": 1.2327548574141756e-07, + "loss": 1.2, + "step": 15175 + }, + { + "epoch": 0.9515330114740737, + "grad_norm": 3.74760103225708, + "learning_rate": 1.229578041866053e-07, + "loss": 0.9611, + "step": 15176 + }, + { + "epoch": 0.951595711329864, + "grad_norm": 3.540253162384033, + "learning_rate": 1.2264052996144415e-07, + "loss": 1.0736, + "step": 15177 + }, + { + "epoch": 0.9516584111856543, + "grad_norm": 3.6027798652648926, + "learning_rate": 1.2232366307902034e-07, + "loss": 0.9982, + "step": 15178 + }, + { + "epoch": 0.9517211110414446, + "grad_norm": 3.1432909965515137, + "learning_rate": 1.2200720355239893e-07, + "loss": 1.0299, + "step": 15179 + }, + { + "epoch": 0.951783810897235, + "grad_norm": 3.3772077560424805, + "learning_rate": 1.2169115139463283e-07, + "loss": 1.0723, + "step": 15180 + }, + { + "epoch": 0.9518465107530253, + "grad_norm": 3.300978183746338, + "learning_rate": 1.2137550661875386e-07, + "loss": 0.9768, + "step": 15181 + }, + { + "epoch": 0.9519092106088156, + "grad_norm": 3.2258992195129395, + "learning_rate": 1.2106026923778046e-07, + "loss": 1.181, + "step": 15182 + }, + { + "epoch": 0.9519719104646059, + "grad_norm": 3.110187292098999, + "learning_rate": 1.2074543926471338e-07, + "loss": 1.1082, + "step": 15183 + }, + { + "epoch": 0.9520346103203963, + "grad_norm": 3.681122064590454, + "learning_rate": 1.2043101671253553e-07, + "loss": 1.1093, + "step": 15184 + }, + { + "epoch": 0.9520973101761866, + "grad_norm": 3.679325580596924, + "learning_rate": 1.2011700159421436e-07, + "loss": 1.0396, + "step": 15185 + }, + { + "epoch": 0.9521600100319769, + "grad_norm": 3.7614572048187256, + "learning_rate": 1.1980339392269946e-07, + "loss": 0.8753, + "step": 15186 + }, + { + "epoch": 0.9522227098877672, + "grad_norm": 
3.27128529548645, + "learning_rate": 1.1949019371092386e-07, + "loss": 1.095, + "step": 15187 + }, + { + "epoch": 0.9522854097435576, + "grad_norm": 3.5154902935028076, + "learning_rate": 1.1917740097180386e-07, + "loss": 1.1205, + "step": 15188 + }, + { + "epoch": 0.9523481095993479, + "grad_norm": 3.42278790473938, + "learning_rate": 1.1886501571824028e-07, + "loss": 1.0854, + "step": 15189 + }, + { + "epoch": 0.9524108094551382, + "grad_norm": 3.3250975608825684, + "learning_rate": 1.1855303796311502e-07, + "loss": 1.1856, + "step": 15190 + }, + { + "epoch": 0.9524735093109286, + "grad_norm": 3.5545825958251953, + "learning_rate": 1.1824146771929334e-07, + "loss": 1.1347, + "step": 15191 + }, + { + "epoch": 0.9525362091667189, + "grad_norm": 3.4140212535858154, + "learning_rate": 1.17930304999625e-07, + "loss": 1.1376, + "step": 15192 + }, + { + "epoch": 0.9525989090225092, + "grad_norm": 3.5343017578125, + "learning_rate": 1.1761954981694301e-07, + "loss": 1.1164, + "step": 15193 + }, + { + "epoch": 0.9526616088782995, + "grad_norm": 3.9485068321228027, + "learning_rate": 1.1730920218406161e-07, + "loss": 1.0244, + "step": 15194 + }, + { + "epoch": 0.9527243087340899, + "grad_norm": 3.5210444927215576, + "learning_rate": 1.1699926211378054e-07, + "loss": 1.2109, + "step": 15195 + }, + { + "epoch": 0.9527870085898802, + "grad_norm": 3.1449408531188965, + "learning_rate": 1.1668972961888181e-07, + "loss": 1.0808, + "step": 15196 + }, + { + "epoch": 0.9528497084456706, + "grad_norm": 3.568805456161499, + "learning_rate": 1.1638060471212964e-07, + "loss": 1.0102, + "step": 15197 + }, + { + "epoch": 0.952912408301461, + "grad_norm": 3.112870216369629, + "learning_rate": 1.1607188740627384e-07, + "loss": 1.0879, + "step": 15198 + }, + { + "epoch": 0.9529751081572513, + "grad_norm": 3.868549108505249, + "learning_rate": 1.1576357771404423e-07, + "loss": 1.0916, + "step": 15199 + }, + { + "epoch": 0.9530378080130416, + "grad_norm": 3.452388048171997, + "learning_rate": 1.1545567564815619e-07, + "loss": 1.2133, + "step": 15200 + }, + { + "epoch": 0.9531005078688319, + "grad_norm": 3.49778151512146, + "learning_rate": 1.1514818122130844e-07, + "loss": 0.9207, + "step": 15201 + }, + { + "epoch": 0.9531632077246223, + "grad_norm": 3.216926097869873, + "learning_rate": 1.1484109444618085e-07, + "loss": 1.0237, + "step": 15202 + }, + { + "epoch": 0.9532259075804126, + "grad_norm": 3.2647953033447266, + "learning_rate": 1.1453441533543775e-07, + "loss": 1.0217, + "step": 15203 + }, + { + "epoch": 0.9532886074362029, + "grad_norm": 3.3658971786499023, + "learning_rate": 1.1422814390172899e-07, + "loss": 1.0568, + "step": 15204 + }, + { + "epoch": 0.9533513072919932, + "grad_norm": 3.558492422103882, + "learning_rate": 1.1392228015768114e-07, + "loss": 1.0161, + "step": 15205 + }, + { + "epoch": 0.9534140071477836, + "grad_norm": 3.4765405654907227, + "learning_rate": 1.136168241159108e-07, + "loss": 1.0698, + "step": 15206 + }, + { + "epoch": 0.9534767070035739, + "grad_norm": 4.072548866271973, + "learning_rate": 1.1331177578901564e-07, + "loss": 0.963, + "step": 15207 + }, + { + "epoch": 0.9535394068593642, + "grad_norm": 3.1407644748687744, + "learning_rate": 1.130071351895734e-07, + "loss": 1.147, + "step": 15208 + }, + { + "epoch": 0.9536021067151546, + "grad_norm": 3.400461196899414, + "learning_rate": 1.1270290233014847e-07, + "loss": 1.135, + "step": 15209 + }, + { + "epoch": 0.9536648065709449, + "grad_norm": 3.2438693046569824, + "learning_rate": 1.1239907722328858e-07, + "loss": 1.1692, + 
"step": 15210 + }, + { + "epoch": 0.9537275064267352, + "grad_norm": 3.3879504203796387, + "learning_rate": 1.1209565988152149e-07, + "loss": 1.0511, + "step": 15211 + }, + { + "epoch": 0.9537902062825255, + "grad_norm": 3.4532418251037598, + "learning_rate": 1.1179265031736163e-07, + "loss": 1.1225, + "step": 15212 + }, + { + "epoch": 0.9538529061383159, + "grad_norm": 3.6473002433776855, + "learning_rate": 1.1149004854330569e-07, + "loss": 1.1968, + "step": 15213 + }, + { + "epoch": 0.9539156059941062, + "grad_norm": 3.4704251289367676, + "learning_rate": 1.1118785457183034e-07, + "loss": 1.1379, + "step": 15214 + }, + { + "epoch": 0.9539783058498965, + "grad_norm": 3.339953899383545, + "learning_rate": 1.1088606841540006e-07, + "loss": 1.2858, + "step": 15215 + }, + { + "epoch": 0.9540410057056868, + "grad_norm": 3.340817451477051, + "learning_rate": 1.1058469008646156e-07, + "loss": 1.0427, + "step": 15216 + }, + { + "epoch": 0.9541037055614772, + "grad_norm": 3.33793044090271, + "learning_rate": 1.1028371959744045e-07, + "loss": 1.1337, + "step": 15217 + }, + { + "epoch": 0.9541664054172675, + "grad_norm": 3.559131145477295, + "learning_rate": 1.0998315696075123e-07, + "loss": 1.0119, + "step": 15218 + }, + { + "epoch": 0.9542291052730578, + "grad_norm": 3.4499094486236572, + "learning_rate": 1.0968300218878957e-07, + "loss": 1.0847, + "step": 15219 + }, + { + "epoch": 0.9542918051288483, + "grad_norm": 3.4417667388916016, + "learning_rate": 1.0938325529393223e-07, + "loss": 1.0835, + "step": 15220 + }, + { + "epoch": 0.9543545049846386, + "grad_norm": 3.866844415664673, + "learning_rate": 1.0908391628854042e-07, + "loss": 1.0974, + "step": 15221 + }, + { + "epoch": 0.9544172048404289, + "grad_norm": 3.3855502605438232, + "learning_rate": 1.0878498518496095e-07, + "loss": 1.0208, + "step": 15222 + }, + { + "epoch": 0.9544799046962192, + "grad_norm": 3.50639271736145, + "learning_rate": 1.084864619955206e-07, + "loss": 0.9846, + "step": 15223 + }, + { + "epoch": 0.9545426045520096, + "grad_norm": 3.280289649963379, + "learning_rate": 1.0818834673252954e-07, + "loss": 1.0553, + "step": 15224 + }, + { + "epoch": 0.9546053044077999, + "grad_norm": 3.7816109657287598, + "learning_rate": 1.0789063940828348e-07, + "loss": 1.0355, + "step": 15225 + }, + { + "epoch": 0.9546680042635902, + "grad_norm": 2.8567376136779785, + "learning_rate": 1.0759334003505928e-07, + "loss": 1.219, + "step": 15226 + }, + { + "epoch": 0.9547307041193805, + "grad_norm": 3.671502113342285, + "learning_rate": 1.0729644862511824e-07, + "loss": 1.1009, + "step": 15227 + }, + { + "epoch": 0.9547934039751709, + "grad_norm": 3.0374579429626465, + "learning_rate": 1.0699996519070388e-07, + "loss": 1.0773, + "step": 15228 + }, + { + "epoch": 0.9548561038309612, + "grad_norm": 3.59403657913208, + "learning_rate": 1.06703889744042e-07, + "loss": 1.1797, + "step": 15229 + }, + { + "epoch": 0.9549188036867515, + "grad_norm": 3.2577197551727295, + "learning_rate": 1.0640822229734505e-07, + "loss": 1.1707, + "step": 15230 + }, + { + "epoch": 0.9549815035425419, + "grad_norm": 3.6244142055511475, + "learning_rate": 1.0611296286280437e-07, + "loss": 1.1418, + "step": 15231 + }, + { + "epoch": 0.9550442033983322, + "grad_norm": 3.5254592895507812, + "learning_rate": 1.0581811145259691e-07, + "loss": 1.097, + "step": 15232 + }, + { + "epoch": 0.9551069032541225, + "grad_norm": 3.4374732971191406, + "learning_rate": 1.0552366807888292e-07, + "loss": 1.082, + "step": 15233 + }, + { + "epoch": 0.9551696031099128, + "grad_norm": 
3.443310260772705, + "learning_rate": 1.0522963275380494e-07, + "loss": 1.1309, + "step": 15234 + }, + { + "epoch": 0.9552323029657032, + "grad_norm": 3.439516544342041, + "learning_rate": 1.0493600548948879e-07, + "loss": 1.1315, + "step": 15235 + }, + { + "epoch": 0.9552950028214935, + "grad_norm": 3.398674964904785, + "learning_rate": 1.046427862980437e-07, + "loss": 1.1275, + "step": 15236 + }, + { + "epoch": 0.9553577026772838, + "grad_norm": 3.896296501159668, + "learning_rate": 1.0434997519156331e-07, + "loss": 1.1488, + "step": 15237 + }, + { + "epoch": 0.9554204025330741, + "grad_norm": 3.5583293437957764, + "learning_rate": 1.0405757218212131e-07, + "loss": 1.1334, + "step": 15238 + }, + { + "epoch": 0.9554831023888645, + "grad_norm": 2.9542298316955566, + "learning_rate": 1.0376557728177694e-07, + "loss": 1.0511, + "step": 15239 + }, + { + "epoch": 0.9555458022446548, + "grad_norm": 3.1029670238494873, + "learning_rate": 1.0347399050257278e-07, + "loss": 1.112, + "step": 15240 + }, + { + "epoch": 0.9556085021004451, + "grad_norm": 3.5359458923339844, + "learning_rate": 1.0318281185653367e-07, + "loss": 1.0784, + "step": 15241 + }, + { + "epoch": 0.9556712019562354, + "grad_norm": 3.4405038356781006, + "learning_rate": 1.0289204135566777e-07, + "loss": 1.0853, + "step": 15242 + }, + { + "epoch": 0.9557339018120259, + "grad_norm": 3.628072500228882, + "learning_rate": 1.026016790119655e-07, + "loss": 1.0405, + "step": 15243 + }, + { + "epoch": 0.9557966016678162, + "grad_norm": 3.677417755126953, + "learning_rate": 1.0231172483740171e-07, + "loss": 0.9328, + "step": 15244 + }, + { + "epoch": 0.9558593015236065, + "grad_norm": 3.2026240825653076, + "learning_rate": 1.0202217884393573e-07, + "loss": 1.031, + "step": 15245 + }, + { + "epoch": 0.9559220013793969, + "grad_norm": 3.4248616695404053, + "learning_rate": 1.017330410435069e-07, + "loss": 1.1282, + "step": 15246 + }, + { + "epoch": 0.9559847012351872, + "grad_norm": 3.4561829566955566, + "learning_rate": 1.0144431144804012e-07, + "loss": 1.0162, + "step": 15247 + }, + { + "epoch": 0.9560474010909775, + "grad_norm": 3.7382118701934814, + "learning_rate": 1.0115599006944143e-07, + "loss": 1.0414, + "step": 15248 + }, + { + "epoch": 0.9561101009467678, + "grad_norm": 3.4248647689819336, + "learning_rate": 1.0086807691960243e-07, + "loss": 1.0143, + "step": 15249 + }, + { + "epoch": 0.9561728008025582, + "grad_norm": 3.6117446422576904, + "learning_rate": 1.0058057201039695e-07, + "loss": 1.1901, + "step": 15250 + }, + { + "epoch": 0.9562355006583485, + "grad_norm": 3.466587781906128, + "learning_rate": 1.0029347535367994e-07, + "loss": 1.0692, + "step": 15251 + }, + { + "epoch": 0.9562982005141388, + "grad_norm": 3.5347402095794678, + "learning_rate": 1.0000678696129307e-07, + "loss": 1.2285, + "step": 15252 + }, + { + "epoch": 0.9563609003699292, + "grad_norm": 4.081801414489746, + "learning_rate": 9.972050684505796e-08, + "loss": 0.9879, + "step": 15253 + }, + { + "epoch": 0.9564236002257195, + "grad_norm": 3.199981212615967, + "learning_rate": 9.943463501678186e-08, + "loss": 1.0636, + "step": 15254 + }, + { + "epoch": 0.9564863000815098, + "grad_norm": 3.230395793914795, + "learning_rate": 9.91491714882542e-08, + "loss": 1.1241, + "step": 15255 + }, + { + "epoch": 0.9565489999373001, + "grad_norm": 3.545133590698242, + "learning_rate": 9.88641162712467e-08, + "loss": 1.0267, + "step": 15256 + }, + { + "epoch": 0.9566116997930905, + "grad_norm": 3.505906820297241, + "learning_rate": 9.85794693775155e-08, + "loss": 1.0598, + 
"step": 15257 + }, + { + "epoch": 0.9566743996488808, + "grad_norm": 3.084888219833374, + "learning_rate": 9.829523081879788e-08, + "loss": 1.2074, + "step": 15258 + }, + { + "epoch": 0.9567370995046711, + "grad_norm": 3.904806613922119, + "learning_rate": 9.80114006068189e-08, + "loss": 1.1263, + "step": 15259 + }, + { + "epoch": 0.9567997993604614, + "grad_norm": 3.5488057136535645, + "learning_rate": 9.772797875328255e-08, + "loss": 1.0063, + "step": 15260 + }, + { + "epoch": 0.9568624992162518, + "grad_norm": 2.837165117263794, + "learning_rate": 9.744496526987612e-08, + "loss": 1.0771, + "step": 15261 + }, + { + "epoch": 0.9569251990720421, + "grad_norm": 3.2743301391601562, + "learning_rate": 9.716236016827141e-08, + "loss": 1.0163, + "step": 15262 + }, + { + "epoch": 0.9569878989278324, + "grad_norm": 3.0175211429595947, + "learning_rate": 9.688016346012463e-08, + "loss": 1.0587, + "step": 15263 + }, + { + "epoch": 0.9570505987836228, + "grad_norm": 3.0581064224243164, + "learning_rate": 9.659837515707093e-08, + "loss": 1.0765, + "step": 15264 + }, + { + "epoch": 0.9571132986394131, + "grad_norm": 3.4255924224853516, + "learning_rate": 9.631699527073323e-08, + "loss": 1.0958, + "step": 15265 + }, + { + "epoch": 0.9571759984952034, + "grad_norm": 3.502519130706787, + "learning_rate": 9.60360238127156e-08, + "loss": 1.0753, + "step": 15266 + }, + { + "epoch": 0.9572386983509938, + "grad_norm": 3.2498955726623535, + "learning_rate": 9.57554607946043e-08, + "loss": 1.1241, + "step": 15267 + }, + { + "epoch": 0.9573013982067842, + "grad_norm": 3.660377264022827, + "learning_rate": 9.547530622797119e-08, + "loss": 1.0173, + "step": 15268 + }, + { + "epoch": 0.9573640980625745, + "grad_norm": 3.3104264736175537, + "learning_rate": 9.519556012436815e-08, + "loss": 1.0272, + "step": 15269 + }, + { + "epoch": 0.9574267979183648, + "grad_norm": 3.46342134475708, + "learning_rate": 9.491622249533261e-08, + "loss": 1.1344, + "step": 15270 + }, + { + "epoch": 0.9574894977741552, + "grad_norm": 3.518233060836792, + "learning_rate": 9.463729335238537e-08, + "loss": 1.0972, + "step": 15271 + }, + { + "epoch": 0.9575521976299455, + "grad_norm": 3.407646894454956, + "learning_rate": 9.435877270702831e-08, + "loss": 1.0817, + "step": 15272 + }, + { + "epoch": 0.9576148974857358, + "grad_norm": 3.347656488418579, + "learning_rate": 9.408066057074782e-08, + "loss": 1.0628, + "step": 15273 + }, + { + "epoch": 0.9576775973415261, + "grad_norm": 3.2072973251342773, + "learning_rate": 9.380295695501363e-08, + "loss": 1.113, + "step": 15274 + }, + { + "epoch": 0.9577402971973165, + "grad_norm": 3.5405683517456055, + "learning_rate": 9.352566187127876e-08, + "loss": 0.876, + "step": 15275 + }, + { + "epoch": 0.9578029970531068, + "grad_norm": 3.4693996906280518, + "learning_rate": 9.324877533097743e-08, + "loss": 1.1706, + "step": 15276 + }, + { + "epoch": 0.9578656969088971, + "grad_norm": 3.438380241394043, + "learning_rate": 9.297229734552937e-08, + "loss": 1.2525, + "step": 15277 + }, + { + "epoch": 0.9579283967646874, + "grad_norm": 3.20628023147583, + "learning_rate": 9.26962279263366e-08, + "loss": 1.2376, + "step": 15278 + }, + { + "epoch": 0.9579910966204778, + "grad_norm": 3.437309503555298, + "learning_rate": 9.242056708478442e-08, + "loss": 1.0191, + "step": 15279 + }, + { + "epoch": 0.9580537964762681, + "grad_norm": 3.337340831756592, + "learning_rate": 9.214531483223931e-08, + "loss": 1.0547, + "step": 15280 + }, + { + "epoch": 0.9581164963320584, + "grad_norm": 3.5408434867858887, + 
"learning_rate": 9.187047118005554e-08, + "loss": 1.0541, + "step": 15281 + }, + { + "epoch": 0.9581791961878487, + "grad_norm": 3.5665125846862793, + "learning_rate": 9.159603613956625e-08, + "loss": 1.0134, + "step": 15282 + }, + { + "epoch": 0.9582418960436391, + "grad_norm": 3.1009206771850586, + "learning_rate": 9.132200972208793e-08, + "loss": 1.2461, + "step": 15283 + }, + { + "epoch": 0.9583045958994294, + "grad_norm": 3.6539087295532227, + "learning_rate": 9.104839193892379e-08, + "loss": 1.1275, + "step": 15284 + }, + { + "epoch": 0.9583672957552197, + "grad_norm": 3.5959408283233643, + "learning_rate": 9.077518280135589e-08, + "loss": 1.0243, + "step": 15285 + }, + { + "epoch": 0.95842999561101, + "grad_norm": 3.33638858795166, + "learning_rate": 9.0502382320653e-08, + "loss": 1.0352, + "step": 15286 + }, + { + "epoch": 0.9584926954668004, + "grad_norm": 3.1776201725006104, + "learning_rate": 9.02299905080628e-08, + "loss": 1.073, + "step": 15287 + }, + { + "epoch": 0.9585553953225907, + "grad_norm": 3.323685884475708, + "learning_rate": 8.995800737482296e-08, + "loss": 0.9242, + "step": 15288 + }, + { + "epoch": 0.958618095178381, + "grad_norm": 3.404254674911499, + "learning_rate": 8.968643293214674e-08, + "loss": 1.0495, + "step": 15289 + }, + { + "epoch": 0.9586807950341715, + "grad_norm": 3.2889833450317383, + "learning_rate": 8.941526719123406e-08, + "loss": 1.2517, + "step": 15290 + }, + { + "epoch": 0.9587434948899618, + "grad_norm": 3.5937585830688477, + "learning_rate": 8.914451016326931e-08, + "loss": 1.1013, + "step": 15291 + }, + { + "epoch": 0.9588061947457521, + "grad_norm": 3.2514026165008545, + "learning_rate": 8.887416185941799e-08, + "loss": 1.0071, + "step": 15292 + }, + { + "epoch": 0.9588688946015425, + "grad_norm": 3.5206472873687744, + "learning_rate": 8.860422229082788e-08, + "loss": 1.0417, + "step": 15293 + }, + { + "epoch": 0.9589315944573328, + "grad_norm": 3.6841702461242676, + "learning_rate": 8.833469146863339e-08, + "loss": 1.0989, + "step": 15294 + }, + { + "epoch": 0.9589942943131231, + "grad_norm": 4.1579389572143555, + "learning_rate": 8.806556940394894e-08, + "loss": 0.8383, + "step": 15295 + }, + { + "epoch": 0.9590569941689134, + "grad_norm": 3.6891534328460693, + "learning_rate": 8.779685610787348e-08, + "loss": 1.0827, + "step": 15296 + }, + { + "epoch": 0.9591196940247038, + "grad_norm": 3.1625304222106934, + "learning_rate": 8.75285515914881e-08, + "loss": 1.0558, + "step": 15297 + }, + { + "epoch": 0.9591823938804941, + "grad_norm": 3.32497239112854, + "learning_rate": 8.726065586585841e-08, + "loss": 1.1688, + "step": 15298 + }, + { + "epoch": 0.9592450937362844, + "grad_norm": 3.8947999477386475, + "learning_rate": 8.699316894203225e-08, + "loss": 0.9337, + "step": 15299 + }, + { + "epoch": 0.9593077935920747, + "grad_norm": 3.231551170349121, + "learning_rate": 8.672609083104078e-08, + "loss": 0.9695, + "step": 15300 + }, + { + "epoch": 0.9593704934478651, + "grad_norm": 3.6507320404052734, + "learning_rate": 8.645942154389742e-08, + "loss": 1.0907, + "step": 15301 + }, + { + "epoch": 0.9594331933036554, + "grad_norm": 3.335843801498413, + "learning_rate": 8.619316109160115e-08, + "loss": 1.2001, + "step": 15302 + }, + { + "epoch": 0.9594958931594457, + "grad_norm": 3.88716721534729, + "learning_rate": 8.592730948513205e-08, + "loss": 0.9351, + "step": 15303 + }, + { + "epoch": 0.959558593015236, + "grad_norm": 3.2464964389801025, + "learning_rate": 8.566186673545362e-08, + "loss": 1.0942, + "step": 15304 + }, + { + "epoch": 
0.9596212928710264, + "grad_norm": 3.670203447341919, + "learning_rate": 8.539683285351152e-08, + "loss": 1.1072, + "step": 15305 + }, + { + "epoch": 0.9596839927268167, + "grad_norm": 3.275280237197876, + "learning_rate": 8.513220785023813e-08, + "loss": 1.0879, + "step": 15306 + }, + { + "epoch": 0.959746692582607, + "grad_norm": 3.435492515563965, + "learning_rate": 8.486799173654581e-08, + "loss": 1.0036, + "step": 15307 + }, + { + "epoch": 0.9598093924383974, + "grad_norm": 3.233311414718628, + "learning_rate": 8.460418452333031e-08, + "loss": 1.1562, + "step": 15308 + }, + { + "epoch": 0.9598720922941877, + "grad_norm": 3.729247808456421, + "learning_rate": 8.434078622147068e-08, + "loss": 0.9665, + "step": 15309 + }, + { + "epoch": 0.959934792149978, + "grad_norm": 3.089743137359619, + "learning_rate": 8.407779684183048e-08, + "loss": 1.1158, + "step": 15310 + }, + { + "epoch": 0.9599974920057683, + "grad_norm": 3.4982364177703857, + "learning_rate": 8.381521639525436e-08, + "loss": 1.0065, + "step": 15311 + }, + { + "epoch": 0.9600601918615587, + "grad_norm": 3.6751184463500977, + "learning_rate": 8.355304489257254e-08, + "loss": 0.9257, + "step": 15312 + }, + { + "epoch": 0.9601228917173491, + "grad_norm": 3.1172773838043213, + "learning_rate": 8.329128234459527e-08, + "loss": 1.2037, + "step": 15313 + }, + { + "epoch": 0.9601855915731394, + "grad_norm": 3.486464023590088, + "learning_rate": 8.302992876211946e-08, + "loss": 0.8782, + "step": 15314 + }, + { + "epoch": 0.9602482914289298, + "grad_norm": 3.1824557781219482, + "learning_rate": 8.276898415592205e-08, + "loss": 1.1164, + "step": 15315 + }, + { + "epoch": 0.9603109912847201, + "grad_norm": 3.8211560249328613, + "learning_rate": 8.250844853676443e-08, + "loss": 1.096, + "step": 15316 + }, + { + "epoch": 0.9603736911405104, + "grad_norm": 3.384202241897583, + "learning_rate": 8.224832191539245e-08, + "loss": 0.9989, + "step": 15317 + }, + { + "epoch": 0.9604363909963007, + "grad_norm": 3.2139289379119873, + "learning_rate": 8.198860430253308e-08, + "loss": 1.0424, + "step": 15318 + }, + { + "epoch": 0.9604990908520911, + "grad_norm": 3.6802570819854736, + "learning_rate": 8.172929570889553e-08, + "loss": 0.9307, + "step": 15319 + }, + { + "epoch": 0.9605617907078814, + "grad_norm": 3.909668445587158, + "learning_rate": 8.147039614517571e-08, + "loss": 0.8744, + "step": 15320 + }, + { + "epoch": 0.9606244905636717, + "grad_norm": 3.3278496265411377, + "learning_rate": 8.121190562204951e-08, + "loss": 0.984, + "step": 15321 + }, + { + "epoch": 0.960687190419462, + "grad_norm": 3.400620222091675, + "learning_rate": 8.095382415017727e-08, + "loss": 1.0121, + "step": 15322 + }, + { + "epoch": 0.9607498902752524, + "grad_norm": 3.2684831619262695, + "learning_rate": 8.069615174020384e-08, + "loss": 1.1211, + "step": 15323 + }, + { + "epoch": 0.9608125901310427, + "grad_norm": 3.3832449913024902, + "learning_rate": 8.043888840275293e-08, + "loss": 1.0223, + "step": 15324 + }, + { + "epoch": 0.960875289986833, + "grad_norm": 3.5164451599121094, + "learning_rate": 8.018203414843607e-08, + "loss": 1.0703, + "step": 15325 + }, + { + "epoch": 0.9609379898426234, + "grad_norm": 3.357593297958374, + "learning_rate": 7.992558898784475e-08, + "loss": 0.9942, + "step": 15326 + }, + { + "epoch": 0.9610006896984137, + "grad_norm": 3.3862247467041016, + "learning_rate": 7.966955293155498e-08, + "loss": 1.1032, + "step": 15327 + }, + { + "epoch": 0.961063389554204, + "grad_norm": 3.8088033199310303, + "learning_rate": 7.94139259901272e-08, + 
"loss": 1.1201, + "step": 15328 + }, + { + "epoch": 0.9611260894099943, + "grad_norm": 3.411757230758667, + "learning_rate": 7.915870817410188e-08, + "loss": 1.144, + "step": 15329 + }, + { + "epoch": 0.9611887892657847, + "grad_norm": 3.207388401031494, + "learning_rate": 7.890389949400501e-08, + "loss": 1.2254, + "step": 15330 + }, + { + "epoch": 0.961251489121575, + "grad_norm": 3.466123580932617, + "learning_rate": 7.864949996034376e-08, + "loss": 1.1256, + "step": 15331 + }, + { + "epoch": 0.9613141889773653, + "grad_norm": 3.2497098445892334, + "learning_rate": 7.839550958361086e-08, + "loss": 0.9735, + "step": 15332 + }, + { + "epoch": 0.9613768888331556, + "grad_norm": 3.792212963104248, + "learning_rate": 7.814192837428126e-08, + "loss": 1.1173, + "step": 15333 + }, + { + "epoch": 0.961439588688946, + "grad_norm": 3.3658111095428467, + "learning_rate": 7.788875634281101e-08, + "loss": 1.2109, + "step": 15334 + }, + { + "epoch": 0.9615022885447363, + "grad_norm": 3.3387458324432373, + "learning_rate": 7.763599349964068e-08, + "loss": 1.0313, + "step": 15335 + }, + { + "epoch": 0.9615649884005267, + "grad_norm": 3.474869728088379, + "learning_rate": 7.738363985519749e-08, + "loss": 0.9671, + "step": 15336 + }, + { + "epoch": 0.9616276882563171, + "grad_norm": 3.879098653793335, + "learning_rate": 7.71316954198853e-08, + "loss": 1.1036, + "step": 15337 + }, + { + "epoch": 0.9616903881121074, + "grad_norm": 3.4811558723449707, + "learning_rate": 7.688016020409473e-08, + "loss": 1.0298, + "step": 15338 + }, + { + "epoch": 0.9617530879678977, + "grad_norm": 3.611095666885376, + "learning_rate": 7.662903421819967e-08, + "loss": 1.169, + "step": 15339 + }, + { + "epoch": 0.961815787823688, + "grad_norm": 3.2478320598602295, + "learning_rate": 7.637831747255741e-08, + "loss": 1.1648, + "step": 15340 + }, + { + "epoch": 0.9618784876794784, + "grad_norm": 3.3624703884124756, + "learning_rate": 7.612800997750635e-08, + "loss": 1.2996, + "step": 15341 + }, + { + "epoch": 0.9619411875352687, + "grad_norm": 3.223797559738159, + "learning_rate": 7.587811174336934e-08, + "loss": 1.2181, + "step": 15342 + }, + { + "epoch": 0.962003887391059, + "grad_norm": 3.467984676361084, + "learning_rate": 7.562862278045257e-08, + "loss": 1.0219, + "step": 15343 + }, + { + "epoch": 0.9620665872468493, + "grad_norm": 3.3753859996795654, + "learning_rate": 7.53795430990456e-08, + "loss": 1.0872, + "step": 15344 + }, + { + "epoch": 0.9621292871026397, + "grad_norm": 3.300600290298462, + "learning_rate": 7.51308727094191e-08, + "loss": 1.3273, + "step": 15345 + }, + { + "epoch": 0.96219198695843, + "grad_norm": 3.729337692260742, + "learning_rate": 7.48826116218282e-08, + "loss": 0.9115, + "step": 15346 + }, + { + "epoch": 0.9622546868142203, + "grad_norm": 3.4490065574645996, + "learning_rate": 7.463475984651248e-08, + "loss": 1.0618, + "step": 15347 + }, + { + "epoch": 0.9623173866700107, + "grad_norm": 3.182521343231201, + "learning_rate": 7.438731739369264e-08, + "loss": 1.206, + "step": 15348 + }, + { + "epoch": 0.962380086525801, + "grad_norm": 3.8358840942382812, + "learning_rate": 7.414028427357278e-08, + "loss": 1.1009, + "step": 15349 + }, + { + "epoch": 0.9624427863815913, + "grad_norm": 3.390096426010132, + "learning_rate": 7.389366049634139e-08, + "loss": 1.0133, + "step": 15350 + }, + { + "epoch": 0.9625054862373816, + "grad_norm": 3.162907838821411, + "learning_rate": 7.364744607216923e-08, + "loss": 1.1939, + "step": 15351 + }, + { + "epoch": 0.962568186093172, + "grad_norm": 3.2206978797912598, + 
"learning_rate": 7.34016410112104e-08, + "loss": 0.938, + "step": 15352 + }, + { + "epoch": 0.9626308859489623, + "grad_norm": 3.4246013164520264, + "learning_rate": 7.315624532360011e-08, + "loss": 1.01, + "step": 15353 + }, + { + "epoch": 0.9626935858047526, + "grad_norm": 3.6921112537384033, + "learning_rate": 7.291125901946027e-08, + "loss": 1.0528, + "step": 15354 + }, + { + "epoch": 0.9627562856605429, + "grad_norm": 3.562080144882202, + "learning_rate": 7.266668210889394e-08, + "loss": 0.9998, + "step": 15355 + }, + { + "epoch": 0.9628189855163333, + "grad_norm": 2.999265193939209, + "learning_rate": 7.242251460198635e-08, + "loss": 1.1943, + "step": 15356 + }, + { + "epoch": 0.9628816853721236, + "grad_norm": 3.2126097679138184, + "learning_rate": 7.217875650880834e-08, + "loss": 1.0592, + "step": 15357 + }, + { + "epoch": 0.9629443852279139, + "grad_norm": 3.0213146209716797, + "learning_rate": 7.193540783941188e-08, + "loss": 1.0491, + "step": 15358 + }, + { + "epoch": 0.9630070850837044, + "grad_norm": 3.180800437927246, + "learning_rate": 7.169246860383338e-08, + "loss": 1.0398, + "step": 15359 + }, + { + "epoch": 0.9630697849394947, + "grad_norm": 3.5552589893341064, + "learning_rate": 7.144993881208929e-08, + "loss": 1.0821, + "step": 15360 + }, + { + "epoch": 0.963132484795285, + "grad_norm": 3.374891996383667, + "learning_rate": 7.12078184741849e-08, + "loss": 1.2512, + "step": 15361 + }, + { + "epoch": 0.9631951846510753, + "grad_norm": 3.2269327640533447, + "learning_rate": 7.096610760010337e-08, + "loss": 1.0704, + "step": 15362 + }, + { + "epoch": 0.9632578845068657, + "grad_norm": 4.034399509429932, + "learning_rate": 7.072480619981337e-08, + "loss": 1.1152, + "step": 15363 + }, + { + "epoch": 0.963320584362656, + "grad_norm": 3.808016777038574, + "learning_rate": 7.048391428326585e-08, + "loss": 1.0688, + "step": 15364 + }, + { + "epoch": 0.9633832842184463, + "grad_norm": 3.428903579711914, + "learning_rate": 7.024343186039507e-08, + "loss": 1.01, + "step": 15365 + }, + { + "epoch": 0.9634459840742366, + "grad_norm": 3.53584361076355, + "learning_rate": 7.000335894111865e-08, + "loss": 0.9751, + "step": 15366 + }, + { + "epoch": 0.963508683930027, + "grad_norm": 3.2933003902435303, + "learning_rate": 6.976369553533758e-08, + "loss": 1.0531, + "step": 15367 + }, + { + "epoch": 0.9635713837858173, + "grad_norm": 3.2165729999542236, + "learning_rate": 6.952444165293504e-08, + "loss": 1.3205, + "step": 15368 + }, + { + "epoch": 0.9636340836416076, + "grad_norm": 3.415886640548706, + "learning_rate": 6.92855973037787e-08, + "loss": 1.0819, + "step": 15369 + }, + { + "epoch": 0.963696783497398, + "grad_norm": 3.6454741954803467, + "learning_rate": 6.904716249771848e-08, + "loss": 1.1193, + "step": 15370 + }, + { + "epoch": 0.9637594833531883, + "grad_norm": 3.3559603691101074, + "learning_rate": 6.880913724458538e-08, + "loss": 1.0606, + "step": 15371 + }, + { + "epoch": 0.9638221832089786, + "grad_norm": 3.1634585857391357, + "learning_rate": 6.857152155419822e-08, + "loss": 1.0301, + "step": 15372 + }, + { + "epoch": 0.9638848830647689, + "grad_norm": 3.498323917388916, + "learning_rate": 6.833431543635471e-08, + "loss": 1.1355, + "step": 15373 + }, + { + "epoch": 0.9639475829205593, + "grad_norm": 3.1681020259857178, + "learning_rate": 6.809751890083926e-08, + "loss": 1.0648, + "step": 15374 + }, + { + "epoch": 0.9640102827763496, + "grad_norm": 3.7474417686462402, + "learning_rate": 6.786113195741406e-08, + "loss": 1.205, + "step": 15375 + }, + { + "epoch": 
0.9640729826321399, + "grad_norm": 3.6695070266723633, + "learning_rate": 6.76251546158302e-08, + "loss": 1.1098, + "step": 15376 + }, + { + "epoch": 0.9641356824879302, + "grad_norm": 3.4282565116882324, + "learning_rate": 6.738958688581876e-08, + "loss": 1.0347, + "step": 15377 + }, + { + "epoch": 0.9641983823437206, + "grad_norm": 3.828029155731201, + "learning_rate": 6.715442877709422e-08, + "loss": 0.9183, + "step": 15378 + }, + { + "epoch": 0.9642610821995109, + "grad_norm": 3.380239963531494, + "learning_rate": 6.691968029935436e-08, + "loss": 1.1326, + "step": 15379 + }, + { + "epoch": 0.9643237820553012, + "grad_norm": 3.3327341079711914, + "learning_rate": 6.668534146228145e-08, + "loss": 1.2123, + "step": 15380 + }, + { + "epoch": 0.9643864819110916, + "grad_norm": 3.5654420852661133, + "learning_rate": 6.645141227553887e-08, + "loss": 1.1463, + "step": 15381 + }, + { + "epoch": 0.964449181766882, + "grad_norm": 3.184116840362549, + "learning_rate": 6.621789274877332e-08, + "loss": 0.9334, + "step": 15382 + }, + { + "epoch": 0.9645118816226723, + "grad_norm": 3.2978439331054688, + "learning_rate": 6.598478289161714e-08, + "loss": 1.0912, + "step": 15383 + }, + { + "epoch": 0.9645745814784626, + "grad_norm": 3.3064026832580566, + "learning_rate": 6.575208271368039e-08, + "loss": 1.1358, + "step": 15384 + }, + { + "epoch": 0.964637281334253, + "grad_norm": 3.503466844558716, + "learning_rate": 6.551979222456317e-08, + "loss": 1.0912, + "step": 15385 + }, + { + "epoch": 0.9646999811900433, + "grad_norm": 3.898331880569458, + "learning_rate": 6.528791143384227e-08, + "loss": 0.9796, + "step": 15386 + }, + { + "epoch": 0.9647626810458336, + "grad_norm": 3.599151372909546, + "learning_rate": 6.505644035108227e-08, + "loss": 1.0686, + "step": 15387 + }, + { + "epoch": 0.964825380901624, + "grad_norm": 3.2323009967803955, + "learning_rate": 6.482537898582886e-08, + "loss": 1.1275, + "step": 15388 + }, + { + "epoch": 0.9648880807574143, + "grad_norm": 3.4669435024261475, + "learning_rate": 6.459472734760997e-08, + "loss": 1.1228, + "step": 15389 + }, + { + "epoch": 0.9649507806132046, + "grad_norm": 3.3038721084594727, + "learning_rate": 6.43644854459391e-08, + "loss": 1.1399, + "step": 15390 + }, + { + "epoch": 0.9650134804689949, + "grad_norm": 3.661517381668091, + "learning_rate": 6.413465329031088e-08, + "loss": 1.0227, + "step": 15391 + }, + { + "epoch": 0.9650761803247853, + "grad_norm": 3.3960683345794678, + "learning_rate": 6.390523089020328e-08, + "loss": 0.8548, + "step": 15392 + }, + { + "epoch": 0.9651388801805756, + "grad_norm": 3.3890345096588135, + "learning_rate": 6.367621825507652e-08, + "loss": 1.0826, + "step": 15393 + }, + { + "epoch": 0.9652015800363659, + "grad_norm": 3.372366189956665, + "learning_rate": 6.344761539437749e-08, + "loss": 1.0435, + "step": 15394 + }, + { + "epoch": 0.9652642798921562, + "grad_norm": 3.479091167449951, + "learning_rate": 6.321942231753309e-08, + "loss": 0.9788, + "step": 15395 + }, + { + "epoch": 0.9653269797479466, + "grad_norm": 3.846597671508789, + "learning_rate": 6.299163903395355e-08, + "loss": 0.9808, + "step": 15396 + }, + { + "epoch": 0.9653896796037369, + "grad_norm": 3.852358102798462, + "learning_rate": 6.27642655530325e-08, + "loss": 1.1834, + "step": 15397 + }, + { + "epoch": 0.9654523794595272, + "grad_norm": 3.436089277267456, + "learning_rate": 6.253730188414687e-08, + "loss": 1.1208, + "step": 15398 + }, + { + "epoch": 0.9655150793153175, + "grad_norm": 3.411395788192749, + "learning_rate": 6.231074803665804e-08, + 
"loss": 1.2051, + "step": 15399 + }, + { + "epoch": 0.9655777791711079, + "grad_norm": 3.6639058589935303, + "learning_rate": 6.208460401990634e-08, + "loss": 1.0673, + "step": 15400 + }, + { + "epoch": 0.9656404790268982, + "grad_norm": 3.528465747833252, + "learning_rate": 6.185886984322098e-08, + "loss": 1.1865, + "step": 15401 + }, + { + "epoch": 0.9657031788826885, + "grad_norm": 3.6881930828094482, + "learning_rate": 6.163354551590895e-08, + "loss": 1.09, + "step": 15402 + }, + { + "epoch": 0.9657658787384789, + "grad_norm": 3.434936761856079, + "learning_rate": 6.140863104726391e-08, + "loss": 1.1724, + "step": 15403 + }, + { + "epoch": 0.9658285785942692, + "grad_norm": 3.4999442100524902, + "learning_rate": 6.118412644655958e-08, + "loss": 1.075, + "step": 15404 + }, + { + "epoch": 0.9658912784500595, + "grad_norm": 3.367572069168091, + "learning_rate": 6.096003172305742e-08, + "loss": 1.1886, + "step": 15405 + }, + { + "epoch": 0.96595397830585, + "grad_norm": 3.8769023418426514, + "learning_rate": 6.073634688599783e-08, + "loss": 1.0509, + "step": 15406 + }, + { + "epoch": 0.9660166781616403, + "grad_norm": 3.5949718952178955, + "learning_rate": 6.051307194460454e-08, + "loss": 1.1615, + "step": 15407 + }, + { + "epoch": 0.9660793780174306, + "grad_norm": 3.3339927196502686, + "learning_rate": 6.029020690808573e-08, + "loss": 1.0925, + "step": 15408 + }, + { + "epoch": 0.9661420778732209, + "grad_norm": 3.5709142684936523, + "learning_rate": 6.006775178563406e-08, + "loss": 1.033, + "step": 15409 + }, + { + "epoch": 0.9662047777290113, + "grad_norm": 3.272660970687866, + "learning_rate": 5.984570658642219e-08, + "loss": 1.0942, + "step": 15410 + }, + { + "epoch": 0.9662674775848016, + "grad_norm": 3.3709404468536377, + "learning_rate": 5.962407131960723e-08, + "loss": 1.0895, + "step": 15411 + }, + { + "epoch": 0.9663301774405919, + "grad_norm": 3.1714625358581543, + "learning_rate": 5.940284599432966e-08, + "loss": 1.1484, + "step": 15412 + }, + { + "epoch": 0.9663928772963822, + "grad_norm": 3.17622709274292, + "learning_rate": 5.918203061971217e-08, + "loss": 1.0703, + "step": 15413 + }, + { + "epoch": 0.9664555771521726, + "grad_norm": 3.4609086513519287, + "learning_rate": 5.8961625204863036e-08, + "loss": 1.1974, + "step": 15414 + }, + { + "epoch": 0.9665182770079629, + "grad_norm": 3.2552316188812256, + "learning_rate": 5.874162975886943e-08, + "loss": 1.0261, + "step": 15415 + }, + { + "epoch": 0.9665809768637532, + "grad_norm": 3.495182752609253, + "learning_rate": 5.852204429080632e-08, + "loss": 1.132, + "step": 15416 + }, + { + "epoch": 0.9666436767195435, + "grad_norm": 3.241935968399048, + "learning_rate": 5.830286880972758e-08, + "loss": 1.045, + "step": 15417 + }, + { + "epoch": 0.9667063765753339, + "grad_norm": 3.0909223556518555, + "learning_rate": 5.808410332467263e-08, + "loss": 1.1032, + "step": 15418 + }, + { + "epoch": 0.9667690764311242, + "grad_norm": 3.388355255126953, + "learning_rate": 5.7865747844663146e-08, + "loss": 1.2235, + "step": 15419 + }, + { + "epoch": 0.9668317762869145, + "grad_norm": 3.5431437492370605, + "learning_rate": 5.7647802378705265e-08, + "loss": 1.1172, + "step": 15420 + }, + { + "epoch": 0.9668944761427049, + "grad_norm": 3.27899169921875, + "learning_rate": 5.743026693578624e-08, + "loss": 1.1037, + "step": 15421 + }, + { + "epoch": 0.9669571759984952, + "grad_norm": 3.6909048557281494, + "learning_rate": 5.721314152487556e-08, + "loss": 0.977, + "step": 15422 + }, + { + "epoch": 0.9670198758542855, + "grad_norm": 
3.2541604042053223, + "learning_rate": 5.6996426154930505e-08, + "loss": 0.8781, + "step": 15423 + }, + { + "epoch": 0.9670825757100758, + "grad_norm": 3.3958165645599365, + "learning_rate": 5.6780120834887264e-08, + "loss": 1.1649, + "step": 15424 + }, + { + "epoch": 0.9671452755658662, + "grad_norm": 3.5780129432678223, + "learning_rate": 5.6564225573665364e-08, + "loss": 1.1065, + "step": 15425 + }, + { + "epoch": 0.9672079754216565, + "grad_norm": 3.581937313079834, + "learning_rate": 5.6348740380168795e-08, + "loss": 1.0416, + "step": 15426 + }, + { + "epoch": 0.9672706752774468, + "grad_norm": 3.4933838844299316, + "learning_rate": 5.613366526328601e-08, + "loss": 1.0467, + "step": 15427 + }, + { + "epoch": 0.9673333751332371, + "grad_norm": 3.1295483112335205, + "learning_rate": 5.5919000231883236e-08, + "loss": 1.0525, + "step": 15428 + }, + { + "epoch": 0.9673960749890276, + "grad_norm": 3.2218778133392334, + "learning_rate": 5.5704745294815624e-08, + "loss": 1.1211, + "step": 15429 + }, + { + "epoch": 0.9674587748448179, + "grad_norm": 3.1597812175750732, + "learning_rate": 5.549090046091943e-08, + "loss": 0.9885, + "step": 15430 + }, + { + "epoch": 0.9675214747006082, + "grad_norm": 3.9163169860839844, + "learning_rate": 5.5277465739012045e-08, + "loss": 0.963, + "step": 15431 + }, + { + "epoch": 0.9675841745563986, + "grad_norm": 3.2603278160095215, + "learning_rate": 5.506444113789644e-08, + "loss": 1.1872, + "step": 15432 + }, + { + "epoch": 0.9676468744121889, + "grad_norm": 3.3917076587677, + "learning_rate": 5.48518266663578e-08, + "loss": 1.2135, + "step": 15433 + }, + { + "epoch": 0.9677095742679792, + "grad_norm": 3.655897378921509, + "learning_rate": 5.463962233316356e-08, + "loss": 1.1302, + "step": 15434 + }, + { + "epoch": 0.9677722741237695, + "grad_norm": 3.6586077213287354, + "learning_rate": 5.4427828147066705e-08, + "loss": 1.1641, + "step": 15435 + }, + { + "epoch": 0.9678349739795599, + "grad_norm": 3.299105405807495, + "learning_rate": 5.421644411679916e-08, + "loss": 1.1048, + "step": 15436 + }, + { + "epoch": 0.9678976738353502, + "grad_norm": 3.521488904953003, + "learning_rate": 5.400547025108061e-08, + "loss": 1.059, + "step": 15437 + }, + { + "epoch": 0.9679603736911405, + "grad_norm": 3.0354433059692383, + "learning_rate": 5.379490655861075e-08, + "loss": 1.0475, + "step": 15438 + }, + { + "epoch": 0.9680230735469308, + "grad_norm": 3.7453243732452393, + "learning_rate": 5.3584753048073756e-08, + "loss": 1.0003, + "step": 15439 + }, + { + "epoch": 0.9680857734027212, + "grad_norm": 3.703329086303711, + "learning_rate": 5.337500972813492e-08, + "loss": 1.0943, + "step": 15440 + }, + { + "epoch": 0.9681484732585115, + "grad_norm": 3.32411527633667, + "learning_rate": 5.3165676607445096e-08, + "loss": 1.177, + "step": 15441 + }, + { + "epoch": 0.9682111731143018, + "grad_norm": 3.4271349906921387, + "learning_rate": 5.295675369463737e-08, + "loss": 1.137, + "step": 15442 + }, + { + "epoch": 0.9682738729700922, + "grad_norm": 3.637296199798584, + "learning_rate": 5.2748240998328206e-08, + "loss": 1.204, + "step": 15443 + }, + { + "epoch": 0.9683365728258825, + "grad_norm": 3.3303451538085938, + "learning_rate": 5.2540138527115145e-08, + "loss": 1.0746, + "step": 15444 + }, + { + "epoch": 0.9683992726816728, + "grad_norm": 3.2268238067626953, + "learning_rate": 5.233244628958134e-08, + "loss": 0.9032, + "step": 15445 + }, + { + "epoch": 0.9684619725374631, + "grad_norm": 3.4964349269866943, + "learning_rate": 5.212516429429215e-08, + "loss": 1.1786, + 
"step": 15446 + }, + { + "epoch": 0.9685246723932535, + "grad_norm": 3.5533859729766846, + "learning_rate": 5.191829254979519e-08, + "loss": 1.1282, + "step": 15447 + }, + { + "epoch": 0.9685873722490438, + "grad_norm": 3.3421573638916016, + "learning_rate": 5.171183106462252e-08, + "loss": 1.1234, + "step": 15448 + }, + { + "epoch": 0.9686500721048341, + "grad_norm": 3.6395111083984375, + "learning_rate": 5.150577984728844e-08, + "loss": 1.1483, + "step": 15449 + }, + { + "epoch": 0.9687127719606244, + "grad_norm": 3.1322481632232666, + "learning_rate": 5.13001389062906e-08, + "loss": 1.1666, + "step": 15450 + }, + { + "epoch": 0.9687754718164148, + "grad_norm": 3.343162775039673, + "learning_rate": 5.109490825010888e-08, + "loss": 1.079, + "step": 15451 + }, + { + "epoch": 0.9688381716722052, + "grad_norm": 3.204803466796875, + "learning_rate": 5.0890087887208725e-08, + "loss": 1.0993, + "step": 15452 + }, + { + "epoch": 0.9689008715279955, + "grad_norm": 3.3205745220184326, + "learning_rate": 5.0685677826034505e-08, + "loss": 1.1685, + "step": 15453 + }, + { + "epoch": 0.9689635713837859, + "grad_norm": 3.4360134601593018, + "learning_rate": 5.048167807501836e-08, + "loss": 1.2417, + "step": 15454 + }, + { + "epoch": 0.9690262712395762, + "grad_norm": 3.524721145629883, + "learning_rate": 5.0278088642572445e-08, + "loss": 1.0753, + "step": 15455 + }, + { + "epoch": 0.9690889710953665, + "grad_norm": 3.4809083938598633, + "learning_rate": 5.007490953709227e-08, + "loss": 1.0131, + "step": 15456 + }, + { + "epoch": 0.9691516709511568, + "grad_norm": 3.0057928562164307, + "learning_rate": 4.987214076695668e-08, + "loss": 1.0857, + "step": 15457 + }, + { + "epoch": 0.9692143708069472, + "grad_norm": 3.3795032501220703, + "learning_rate": 4.9669782340530105e-08, + "loss": 1.1132, + "step": 15458 + }, + { + "epoch": 0.9692770706627375, + "grad_norm": 3.6505966186523438, + "learning_rate": 4.9467834266154756e-08, + "loss": 1.1532, + "step": 15459 + }, + { + "epoch": 0.9693397705185278, + "grad_norm": 3.4412715435028076, + "learning_rate": 4.926629655216064e-08, + "loss": 1.1036, + "step": 15460 + }, + { + "epoch": 0.9694024703743181, + "grad_norm": 3.4617886543273926, + "learning_rate": 4.906516920686e-08, + "loss": 0.9282, + "step": 15461 + }, + { + "epoch": 0.9694651702301085, + "grad_norm": 3.501183271408081, + "learning_rate": 4.8864452238545077e-08, + "loss": 1.0265, + "step": 15462 + }, + { + "epoch": 0.9695278700858988, + "grad_norm": 3.6748850345611572, + "learning_rate": 4.8664145655494824e-08, + "loss": 1.1142, + "step": 15463 + }, + { + "epoch": 0.9695905699416891, + "grad_norm": 3.2080299854278564, + "learning_rate": 4.846424946597039e-08, + "loss": 1.1171, + "step": 15464 + }, + { + "epoch": 0.9696532697974795, + "grad_norm": 3.435286045074463, + "learning_rate": 4.82647636782152e-08, + "loss": 1.0951, + "step": 15465 + }, + { + "epoch": 0.9697159696532698, + "grad_norm": 3.3698816299438477, + "learning_rate": 4.806568830045488e-08, + "loss": 1.1014, + "step": 15466 + }, + { + "epoch": 0.9697786695090601, + "grad_norm": 3.123234748840332, + "learning_rate": 4.786702334090065e-08, + "loss": 1.1751, + "step": 15467 + }, + { + "epoch": 0.9698413693648504, + "grad_norm": 3.65004825592041, + "learning_rate": 4.766876880774596e-08, + "loss": 1.1379, + "step": 15468 + }, + { + "epoch": 0.9699040692206408, + "grad_norm": 3.2290396690368652, + "learning_rate": 4.747092470916426e-08, + "loss": 1.1105, + "step": 15469 + }, + { + "epoch": 0.9699667690764311, + "grad_norm": 3.4164764881134033, 
+ "learning_rate": 4.7273491053316803e-08, + "loss": 1.0247, + "step": 15470 + }, + { + "epoch": 0.9700294689322214, + "grad_norm": 3.620370388031006, + "learning_rate": 4.707646784834485e-08, + "loss": 1.0479, + "step": 15471 + }, + { + "epoch": 0.9700921687880117, + "grad_norm": 3.307697057723999, + "learning_rate": 4.687985510237525e-08, + "loss": 0.9828, + "step": 15472 + }, + { + "epoch": 0.9701548686438021, + "grad_norm": 3.4488131999969482, + "learning_rate": 4.6683652823513725e-08, + "loss": 1.1008, + "step": 15473 + }, + { + "epoch": 0.9702175684995924, + "grad_norm": 3.51314115524292, + "learning_rate": 4.648786101985381e-08, + "loss": 1.1891, + "step": 15474 + }, + { + "epoch": 0.9702802683553828, + "grad_norm": 3.766650915145874, + "learning_rate": 4.629247969946904e-08, + "loss": 1.1002, + "step": 15475 + }, + { + "epoch": 0.9703429682111732, + "grad_norm": 3.3936150074005127, + "learning_rate": 4.6097508870416306e-08, + "loss": 1.0446, + "step": 15476 + }, + { + "epoch": 0.9704056680669635, + "grad_norm": 3.4888811111450195, + "learning_rate": 4.5902948540738065e-08, + "loss": 1.1513, + "step": 15477 + }, + { + "epoch": 0.9704683679227538, + "grad_norm": 3.64877986907959, + "learning_rate": 4.570879871845679e-08, + "loss": 0.9388, + "step": 15478 + }, + { + "epoch": 0.9705310677785441, + "grad_norm": 3.6927707195281982, + "learning_rate": 4.55150594115783e-08, + "loss": 1.1377, + "step": 15479 + }, + { + "epoch": 0.9705937676343345, + "grad_norm": 3.569957971572876, + "learning_rate": 4.5321730628093976e-08, + "loss": 1.1023, + "step": 15480 + }, + { + "epoch": 0.9706564674901248, + "grad_norm": 3.888674259185791, + "learning_rate": 4.512881237597744e-08, + "loss": 1.0314, + "step": 15481 + }, + { + "epoch": 0.9707191673459151, + "grad_norm": 3.3586859703063965, + "learning_rate": 4.4936304663182326e-08, + "loss": 0.9458, + "step": 15482 + }, + { + "epoch": 0.9707818672017055, + "grad_norm": 3.6830804347991943, + "learning_rate": 4.474420749764896e-08, + "loss": 1.0296, + "step": 15483 + }, + { + "epoch": 0.9708445670574958, + "grad_norm": 3.502497911453247, + "learning_rate": 4.455252088729878e-08, + "loss": 0.9545, + "step": 15484 + }, + { + "epoch": 0.9709072669132861, + "grad_norm": 3.5157642364501953, + "learning_rate": 4.4361244840037674e-08, + "loss": 1.3322, + "step": 15485 + }, + { + "epoch": 0.9709699667690764, + "grad_norm": 3.326275587081909, + "learning_rate": 4.417037936375379e-08, + "loss": 1.1695, + "step": 15486 + }, + { + "epoch": 0.9710326666248668, + "grad_norm": 3.641401767730713, + "learning_rate": 4.39799244663186e-08, + "loss": 0.997, + "step": 15487 + }, + { + "epoch": 0.9710953664806571, + "grad_norm": 3.4352493286132812, + "learning_rate": 4.378988015558472e-08, + "loss": 1.0008, + "step": 15488 + }, + { + "epoch": 0.9711580663364474, + "grad_norm": 3.5592422485351562, + "learning_rate": 4.360024643939254e-08, + "loss": 1.1041, + "step": 15489 + }, + { + "epoch": 0.9712207661922377, + "grad_norm": 3.38523530960083, + "learning_rate": 4.3411023325560245e-08, + "loss": 0.9973, + "step": 15490 + }, + { + "epoch": 0.9712834660480281, + "grad_norm": 3.8618545532226562, + "learning_rate": 4.3222210821891596e-08, + "loss": 1.2419, + "step": 15491 + }, + { + "epoch": 0.9713461659038184, + "grad_norm": 3.4424657821655273, + "learning_rate": 4.303380893617481e-08, + "loss": 1.0086, + "step": 15492 + }, + { + "epoch": 0.9714088657596087, + "grad_norm": 3.6096415519714355, + "learning_rate": 4.2845817676178127e-08, + "loss": 1.0497, + "step": 15493 + }, + { + 
"epoch": 0.971471565615399, + "grad_norm": 3.7404990196228027, + "learning_rate": 4.2658237049655325e-08, + "loss": 1.0042, + "step": 15494 + }, + { + "epoch": 0.9715342654711894, + "grad_norm": 3.3968870639801025, + "learning_rate": 4.2471067064340234e-08, + "loss": 1.0437, + "step": 15495 + }, + { + "epoch": 0.9715969653269797, + "grad_norm": 3.9778709411621094, + "learning_rate": 4.228430772795444e-08, + "loss": 1.0091, + "step": 15496 + }, + { + "epoch": 0.97165966518277, + "grad_norm": 3.706547737121582, + "learning_rate": 4.2097959048198465e-08, + "loss": 0.9803, + "step": 15497 + }, + { + "epoch": 0.9717223650385605, + "grad_norm": 3.7095119953155518, + "learning_rate": 4.191202103275727e-08, + "loss": 1.1725, + "step": 15498 + }, + { + "epoch": 0.9717850648943508, + "grad_norm": 3.2553458213806152, + "learning_rate": 4.1726493689299154e-08, + "loss": 1.1387, + "step": 15499 + }, + { + "epoch": 0.9718477647501411, + "grad_norm": 3.1986353397369385, + "learning_rate": 4.154137702547578e-08, + "loss": 0.9879, + "step": 15500 + }, + { + "epoch": 0.9719104646059314, + "grad_norm": 3.6882176399230957, + "learning_rate": 4.135667104892105e-08, + "loss": 1.1186, + "step": 15501 + }, + { + "epoch": 0.9719731644617218, + "grad_norm": 3.2734508514404297, + "learning_rate": 4.117237576725108e-08, + "loss": 1.0223, + "step": 15502 + }, + { + "epoch": 0.9720358643175121, + "grad_norm": 3.4164302349090576, + "learning_rate": 4.098849118806758e-08, + "loss": 1.0895, + "step": 15503 + }, + { + "epoch": 0.9720985641733024, + "grad_norm": 3.7575161457061768, + "learning_rate": 4.0805017318954474e-08, + "loss": 1.1366, + "step": 15504 + }, + { + "epoch": 0.9721612640290928, + "grad_norm": 3.1306867599487305, + "learning_rate": 4.0621954167476827e-08, + "loss": 1.0378, + "step": 15505 + }, + { + "epoch": 0.9722239638848831, + "grad_norm": 2.990274667739868, + "learning_rate": 4.043930174118416e-08, + "loss": 1.2271, + "step": 15506 + }, + { + "epoch": 0.9722866637406734, + "grad_norm": 3.1733431816101074, + "learning_rate": 4.025706004760932e-08, + "loss": 1.2821, + "step": 15507 + }, + { + "epoch": 0.9723493635964637, + "grad_norm": 3.1614692211151123, + "learning_rate": 4.007522909426964e-08, + "loss": 1.1377, + "step": 15508 + }, + { + "epoch": 0.9724120634522541, + "grad_norm": 3.5770158767700195, + "learning_rate": 3.989380888866246e-08, + "loss": 1.1826, + "step": 15509 + }, + { + "epoch": 0.9724747633080444, + "grad_norm": 3.224686861038208, + "learning_rate": 3.9712799438268444e-08, + "loss": 1.0008, + "step": 15510 + }, + { + "epoch": 0.9725374631638347, + "grad_norm": 3.068866014480591, + "learning_rate": 3.953220075055386e-08, + "loss": 1.1828, + "step": 15511 + }, + { + "epoch": 0.972600163019625, + "grad_norm": 3.5680229663848877, + "learning_rate": 3.935201283296608e-08, + "loss": 1.0429, + "step": 15512 + }, + { + "epoch": 0.9726628628754154, + "grad_norm": 3.296643018722534, + "learning_rate": 3.9172235692936935e-08, + "loss": 1.1279, + "step": 15513 + }, + { + "epoch": 0.9727255627312057, + "grad_norm": 3.7489609718322754, + "learning_rate": 3.899286933787938e-08, + "loss": 1.1547, + "step": 15514 + }, + { + "epoch": 0.972788262586996, + "grad_norm": 3.0577242374420166, + "learning_rate": 3.881391377519084e-08, + "loss": 1.227, + "step": 15515 + }, + { + "epoch": 0.9728509624427863, + "grad_norm": 3.1388115882873535, + "learning_rate": 3.8635369012252064e-08, + "loss": 1.1495, + "step": 15516 + }, + { + "epoch": 0.9729136622985767, + "grad_norm": 3.2317254543304443, + "learning_rate": 
3.845723505642496e-08, + "loss": 1.0016, + "step": 15517 + }, + { + "epoch": 0.972976362154367, + "grad_norm": 3.4463651180267334, + "learning_rate": 3.827951191505697e-08, + "loss": 0.9502, + "step": 15518 + }, + { + "epoch": 0.9730390620101573, + "grad_norm": 3.387831211090088, + "learning_rate": 3.8102199595476675e-08, + "loss": 1.0678, + "step": 15519 + }, + { + "epoch": 0.9731017618659477, + "grad_norm": 3.3974528312683105, + "learning_rate": 3.792529810499712e-08, + "loss": 1.1392, + "step": 15520 + }, + { + "epoch": 0.9731644617217381, + "grad_norm": 3.378783702850342, + "learning_rate": 3.7748807450912475e-08, + "loss": 1.0958, + "step": 15521 + }, + { + "epoch": 0.9732271615775284, + "grad_norm": 2.8373024463653564, + "learning_rate": 3.757272764050357e-08, + "loss": 0.968, + "step": 15522 + }, + { + "epoch": 0.9732898614333187, + "grad_norm": 3.8651344776153564, + "learning_rate": 3.7397058681029054e-08, + "loss": 1.0803, + "step": 15523 + }, + { + "epoch": 0.9733525612891091, + "grad_norm": 3.4444923400878906, + "learning_rate": 3.7221800579735346e-08, + "loss": 0.9706, + "step": 15524 + }, + { + "epoch": 0.9734152611448994, + "grad_norm": 3.5384364128112793, + "learning_rate": 3.704695334385e-08, + "loss": 1.1448, + "step": 15525 + }, + { + "epoch": 0.9734779610006897, + "grad_norm": 3.597165107727051, + "learning_rate": 3.6872516980582806e-08, + "loss": 1.0238, + "step": 15526 + }, + { + "epoch": 0.9735406608564801, + "grad_norm": 3.559866189956665, + "learning_rate": 3.669849149712801e-08, + "loss": 1.1836, + "step": 15527 + }, + { + "epoch": 0.9736033607122704, + "grad_norm": 3.2227694988250732, + "learning_rate": 3.65248769006632e-08, + "loss": 1.0412, + "step": 15528 + }, + { + "epoch": 0.9736660605680607, + "grad_norm": 3.345844030380249, + "learning_rate": 3.635167319834709e-08, + "loss": 0.9912, + "step": 15529 + }, + { + "epoch": 0.973728760423851, + "grad_norm": 3.4300408363342285, + "learning_rate": 3.6178880397323976e-08, + "loss": 1.0781, + "step": 15530 + }, + { + "epoch": 0.9737914602796414, + "grad_norm": 3.3334243297576904, + "learning_rate": 3.600649850471705e-08, + "loss": 1.2334, + "step": 15531 + }, + { + "epoch": 0.9738541601354317, + "grad_norm": 3.082752227783203, + "learning_rate": 3.5834527527638384e-08, + "loss": 1.1447, + "step": 15532 + }, + { + "epoch": 0.973916859991222, + "grad_norm": 3.5605592727661133, + "learning_rate": 3.56629674731801e-08, + "loss": 0.8522, + "step": 15533 + }, + { + "epoch": 0.9739795598470123, + "grad_norm": 3.3614542484283447, + "learning_rate": 3.549181834841431e-08, + "loss": 0.9554, + "step": 15534 + }, + { + "epoch": 0.9740422597028027, + "grad_norm": 3.8744378089904785, + "learning_rate": 3.5321080160402035e-08, + "loss": 0.887, + "step": 15535 + }, + { + "epoch": 0.974104959558593, + "grad_norm": 3.0967934131622314, + "learning_rate": 3.51507529161832e-08, + "loss": 1.0185, + "step": 15536 + }, + { + "epoch": 0.9741676594143833, + "grad_norm": 3.6444969177246094, + "learning_rate": 3.4980836622783285e-08, + "loss": 0.9686, + "step": 15537 + }, + { + "epoch": 0.9742303592701737, + "grad_norm": 3.4602622985839844, + "learning_rate": 3.481133128720782e-08, + "loss": 0.9709, + "step": 15538 + }, + { + "epoch": 0.974293059125964, + "grad_norm": 3.2861788272857666, + "learning_rate": 3.4642236916447856e-08, + "loss": 1.1251, + "step": 15539 + }, + { + "epoch": 0.9743557589817543, + "grad_norm": 3.7498865127563477, + "learning_rate": 3.447355351747894e-08, + "loss": 0.9826, + "step": 15540 + }, + { + "epoch": 
0.9744184588375446, + "grad_norm": 3.3992035388946533, + "learning_rate": 3.430528109725439e-08, + "loss": 0.9997, + "step": 15541 + }, + { + "epoch": 0.974481158693335, + "grad_norm": 3.5911922454833984, + "learning_rate": 3.413741966271533e-08, + "loss": 0.9806, + "step": 15542 + }, + { + "epoch": 0.9745438585491253, + "grad_norm": 3.1207892894744873, + "learning_rate": 3.3969969220785106e-08, + "loss": 1.1528, + "step": 15543 + }, + { + "epoch": 0.9746065584049157, + "grad_norm": 3.47607684135437, + "learning_rate": 3.380292977836819e-08, + "loss": 1.2195, + "step": 15544 + }, + { + "epoch": 0.974669258260706, + "grad_norm": 3.3527212142944336, + "learning_rate": 3.363630134235463e-08, + "loss": 0.9087, + "step": 15545 + }, + { + "epoch": 0.9747319581164964, + "grad_norm": 3.7199501991271973, + "learning_rate": 3.347008391961337e-08, + "loss": 1.0036, + "step": 15546 + }, + { + "epoch": 0.9747946579722867, + "grad_norm": 3.8357343673706055, + "learning_rate": 3.330427751700227e-08, + "loss": 1.1325, + "step": 15547 + }, + { + "epoch": 0.974857357828077, + "grad_norm": 3.436469554901123, + "learning_rate": 3.313888214135808e-08, + "loss": 1.0789, + "step": 15548 + }, + { + "epoch": 0.9749200576838674, + "grad_norm": 3.2122995853424072, + "learning_rate": 3.2973897799500884e-08, + "loss": 1.1003, + "step": 15549 + }, + { + "epoch": 0.9749827575396577, + "grad_norm": 3.225123405456543, + "learning_rate": 3.2809324498235265e-08, + "loss": 1.1455, + "step": 15550 + }, + { + "epoch": 0.975045457395448, + "grad_norm": 3.472608804702759, + "learning_rate": 3.2645162244349104e-08, + "loss": 1.1962, + "step": 15551 + }, + { + "epoch": 0.9751081572512383, + "grad_norm": 3.1868293285369873, + "learning_rate": 3.248141104461256e-08, + "loss": 1.1516, + "step": 15552 + }, + { + "epoch": 0.9751708571070287, + "grad_norm": 3.743821382522583, + "learning_rate": 3.231807090577577e-08, + "loss": 1.1114, + "step": 15553 + }, + { + "epoch": 0.975233556962819, + "grad_norm": 3.096194267272949, + "learning_rate": 3.2155141834578904e-08, + "loss": 1.1352, + "step": 15554 + }, + { + "epoch": 0.9752962568186093, + "grad_norm": 3.420814275741577, + "learning_rate": 3.199262383773882e-08, + "loss": 1.1763, + "step": 15555 + }, + { + "epoch": 0.9753589566743996, + "grad_norm": 3.3700828552246094, + "learning_rate": 3.1830516921957935e-08, + "loss": 1.0104, + "step": 15556 + }, + { + "epoch": 0.97542165653019, + "grad_norm": 3.469954252243042, + "learning_rate": 3.166882109392089e-08, + "loss": 0.9991, + "step": 15557 + }, + { + "epoch": 0.9754843563859803, + "grad_norm": 3.3651983737945557, + "learning_rate": 3.150753636029902e-08, + "loss": 1.1126, + "step": 15558 + }, + { + "epoch": 0.9755470562417706, + "grad_norm": 3.3716742992401123, + "learning_rate": 3.134666272774034e-08, + "loss": 1.0545, + "step": 15559 + }, + { + "epoch": 0.975609756097561, + "grad_norm": 3.569272041320801, + "learning_rate": 3.118620020288066e-08, + "loss": 1.0288, + "step": 15560 + }, + { + "epoch": 0.9756724559533513, + "grad_norm": 3.682577133178711, + "learning_rate": 3.102614879233801e-08, + "loss": 1.0329, + "step": 15561 + }, + { + "epoch": 0.9757351558091416, + "grad_norm": 3.6167187690734863, + "learning_rate": 3.0866508502711556e-08, + "loss": 1.1173, + "step": 15562 + }, + { + "epoch": 0.9757978556649319, + "grad_norm": 3.2090377807617188, + "learning_rate": 3.0707279340586037e-08, + "loss": 1.0395, + "step": 15563 + }, + { + "epoch": 0.9758605555207223, + "grad_norm": 4.008094310760498, + "learning_rate": 
3.054846131252731e-08, + "loss": 0.8331, + "step": 15564 + }, + { + "epoch": 0.9759232553765126, + "grad_norm": 3.9300289154052734, + "learning_rate": 3.03900544250868e-08, + "loss": 1.1716, + "step": 15565 + }, + { + "epoch": 0.9759859552323029, + "grad_norm": 3.249243974685669, + "learning_rate": 3.023205868479484e-08, + "loss": 1.0088, + "step": 15566 + }, + { + "epoch": 0.9760486550880932, + "grad_norm": 3.243601083755493, + "learning_rate": 3.007447409816844e-08, + "loss": 1.3435, + "step": 15567 + }, + { + "epoch": 0.9761113549438837, + "grad_norm": 3.6180613040924072, + "learning_rate": 2.991730067170462e-08, + "loss": 1.0061, + "step": 15568 + }, + { + "epoch": 0.976174054799674, + "grad_norm": 3.3154404163360596, + "learning_rate": 2.9760538411888196e-08, + "loss": 1.0477, + "step": 15569 + }, + { + "epoch": 0.9762367546554643, + "grad_norm": 3.223971366882324, + "learning_rate": 2.960418732518289e-08, + "loss": 1.2312, + "step": 15570 + }, + { + "epoch": 0.9762994545112547, + "grad_norm": 3.6402251720428467, + "learning_rate": 2.944824741803576e-08, + "loss": 1.0467, + "step": 15571 + }, + { + "epoch": 0.976362154367045, + "grad_norm": 3.502255916595459, + "learning_rate": 2.929271869687833e-08, + "loss": 0.9851, + "step": 15572 + }, + { + "epoch": 0.9764248542228353, + "grad_norm": 3.6805896759033203, + "learning_rate": 2.9137601168124365e-08, + "loss": 1.1654, + "step": 15573 + }, + { + "epoch": 0.9764875540786256, + "grad_norm": 3.3529114723205566, + "learning_rate": 2.898289483817096e-08, + "loss": 1.0587, + "step": 15574 + }, + { + "epoch": 0.976550253934416, + "grad_norm": 4.104887008666992, + "learning_rate": 2.8828599713398575e-08, + "loss": 0.9945, + "step": 15575 + }, + { + "epoch": 0.9766129537902063, + "grad_norm": 3.2594079971313477, + "learning_rate": 2.8674715800171004e-08, + "loss": 1.1288, + "step": 15576 + }, + { + "epoch": 0.9766756536459966, + "grad_norm": 3.4077069759368896, + "learning_rate": 2.8521243104833174e-08, + "loss": 1.0742, + "step": 15577 + }, + { + "epoch": 0.976738353501787, + "grad_norm": 3.447064161300659, + "learning_rate": 2.8368181633714464e-08, + "loss": 0.9217, + "step": 15578 + }, + { + "epoch": 0.9768010533575773, + "grad_norm": 3.775094509124756, + "learning_rate": 2.8215531393126495e-08, + "loss": 1.0072, + "step": 15579 + }, + { + "epoch": 0.9768637532133676, + "grad_norm": 3.277939558029175, + "learning_rate": 2.8063292389367558e-08, + "loss": 0.9866, + "step": 15580 + }, + { + "epoch": 0.9769264530691579, + "grad_norm": 3.5665903091430664, + "learning_rate": 2.7911464628712637e-08, + "loss": 0.9227, + "step": 15581 + }, + { + "epoch": 0.9769891529249483, + "grad_norm": 3.96260142326355, + "learning_rate": 2.77600481174245e-08, + "loss": 0.9379, + "step": 15582 + }, + { + "epoch": 0.9770518527807386, + "grad_norm": 3.55336856842041, + "learning_rate": 2.7609042861747037e-08, + "loss": 0.8856, + "step": 15583 + }, + { + "epoch": 0.9771145526365289, + "grad_norm": 3.3854012489318848, + "learning_rate": 2.7458448867908604e-08, + "loss": 0.9121, + "step": 15584 + }, + { + "epoch": 0.9771772524923192, + "grad_norm": 3.9305765628814697, + "learning_rate": 2.7308266142119788e-08, + "loss": 0.9702, + "step": 15585 + }, + { + "epoch": 0.9772399523481096, + "grad_norm": 3.0069634914398193, + "learning_rate": 2.7158494690572303e-08, + "loss": 1.2113, + "step": 15586 + }, + { + "epoch": 0.9773026522038999, + "grad_norm": 3.687711715698242, + "learning_rate": 2.7009134519445645e-08, + "loss": 0.9988, + "step": 15587 + }, + { + "epoch": 
0.9773653520596902, + "grad_norm": 3.5466439723968506, + "learning_rate": 2.686018563489712e-08, + "loss": 1.2666, + "step": 15588 + }, + { + "epoch": 0.9774280519154805, + "grad_norm": 3.486964702606201, + "learning_rate": 2.6711648043069582e-08, + "loss": 1.1465, + "step": 15589 + }, + { + "epoch": 0.9774907517712709, + "grad_norm": 3.4376590251922607, + "learning_rate": 2.656352175008925e-08, + "loss": 1.1681, + "step": 15590 + }, + { + "epoch": 0.9775534516270613, + "grad_norm": 3.2415616512298584, + "learning_rate": 2.6415806762065677e-08, + "loss": 1.2211, + "step": 15591 + }, + { + "epoch": 0.9776161514828516, + "grad_norm": 3.467559576034546, + "learning_rate": 2.6268503085089547e-08, + "loss": 1.1814, + "step": 15592 + }, + { + "epoch": 0.977678851338642, + "grad_norm": 4.082385540008545, + "learning_rate": 2.6121610725234892e-08, + "loss": 0.9898, + "step": 15593 + }, + { + "epoch": 0.9777415511944323, + "grad_norm": 3.3234004974365234, + "learning_rate": 2.5975129688561306e-08, + "loss": 1.1645, + "step": 15594 + }, + { + "epoch": 0.9778042510502226, + "grad_norm": 3.5289456844329834, + "learning_rate": 2.5829059981109516e-08, + "loss": 1.0248, + "step": 15595 + }, + { + "epoch": 0.9778669509060129, + "grad_norm": 3.56825852394104, + "learning_rate": 2.568340160890137e-08, + "loss": 0.9511, + "step": 15596 + }, + { + "epoch": 0.9779296507618033, + "grad_norm": 3.809657335281372, + "learning_rate": 2.5538154577946504e-08, + "loss": 1.0109, + "step": 15597 + }, + { + "epoch": 0.9779923506175936, + "grad_norm": 3.4443466663360596, + "learning_rate": 2.5393318894233464e-08, + "loss": 1.1072, + "step": 15598 + }, + { + "epoch": 0.9780550504733839, + "grad_norm": 3.4189956188201904, + "learning_rate": 2.524889456373525e-08, + "loss": 1.1066, + "step": 15599 + }, + { + "epoch": 0.9781177503291743, + "grad_norm": 3.6206812858581543, + "learning_rate": 2.5104881592409315e-08, + "loss": 1.1159, + "step": 15600 + }, + { + "epoch": 0.9781804501849646, + "grad_norm": 3.4594435691833496, + "learning_rate": 2.496127998619202e-08, + "loss": 0.951, + "step": 15601 + }, + { + "epoch": 0.9782431500407549, + "grad_norm": 3.6084609031677246, + "learning_rate": 2.4818089751007525e-08, + "loss": 0.8952, + "step": 15602 + }, + { + "epoch": 0.9783058498965452, + "grad_norm": 3.4960484504699707, + "learning_rate": 2.4675310892762205e-08, + "loss": 1.1627, + "step": 15603 + }, + { + "epoch": 0.9783685497523356, + "grad_norm": 3.3640573024749756, + "learning_rate": 2.453294341734025e-08, + "loss": 1.1208, + "step": 15604 + }, + { + "epoch": 0.9784312496081259, + "grad_norm": 3.255600690841675, + "learning_rate": 2.4390987330616956e-08, + "loss": 1.0419, + "step": 15605 + }, + { + "epoch": 0.9784939494639162, + "grad_norm": 3.3623621463775635, + "learning_rate": 2.424944263844542e-08, + "loss": 1.1095, + "step": 15606 + }, + { + "epoch": 0.9785566493197065, + "grad_norm": 3.5918197631835938, + "learning_rate": 2.4108309346660975e-08, + "loss": 1.1038, + "step": 15607 + }, + { + "epoch": 0.9786193491754969, + "grad_norm": 3.592999219894409, + "learning_rate": 2.396758746108674e-08, + "loss": 1.0066, + "step": 15608 + }, + { + "epoch": 0.9786820490312872, + "grad_norm": 3.6010961532592773, + "learning_rate": 2.3827276987524738e-08, + "loss": 1.1263, + "step": 15609 + }, + { + "epoch": 0.9787447488870775, + "grad_norm": 3.5617568492889404, + "learning_rate": 2.3687377931760346e-08, + "loss": 1.0626, + "step": 15610 + }, + { + "epoch": 0.9788074487428678, + "grad_norm": 3.454930067062378, + "learning_rate": 
2.354789029956561e-08, + "loss": 1.0808, + "step": 15611 + }, + { + "epoch": 0.9788701485986582, + "grad_norm": 3.2559220790863037, + "learning_rate": 2.340881409669038e-08, + "loss": 1.0976, + "step": 15612 + }, + { + "epoch": 0.9789328484544485, + "grad_norm": 3.404548406600952, + "learning_rate": 2.327014932887228e-08, + "loss": 0.9909, + "step": 15613 + }, + { + "epoch": 0.9789955483102389, + "grad_norm": 3.114657402038574, + "learning_rate": 2.313189600182786e-08, + "loss": 1.0755, + "step": 15614 + }, + { + "epoch": 0.9790582481660293, + "grad_norm": 3.374600410461426, + "learning_rate": 2.299405412126032e-08, + "loss": 1.099, + "step": 15615 + }, + { + "epoch": 0.9791209480218196, + "grad_norm": 3.3406810760498047, + "learning_rate": 2.2856623692854018e-08, + "loss": 1.0769, + "step": 15616 + }, + { + "epoch": 0.9791836478776099, + "grad_norm": 3.3087399005889893, + "learning_rate": 2.2719604722275523e-08, + "loss": 0.9468, + "step": 15617 + }, + { + "epoch": 0.9792463477334002, + "grad_norm": 3.414395570755005, + "learning_rate": 2.2582997215176984e-08, + "loss": 1.0981, + "step": 15618 + }, + { + "epoch": 0.9793090475891906, + "grad_norm": 3.3536782264709473, + "learning_rate": 2.244680117719056e-08, + "loss": 1.1954, + "step": 15619 + }, + { + "epoch": 0.9793717474449809, + "grad_norm": 3.697340965270996, + "learning_rate": 2.2311016613935087e-08, + "loss": 0.9698, + "step": 15620 + }, + { + "epoch": 0.9794344473007712, + "grad_norm": 3.421616554260254, + "learning_rate": 2.2175643531008318e-08, + "loss": 1.1388, + "step": 15621 + }, + { + "epoch": 0.9794971471565616, + "grad_norm": 3.3796849250793457, + "learning_rate": 2.2040681933993558e-08, + "loss": 1.158, + "step": 15622 + }, + { + "epoch": 0.9795598470123519, + "grad_norm": 3.892340660095215, + "learning_rate": 2.1906131828456356e-08, + "loss": 1.1309, + "step": 15623 + }, + { + "epoch": 0.9796225468681422, + "grad_norm": 3.6955668926239014, + "learning_rate": 2.177199321994672e-08, + "loss": 1.1185, + "step": 15624 + }, + { + "epoch": 0.9796852467239325, + "grad_norm": 3.7515599727630615, + "learning_rate": 2.1638266113995776e-08, + "loss": 0.9797, + "step": 15625 + }, + { + "epoch": 0.9797479465797229, + "grad_norm": 3.772948980331421, + "learning_rate": 2.1504950516118007e-08, + "loss": 0.9624, + "step": 15626 + }, + { + "epoch": 0.9798106464355132, + "grad_norm": 3.4150025844573975, + "learning_rate": 2.1372046431812343e-08, + "loss": 0.9556, + "step": 15627 + }, + { + "epoch": 0.9798733462913035, + "grad_norm": 3.789712429046631, + "learning_rate": 2.123955386655885e-08, + "loss": 1.1572, + "step": 15628 + }, + { + "epoch": 0.9799360461470938, + "grad_norm": 3.5895040035247803, + "learning_rate": 2.110747282582204e-08, + "loss": 1.0194, + "step": 15629 + }, + { + "epoch": 0.9799987460028842, + "grad_norm": 3.469980478286743, + "learning_rate": 2.097580331504978e-08, + "loss": 1.0843, + "step": 15630 + }, + { + "epoch": 0.9800614458586745, + "grad_norm": 2.992981433868408, + "learning_rate": 2.0844545339669952e-08, + "loss": 1.1807, + "step": 15631 + }, + { + "epoch": 0.9801241457144648, + "grad_norm": 3.6501107215881348, + "learning_rate": 2.071369890509711e-08, + "loss": 1.0788, + "step": 15632 + }, + { + "epoch": 0.9801868455702551, + "grad_norm": 3.4128994941711426, + "learning_rate": 2.0583264016726946e-08, + "loss": 0.9815, + "step": 15633 + }, + { + "epoch": 0.9802495454260455, + "grad_norm": 3.6957502365112305, + "learning_rate": 2.045324067993959e-08, + "loss": 0.9418, + "step": 15634 + }, + { + "epoch": 
0.9803122452818358, + "grad_norm": 2.9663655757904053, + "learning_rate": 2.0323628900096313e-08, + "loss": 1.0439, + "step": 15635 + }, + { + "epoch": 0.9803749451376261, + "grad_norm": 3.4354779720306396, + "learning_rate": 2.0194428682541733e-08, + "loss": 0.9983, + "step": 15636 + }, + { + "epoch": 0.9804376449934166, + "grad_norm": 3.421154260635376, + "learning_rate": 2.0065640032604916e-08, + "loss": 1.0165, + "step": 15637 + }, + { + "epoch": 0.9805003448492069, + "grad_norm": 3.293213129043579, + "learning_rate": 1.993726295559717e-08, + "loss": 1.0448, + "step": 15638 + }, + { + "epoch": 0.9805630447049972, + "grad_norm": 3.321227788925171, + "learning_rate": 1.980929745681204e-08, + "loss": 1.0616, + "step": 15639 + }, + { + "epoch": 0.9806257445607875, + "grad_norm": 3.6864495277404785, + "learning_rate": 1.968174354152752e-08, + "loss": 1.0984, + "step": 15640 + }, + { + "epoch": 0.9806884444165779, + "grad_norm": 3.5456414222717285, + "learning_rate": 1.9554601215003856e-08, + "loss": 1.0992, + "step": 15641 + }, + { + "epoch": 0.9807511442723682, + "grad_norm": 3.1757049560546875, + "learning_rate": 1.9427870482484623e-08, + "loss": 1.1273, + "step": 15642 + }, + { + "epoch": 0.9808138441281585, + "grad_norm": 3.265087127685547, + "learning_rate": 1.9301551349195648e-08, + "loss": 1.14, + "step": 15643 + }, + { + "epoch": 0.9808765439839489, + "grad_norm": 3.520554780960083, + "learning_rate": 1.917564382034609e-08, + "loss": 0.9779, + "step": 15644 + }, + { + "epoch": 0.9809392438397392, + "grad_norm": 3.0988290309906006, + "learning_rate": 1.905014790112958e-08, + "loss": 1.1447, + "step": 15645 + }, + { + "epoch": 0.9810019436955295, + "grad_norm": 3.4394943714141846, + "learning_rate": 1.892506359672086e-08, + "loss": 0.9967, + "step": 15646 + }, + { + "epoch": 0.9810646435513198, + "grad_norm": 3.1594927310943604, + "learning_rate": 1.8800390912278034e-08, + "loss": 1.2095, + "step": 15647 + }, + { + "epoch": 0.9811273434071102, + "grad_norm": 3.5364668369293213, + "learning_rate": 1.8676129852942537e-08, + "loss": 1.0365, + "step": 15648 + }, + { + "epoch": 0.9811900432629005, + "grad_norm": 3.526364803314209, + "learning_rate": 1.8552280423839163e-08, + "loss": 1.0016, + "step": 15649 + }, + { + "epoch": 0.9812527431186908, + "grad_norm": 3.326098680496216, + "learning_rate": 1.842884263007716e-08, + "loss": 1.1756, + "step": 15650 + }, + { + "epoch": 0.9813154429744811, + "grad_norm": 3.2398414611816406, + "learning_rate": 1.8305816476744675e-08, + "loss": 1.11, + "step": 15651 + }, + { + "epoch": 0.9813781428302715, + "grad_norm": 2.9818031787872314, + "learning_rate": 1.8183201968915432e-08, + "loss": 1.2123, + "step": 15652 + }, + { + "epoch": 0.9814408426860618, + "grad_norm": 3.697113037109375, + "learning_rate": 1.8060999111647603e-08, + "loss": 0.9993, + "step": 15653 + }, + { + "epoch": 0.9815035425418521, + "grad_norm": 3.4162192344665527, + "learning_rate": 1.7939207909980494e-08, + "loss": 1.0149, + "step": 15654 + }, + { + "epoch": 0.9815662423976425, + "grad_norm": 3.783313274383545, + "learning_rate": 1.7817828368935642e-08, + "loss": 1.002, + "step": 15655 + }, + { + "epoch": 0.9816289422534328, + "grad_norm": 3.205760955810547, + "learning_rate": 1.769686049352015e-08, + "loss": 1.0906, + "step": 15656 + }, + { + "epoch": 0.9816916421092231, + "grad_norm": 3.1543655395507812, + "learning_rate": 1.7576304288721145e-08, + "loss": 1.195, + "step": 15657 + }, + { + "epoch": 0.9817543419650134, + "grad_norm": 3.4479238986968994, + "learning_rate": 
1.7456159759512426e-08, + "loss": 0.9023, + "step": 15658 + }, + { + "epoch": 0.9818170418208038, + "grad_norm": 3.2887301445007324, + "learning_rate": 1.7336426910846693e-08, + "loss": 1.0101, + "step": 15659 + }, + { + "epoch": 0.9818797416765942, + "grad_norm": 3.653118848800659, + "learning_rate": 1.721710574766333e-08, + "loss": 1.0327, + "step": 15660 + }, + { + "epoch": 0.9819424415323845, + "grad_norm": 3.551976203918457, + "learning_rate": 1.709819627488174e-08, + "loss": 1.1613, + "step": 15661 + }, + { + "epoch": 0.9820051413881749, + "grad_norm": 3.2459607124328613, + "learning_rate": 1.6979698497406883e-08, + "loss": 1.1005, + "step": 15662 + }, + { + "epoch": 0.9820678412439652, + "grad_norm": 3.8678927421569824, + "learning_rate": 1.6861612420124852e-08, + "loss": 0.9486, + "step": 15663 + }, + { + "epoch": 0.9821305410997555, + "grad_norm": 3.329923152923584, + "learning_rate": 1.6743938047906193e-08, + "loss": 1.0711, + "step": 15664 + }, + { + "epoch": 0.9821932409555458, + "grad_norm": 3.5742380619049072, + "learning_rate": 1.6626675385603695e-08, + "loss": 1.1827, + "step": 15665 + }, + { + "epoch": 0.9822559408113362, + "grad_norm": 3.570347547531128, + "learning_rate": 1.650982443805349e-08, + "loss": 1.1865, + "step": 15666 + }, + { + "epoch": 0.9823186406671265, + "grad_norm": 3.3778045177459717, + "learning_rate": 1.6393385210072833e-08, + "loss": 1.1124, + "step": 15667 + }, + { + "epoch": 0.9823813405229168, + "grad_norm": 3.7470850944519043, + "learning_rate": 1.6277357706465658e-08, + "loss": 0.9184, + "step": 15668 + }, + { + "epoch": 0.9824440403787071, + "grad_norm": 4.011614799499512, + "learning_rate": 1.6161741932017026e-08, + "loss": 0.9152, + "step": 15669 + }, + { + "epoch": 0.9825067402344975, + "grad_norm": 3.725595235824585, + "learning_rate": 1.604653789149313e-08, + "loss": 0.9735, + "step": 15670 + }, + { + "epoch": 0.9825694400902878, + "grad_norm": 3.286186456680298, + "learning_rate": 1.593174558964572e-08, + "loss": 1.0951, + "step": 15671 + }, + { + "epoch": 0.9826321399460781, + "grad_norm": 3.459134101867676, + "learning_rate": 1.5817365031209898e-08, + "loss": 1.0766, + "step": 15672 + }, + { + "epoch": 0.9826948398018684, + "grad_norm": 2.9882400035858154, + "learning_rate": 1.570339622090189e-08, + "loss": 1.083, + "step": 15673 + }, + { + "epoch": 0.9827575396576588, + "grad_norm": 3.6024668216705322, + "learning_rate": 1.5589839163421274e-08, + "loss": 1.1843, + "step": 15674 + }, + { + "epoch": 0.9828202395134491, + "grad_norm": 3.4293243885040283, + "learning_rate": 1.5476693863452074e-08, + "loss": 1.1915, + "step": 15675 + }, + { + "epoch": 0.9828829393692394, + "grad_norm": 3.588135242462158, + "learning_rate": 1.5363960325660565e-08, + "loss": 1.0383, + "step": 15676 + }, + { + "epoch": 0.9829456392250298, + "grad_norm": 3.2409098148345947, + "learning_rate": 1.5251638554694137e-08, + "loss": 1.1698, + "step": 15677 + }, + { + "epoch": 0.9830083390808201, + "grad_norm": 3.143085479736328, + "learning_rate": 1.513972855518797e-08, + "loss": 1.086, + "step": 15678 + }, + { + "epoch": 0.9830710389366104, + "grad_norm": 3.735368013381958, + "learning_rate": 1.5028230331753935e-08, + "loss": 1.0113, + "step": 15679 + }, + { + "epoch": 0.9831337387924007, + "grad_norm": 7.535407543182373, + "learning_rate": 1.491714388899168e-08, + "loss": 1.2003, + "step": 15680 + }, + { + "epoch": 0.9831964386481911, + "grad_norm": 3.2763774394989014, + "learning_rate": 1.48064692314831e-08, + "loss": 1.0957, + "step": 15681 + }, + { + "epoch": 
0.9832591385039814, + "grad_norm": 3.5015640258789062, + "learning_rate": 1.469620636379232e-08, + "loss": 1.2057, + "step": 15682 + }, + { + "epoch": 0.9833218383597718, + "grad_norm": 3.544616937637329, + "learning_rate": 1.4586355290464593e-08, + "loss": 1.0559, + "step": 15683 + }, + { + "epoch": 0.9833845382155622, + "grad_norm": 3.311112642288208, + "learning_rate": 1.447691601603296e-08, + "loss": 1.2629, + "step": 15684 + }, + { + "epoch": 0.9834472380713525, + "grad_norm": 3.6587204933166504, + "learning_rate": 1.4367888545008258e-08, + "loss": 0.9064, + "step": 15685 + }, + { + "epoch": 0.9835099379271428, + "grad_norm": 3.397183895111084, + "learning_rate": 1.4259272881889109e-08, + "loss": 1.0978, + "step": 15686 + }, + { + "epoch": 0.9835726377829331, + "grad_norm": 3.2899169921875, + "learning_rate": 1.4151069031153042e-08, + "loss": 1.0572, + "step": 15687 + }, + { + "epoch": 0.9836353376387235, + "grad_norm": 3.486558198928833, + "learning_rate": 1.4043276997262045e-08, + "loss": 1.1796, + "step": 15688 + }, + { + "epoch": 0.9836980374945138, + "grad_norm": 3.786825656890869, + "learning_rate": 1.3935896784663671e-08, + "loss": 1.1964, + "step": 15689 + }, + { + "epoch": 0.9837607373503041, + "grad_norm": 3.592642068862915, + "learning_rate": 1.382892839778438e-08, + "loss": 1.0716, + "step": 15690 + }, + { + "epoch": 0.9838234372060944, + "grad_norm": 3.752361536026001, + "learning_rate": 1.3722371841037307e-08, + "loss": 1.0441, + "step": 15691 + }, + { + "epoch": 0.9838861370618848, + "grad_norm": 3.259214162826538, + "learning_rate": 1.3616227118814495e-08, + "loss": 1.124, + "step": 15692 + }, + { + "epoch": 0.9839488369176751, + "grad_norm": 3.44797682762146, + "learning_rate": 1.3510494235494664e-08, + "loss": 1.1702, + "step": 15693 + }, + { + "epoch": 0.9840115367734654, + "grad_norm": 3.05049204826355, + "learning_rate": 1.340517319543877e-08, + "loss": 1.028, + "step": 15694 + }, + { + "epoch": 0.9840742366292557, + "grad_norm": 3.4915571212768555, + "learning_rate": 1.3300264002988894e-08, + "loss": 1.1824, + "step": 15695 + }, + { + "epoch": 0.9841369364850461, + "grad_norm": 3.4459385871887207, + "learning_rate": 1.3195766662472686e-08, + "loss": 1.0917, + "step": 15696 + }, + { + "epoch": 0.9841996363408364, + "grad_norm": 3.490994691848755, + "learning_rate": 1.3091681178198922e-08, + "loss": 1.159, + "step": 15697 + }, + { + "epoch": 0.9842623361966267, + "grad_norm": 3.123448133468628, + "learning_rate": 1.2988007554460835e-08, + "loss": 1.1329, + "step": 15698 + }, + { + "epoch": 0.9843250360524171, + "grad_norm": 3.7644426822662354, + "learning_rate": 1.2884745795532782e-08, + "loss": 1.124, + "step": 15699 + }, + { + "epoch": 0.9843877359082074, + "grad_norm": 3.303434133529663, + "learning_rate": 1.278189590567469e-08, + "loss": 1.0961, + "step": 15700 + }, + { + "epoch": 0.9844504357639977, + "grad_norm": 3.2783474922180176, + "learning_rate": 1.26794578891265e-08, + "loss": 1.1213, + "step": 15701 + }, + { + "epoch": 0.984513135619788, + "grad_norm": 3.1664819717407227, + "learning_rate": 1.2577431750114833e-08, + "loss": 1.0582, + "step": 15702 + }, + { + "epoch": 0.9845758354755784, + "grad_norm": 3.408539056777954, + "learning_rate": 1.247581749284521e-08, + "loss": 1.029, + "step": 15703 + }, + { + "epoch": 0.9846385353313687, + "grad_norm": 3.6416845321655273, + "learning_rate": 1.2374615121508726e-08, + "loss": 1.0442, + "step": 15704 + }, + { + "epoch": 0.984701235187159, + "grad_norm": 3.7451441287994385, + "learning_rate": 
1.227382464027982e-08, + "loss": 1.0821, + "step": 15705 + }, + { + "epoch": 0.9847639350429495, + "grad_norm": 3.5509610176086426, + "learning_rate": 1.2173446053314053e-08, + "loss": 1.1844, + "step": 15706 + }, + { + "epoch": 0.9848266348987398, + "grad_norm": 3.714250326156616, + "learning_rate": 1.2073479364752561e-08, + "loss": 1.1497, + "step": 15707 + }, + { + "epoch": 0.9848893347545301, + "grad_norm": 3.0574729442596436, + "learning_rate": 1.197392457871649e-08, + "loss": 1.1318, + "step": 15708 + }, + { + "epoch": 0.9849520346103204, + "grad_norm": 2.8359475135803223, + "learning_rate": 1.1874781699311444e-08, + "loss": 1.1328, + "step": 15709 + }, + { + "epoch": 0.9850147344661108, + "grad_norm": 3.315493583679199, + "learning_rate": 1.1776050730626376e-08, + "loss": 1.0075, + "step": 15710 + }, + { + "epoch": 0.9850774343219011, + "grad_norm": 3.616457223892212, + "learning_rate": 1.1677731676733584e-08, + "loss": 1.1781, + "step": 15711 + }, + { + "epoch": 0.9851401341776914, + "grad_norm": 3.0778329372406006, + "learning_rate": 1.15798245416876e-08, + "loss": 1.1343, + "step": 15712 + }, + { + "epoch": 0.9852028340334817, + "grad_norm": 3.4759068489074707, + "learning_rate": 1.148232932952631e-08, + "loss": 1.0647, + "step": 15713 + }, + { + "epoch": 0.9852655338892721, + "grad_norm": 3.312685251235962, + "learning_rate": 1.1385246044268716e-08, + "loss": 1.1245, + "step": 15714 + }, + { + "epoch": 0.9853282337450624, + "grad_norm": 3.6588995456695557, + "learning_rate": 1.1288574689920506e-08, + "loss": 0.9712, + "step": 15715 + }, + { + "epoch": 0.9853909336008527, + "grad_norm": 3.5682520866394043, + "learning_rate": 1.119231527046738e-08, + "loss": 0.9103, + "step": 15716 + }, + { + "epoch": 0.985453633456643, + "grad_norm": 3.197352647781372, + "learning_rate": 1.1096467789880606e-08, + "loss": 1.1668, + "step": 15717 + }, + { + "epoch": 0.9855163333124334, + "grad_norm": 3.309753656387329, + "learning_rate": 1.1001032252110356e-08, + "loss": 1.0099, + "step": 15718 + }, + { + "epoch": 0.9855790331682237, + "grad_norm": 3.341500997543335, + "learning_rate": 1.0906008661093482e-08, + "loss": 1.1763, + "step": 15719 + }, + { + "epoch": 0.985641733024014, + "grad_norm": 3.145169973373413, + "learning_rate": 1.081139702075018e-08, + "loss": 1.1353, + "step": 15720 + }, + { + "epoch": 0.9857044328798044, + "grad_norm": 3.754523754119873, + "learning_rate": 1.0717197334980667e-08, + "loss": 1.024, + "step": 15721 + }, + { + "epoch": 0.9857671327355947, + "grad_norm": 3.4427878856658936, + "learning_rate": 1.062340960766961e-08, + "loss": 1.1082, + "step": 15722 + }, + { + "epoch": 0.985829832591385, + "grad_norm": 3.505979537963867, + "learning_rate": 1.0530033842686138e-08, + "loss": 1.1256, + "step": 15723 + }, + { + "epoch": 0.9858925324471753, + "grad_norm": 4.010402202606201, + "learning_rate": 1.0437070043879394e-08, + "loss": 0.9696, + "step": 15724 + }, + { + "epoch": 0.9859552323029657, + "grad_norm": 3.6460111141204834, + "learning_rate": 1.0344518215085198e-08, + "loss": 1.0134, + "step": 15725 + }, + { + "epoch": 0.986017932158756, + "grad_norm": 3.712817430496216, + "learning_rate": 1.0252378360118276e-08, + "loss": 0.9487, + "step": 15726 + }, + { + "epoch": 0.9860806320145463, + "grad_norm": 3.249812364578247, + "learning_rate": 1.0160650482781142e-08, + "loss": 1.241, + "step": 15727 + }, + { + "epoch": 0.9861433318703366, + "grad_norm": 3.0207772254943848, + "learning_rate": 1.0069334586854106e-08, + "loss": 1.1623, + "step": 15728 + }, + { + "epoch": 
0.986206031726127, + "grad_norm": 3.719963312149048, + "learning_rate": 9.978430676103047e-09, + "loss": 1.0525, + "step": 15729 + }, + { + "epoch": 0.9862687315819174, + "grad_norm": 3.1028099060058594, + "learning_rate": 9.887938754278292e-09, + "loss": 1.1632, + "step": 15730 + }, + { + "epoch": 0.9863314314377077, + "grad_norm": 3.5440196990966797, + "learning_rate": 9.797858825112417e-09, + "loss": 1.0539, + "step": 15731 + }, + { + "epoch": 0.9863941312934981, + "grad_norm": 3.4245851039886475, + "learning_rate": 9.708190892318003e-09, + "loss": 1.1606, + "step": 15732 + }, + { + "epoch": 0.9864568311492884, + "grad_norm": 3.4576897621154785, + "learning_rate": 9.618934959594317e-09, + "loss": 1.0322, + "step": 15733 + }, + { + "epoch": 0.9865195310050787, + "grad_norm": 3.4217512607574463, + "learning_rate": 9.530091030621746e-09, + "loss": 0.9661, + "step": 15734 + }, + { + "epoch": 0.986582230860869, + "grad_norm": 3.0061843395233154, + "learning_rate": 9.441659109065138e-09, + "loss": 1.1338, + "step": 15735 + }, + { + "epoch": 0.9866449307166594, + "grad_norm": 3.7945597171783447, + "learning_rate": 9.353639198570463e-09, + "loss": 1.016, + "step": 15736 + }, + { + "epoch": 0.9867076305724497, + "grad_norm": 4.074557304382324, + "learning_rate": 9.266031302767042e-09, + "loss": 1.1162, + "step": 15737 + }, + { + "epoch": 0.98677033042824, + "grad_norm": 3.656989336013794, + "learning_rate": 9.178835425269762e-09, + "loss": 1.0147, + "step": 15738 + }, + { + "epoch": 0.9868330302840304, + "grad_norm": 3.532956600189209, + "learning_rate": 9.092051569674632e-09, + "loss": 0.972, + "step": 15739 + }, + { + "epoch": 0.9868957301398207, + "grad_norm": 3.4624826908111572, + "learning_rate": 9.005679739557683e-09, + "loss": 0.9821, + "step": 15740 + }, + { + "epoch": 0.986958429995611, + "grad_norm": 3.486351728439331, + "learning_rate": 8.91971993848384e-09, + "loss": 1.0568, + "step": 15741 + }, + { + "epoch": 0.9870211298514013, + "grad_norm": 3.6022472381591797, + "learning_rate": 8.834172169996935e-09, + "loss": 1.0452, + "step": 15742 + }, + { + "epoch": 0.9870838297071917, + "grad_norm": 3.4887166023254395, + "learning_rate": 8.749036437625258e-09, + "loss": 1.153, + "step": 15743 + }, + { + "epoch": 0.987146529562982, + "grad_norm": 3.6230969429016113, + "learning_rate": 8.664312744879334e-09, + "loss": 1.0314, + "step": 15744 + }, + { + "epoch": 0.9872092294187723, + "grad_norm": 3.644228935241699, + "learning_rate": 8.580001095253032e-09, + "loss": 1.1494, + "step": 15745 + }, + { + "epoch": 0.9872719292745626, + "grad_norm": 3.387406349182129, + "learning_rate": 8.496101492224684e-09, + "loss": 0.9735, + "step": 15746 + }, + { + "epoch": 0.987334629130353, + "grad_norm": 3.5925307273864746, + "learning_rate": 8.412613939252633e-09, + "loss": 1.1106, + "step": 15747 + }, + { + "epoch": 0.9873973289861433, + "grad_norm": 3.227963447570801, + "learning_rate": 8.329538439781904e-09, + "loss": 1.0806, + "step": 15748 + }, + { + "epoch": 0.9874600288419336, + "grad_norm": 3.8532962799072266, + "learning_rate": 8.24687499723642e-09, + "loss": 1.0747, + "step": 15749 + }, + { + "epoch": 0.987522728697724, + "grad_norm": 3.445103645324707, + "learning_rate": 8.16462361502568e-09, + "loss": 1.0319, + "step": 15750 + }, + { + "epoch": 0.9875854285535143, + "grad_norm": 3.530688524246216, + "learning_rate": 8.082784296543633e-09, + "loss": 1.0616, + "step": 15751 + }, + { + "epoch": 0.9876481284093046, + "grad_norm": 4.002108573913574, + "learning_rate": 8.001357045163138e-09, + "loss": 
1.0063, + "step": 15752 + }, + { + "epoch": 0.987710828265095, + "grad_norm": 3.220515489578247, + "learning_rate": 7.92034186424262e-09, + "loss": 1.2544, + "step": 15753 + }, + { + "epoch": 0.9877735281208854, + "grad_norm": 3.4867680072784424, + "learning_rate": 7.83973875712385e-09, + "loss": 1.0305, + "step": 15754 + }, + { + "epoch": 0.9878362279766757, + "grad_norm": 3.7346549034118652, + "learning_rate": 7.759547727130834e-09, + "loss": 1.0763, + "step": 15755 + }, + { + "epoch": 0.987898927832466, + "grad_norm": 3.558094024658203, + "learning_rate": 7.679768777569819e-09, + "loss": 0.9772, + "step": 15756 + }, + { + "epoch": 0.9879616276882563, + "grad_norm": 3.5505564212799072, + "learning_rate": 7.600401911732613e-09, + "loss": 1.0177, + "step": 15757 + }, + { + "epoch": 0.9880243275440467, + "grad_norm": 3.468125581741333, + "learning_rate": 7.521447132889936e-09, + "loss": 1.1529, + "step": 15758 + }, + { + "epoch": 0.988087027399837, + "grad_norm": 3.764096736907959, + "learning_rate": 7.4429044443002915e-09, + "loss": 1.1929, + "step": 15759 + }, + { + "epoch": 0.9881497272556273, + "grad_norm": 3.7712137699127197, + "learning_rate": 7.36477384920109e-09, + "loss": 1.0788, + "step": 15760 + }, + { + "epoch": 0.9882124271114177, + "grad_norm": 3.715054512023926, + "learning_rate": 7.2870553508153085e-09, + "loss": 1.0394, + "step": 15761 + }, + { + "epoch": 0.988275126967208, + "grad_norm": 3.450955867767334, + "learning_rate": 7.209748952347051e-09, + "loss": 1.0428, + "step": 15762 + }, + { + "epoch": 0.9883378268229983, + "grad_norm": 3.5924999713897705, + "learning_rate": 7.132854656985988e-09, + "loss": 1.1479, + "step": 15763 + }, + { + "epoch": 0.9884005266787886, + "grad_norm": 3.4422390460968018, + "learning_rate": 7.0563724679018065e-09, + "loss": 0.9512, + "step": 15764 + }, + { + "epoch": 0.988463226534579, + "grad_norm": 3.26800799369812, + "learning_rate": 6.98030238824976e-09, + "loss": 1.2533, + "step": 15765 + }, + { + "epoch": 0.9885259263903693, + "grad_norm": 3.2798192501068115, + "learning_rate": 6.904644421166229e-09, + "loss": 1.1581, + "step": 15766 + }, + { + "epoch": 0.9885886262461596, + "grad_norm": 3.6073176860809326, + "learning_rate": 6.82939856977094e-09, + "loss": 1.1149, + "step": 15767 + }, + { + "epoch": 0.98865132610195, + "grad_norm": 3.9442691802978516, + "learning_rate": 6.754564837168076e-09, + "loss": 1.0497, + "step": 15768 + }, + { + "epoch": 0.9887140259577403, + "grad_norm": 3.3737101554870605, + "learning_rate": 6.680143226444058e-09, + "loss": 1.0776, + "step": 15769 + }, + { + "epoch": 0.9887767258135306, + "grad_norm": 3.305170774459839, + "learning_rate": 6.606133740666432e-09, + "loss": 1.2166, + "step": 15770 + }, + { + "epoch": 0.9888394256693209, + "grad_norm": 3.3061795234680176, + "learning_rate": 6.532536382888311e-09, + "loss": 1.091, + "step": 15771 + }, + { + "epoch": 0.9889021255251113, + "grad_norm": 3.6594088077545166, + "learning_rate": 6.459351156145044e-09, + "loss": 1.0965, + "step": 15772 + }, + { + "epoch": 0.9889648253809016, + "grad_norm": 3.0058510303497314, + "learning_rate": 6.386578063454218e-09, + "loss": 1.1959, + "step": 15773 + }, + { + "epoch": 0.9890275252366919, + "grad_norm": 3.5144782066345215, + "learning_rate": 6.314217107817877e-09, + "loss": 1.1534, + "step": 15774 + }, + { + "epoch": 0.9890902250924822, + "grad_norm": 3.06842303276062, + "learning_rate": 6.242268292219189e-09, + "loss": 1.0367, + "step": 15775 + }, + { + "epoch": 0.9891529249482727, + "grad_norm": 3.2260303497314453, + 
"learning_rate": 6.170731619626891e-09, + "loss": 0.9908, + "step": 15776 + }, + { + "epoch": 0.989215624804063, + "grad_norm": 3.2304306030273438, + "learning_rate": 6.099607092988624e-09, + "loss": 1.0812, + "step": 15777 + }, + { + "epoch": 0.9892783246598533, + "grad_norm": 3.4829304218292236, + "learning_rate": 6.028894715239819e-09, + "loss": 1.1813, + "step": 15778 + }, + { + "epoch": 0.9893410245156437, + "grad_norm": 3.8579976558685303, + "learning_rate": 5.958594489295921e-09, + "loss": 1.0854, + "step": 15779 + }, + { + "epoch": 0.989403724371434, + "grad_norm": 3.6675562858581543, + "learning_rate": 5.888706418054613e-09, + "loss": 0.9206, + "step": 15780 + }, + { + "epoch": 0.9894664242272243, + "grad_norm": 3.406309127807617, + "learning_rate": 5.819230504401363e-09, + "loss": 0.9539, + "step": 15781 + }, + { + "epoch": 0.9895291240830146, + "grad_norm": 3.304258346557617, + "learning_rate": 5.750166751198327e-09, + "loss": 1.1156, + "step": 15782 + }, + { + "epoch": 0.989591823938805, + "grad_norm": 3.610659122467041, + "learning_rate": 5.6815151612954475e-09, + "loss": 1.1518, + "step": 15783 + }, + { + "epoch": 0.9896545237945953, + "grad_norm": 3.6196911334991455, + "learning_rate": 5.613275737522683e-09, + "loss": 0.909, + "step": 15784 + }, + { + "epoch": 0.9897172236503856, + "grad_norm": 3.2106597423553467, + "learning_rate": 5.545448482695559e-09, + "loss": 1.0026, + "step": 15785 + }, + { + "epoch": 0.9897799235061759, + "grad_norm": 3.6671667098999023, + "learning_rate": 5.478033399610727e-09, + "loss": 0.9199, + "step": 15786 + }, + { + "epoch": 0.9898426233619663, + "grad_norm": 3.4029946327209473, + "learning_rate": 5.411030491047075e-09, + "loss": 0.92, + "step": 15787 + }, + { + "epoch": 0.9899053232177566, + "grad_norm": 3.122297763824463, + "learning_rate": 5.34443975977017e-09, + "loss": 1.1481, + "step": 15788 + }, + { + "epoch": 0.9899680230735469, + "grad_norm": 3.538520097732544, + "learning_rate": 5.278261208524482e-09, + "loss": 1.1952, + "step": 15789 + }, + { + "epoch": 0.9900307229293372, + "grad_norm": 3.4397966861724854, + "learning_rate": 5.21249484004005e-09, + "loss": 1.0107, + "step": 15790 + }, + { + "epoch": 0.9900934227851276, + "grad_norm": 3.30106258392334, + "learning_rate": 5.147140657029148e-09, + "loss": 0.9332, + "step": 15791 + }, + { + "epoch": 0.9901561226409179, + "grad_norm": 3.224363088607788, + "learning_rate": 5.082198662186289e-09, + "loss": 1.1738, + "step": 15792 + }, + { + "epoch": 0.9902188224967082, + "grad_norm": 4.16478967666626, + "learning_rate": 5.017668858189328e-09, + "loss": 1.0771, + "step": 15793 + }, + { + "epoch": 0.9902815223524986, + "grad_norm": 3.1784250736236572, + "learning_rate": 4.953551247701694e-09, + "loss": 1.1323, + "step": 15794 + }, + { + "epoch": 0.9903442222082889, + "grad_norm": 3.6013259887695312, + "learning_rate": 4.889845833364604e-09, + "loss": 1.0849, + "step": 15795 + }, + { + "epoch": 0.9904069220640792, + "grad_norm": 3.3055970668792725, + "learning_rate": 4.826552617807067e-09, + "loss": 1.1094, + "step": 15796 + }, + { + "epoch": 0.9904696219198695, + "grad_norm": 3.362677574157715, + "learning_rate": 4.763671603639219e-09, + "loss": 1.1562, + "step": 15797 + }, + { + "epoch": 0.9905323217756599, + "grad_norm": 3.4988715648651123, + "learning_rate": 4.70120279345454e-09, + "loss": 1.009, + "step": 15798 + }, + { + "epoch": 0.9905950216314503, + "grad_norm": 3.225857734680176, + "learning_rate": 4.639146189828747e-09, + "loss": 1.1379, + "step": 15799 + }, + { + "epoch": 
0.9906577214872406, + "grad_norm": 3.516347885131836, + "learning_rate": 4.5775017953197944e-09, + "loss": 1.0384, + "step": 15800 + }, + { + "epoch": 0.990720421343031, + "grad_norm": 3.344417095184326, + "learning_rate": 4.5162696124723124e-09, + "loss": 1.1606, + "step": 15801 + }, + { + "epoch": 0.9907831211988213, + "grad_norm": 3.41847562789917, + "learning_rate": 4.45544964380984e-09, + "loss": 1.166, + "step": 15802 + }, + { + "epoch": 0.9908458210546116, + "grad_norm": 3.6416056156158447, + "learning_rate": 4.395041891841479e-09, + "loss": 1.07, + "step": 15803 + }, + { + "epoch": 0.9909085209104019, + "grad_norm": 3.1561856269836426, + "learning_rate": 4.33504635905857e-09, + "loss": 1.0809, + "step": 15804 + }, + { + "epoch": 0.9909712207661923, + "grad_norm": 3.3563072681427, + "learning_rate": 4.275463047934691e-09, + "loss": 1.0645, + "step": 15805 + }, + { + "epoch": 0.9910339206219826, + "grad_norm": 3.2100536823272705, + "learning_rate": 4.216291960925656e-09, + "loss": 1.1608, + "step": 15806 + }, + { + "epoch": 0.9910966204777729, + "grad_norm": 3.6639535427093506, + "learning_rate": 4.157533100475064e-09, + "loss": 1.1612, + "step": 15807 + }, + { + "epoch": 0.9911593203335632, + "grad_norm": 3.8564298152923584, + "learning_rate": 4.099186469003202e-09, + "loss": 1.0283, + "step": 15808 + }, + { + "epoch": 0.9912220201893536, + "grad_norm": 3.8277080059051514, + "learning_rate": 4.041252068918145e-09, + "loss": 0.931, + "step": 15809 + }, + { + "epoch": 0.9912847200451439, + "grad_norm": 3.5527584552764893, + "learning_rate": 3.98372990260798e-09, + "loss": 1.2072, + "step": 15810 + }, + { + "epoch": 0.9913474199009342, + "grad_norm": 3.2289018630981445, + "learning_rate": 3.926619972446366e-09, + "loss": 1.1584, + "step": 15811 + }, + { + "epoch": 0.9914101197567246, + "grad_norm": 3.6814935207366943, + "learning_rate": 3.869922280785865e-09, + "loss": 1.0715, + "step": 15812 + }, + { + "epoch": 0.9914728196125149, + "grad_norm": 3.625566005706787, + "learning_rate": 3.8136368299668266e-09, + "loss": 1.0936, + "step": 15813 + }, + { + "epoch": 0.9915355194683052, + "grad_norm": 3.882146120071411, + "learning_rate": 3.757763622310728e-09, + "loss": 1.1238, + "step": 15814 + }, + { + "epoch": 0.9915982193240955, + "grad_norm": 3.281687021255493, + "learning_rate": 3.7023026601201718e-09, + "loss": 1.0007, + "step": 15815 + }, + { + "epoch": 0.9916609191798859, + "grad_norm": 3.5296571254730225, + "learning_rate": 3.6472539456833268e-09, + "loss": 1.1936, + "step": 15816 + }, + { + "epoch": 0.9917236190356762, + "grad_norm": 3.7877087593078613, + "learning_rate": 3.5926174812705994e-09, + "loss": 1.0796, + "step": 15817 + }, + { + "epoch": 0.9917863188914665, + "grad_norm": 3.5265321731567383, + "learning_rate": 3.5383932691346323e-09, + "loss": 1.0174, + "step": 15818 + }, + { + "epoch": 0.9918490187472568, + "grad_norm": 2.955258846282959, + "learning_rate": 3.4845813115114147e-09, + "loss": 1.1075, + "step": 15819 + }, + { + "epoch": 0.9919117186030472, + "grad_norm": 3.0837090015411377, + "learning_rate": 3.4311816106213926e-09, + "loss": 1.1514, + "step": 15820 + }, + { + "epoch": 0.9919744184588375, + "grad_norm": 3.835886001586914, + "learning_rate": 3.378194168666138e-09, + "loss": 1.1644, + "step": 15821 + }, + { + "epoch": 0.9920371183146279, + "grad_norm": 3.3088197708129883, + "learning_rate": 3.3256189878294597e-09, + "loss": 1.1096, + "step": 15822 + }, + { + "epoch": 0.9920998181704183, + "grad_norm": 3.189755916595459, + "learning_rate": 
3.273456070281844e-09, + "loss": 1.1318, + "step": 15823 + }, + { + "epoch": 0.9921625180262086, + "grad_norm": 3.2922818660736084, + "learning_rate": 3.2217054181737927e-09, + "loss": 1.1644, + "step": 15824 + }, + { + "epoch": 0.9922252178819989, + "grad_norm": 3.7036590576171875, + "learning_rate": 3.1703670336380444e-09, + "loss": 1.1352, + "step": 15825 + }, + { + "epoch": 0.9922879177377892, + "grad_norm": 3.759476900100708, + "learning_rate": 3.1194409187940146e-09, + "loss": 1.1042, + "step": 15826 + }, + { + "epoch": 0.9923506175935796, + "grad_norm": 3.5611395835876465, + "learning_rate": 3.0689270757400247e-09, + "loss": 1.1036, + "step": 15827 + }, + { + "epoch": 0.9924133174493699, + "grad_norm": 3.1832222938537598, + "learning_rate": 3.0188255065599635e-09, + "loss": 1.2414, + "step": 15828 + }, + { + "epoch": 0.9924760173051602, + "grad_norm": 3.7086105346679688, + "learning_rate": 2.9691362133210667e-09, + "loss": 1.0829, + "step": 15829 + }, + { + "epoch": 0.9925387171609505, + "grad_norm": 3.471862316131592, + "learning_rate": 2.9198591980705847e-09, + "loss": 1.1377, + "step": 15830 + }, + { + "epoch": 0.9926014170167409, + "grad_norm": 4.070611953735352, + "learning_rate": 2.8709944628413367e-09, + "loss": 1.0688, + "step": 15831 + }, + { + "epoch": 0.9926641168725312, + "grad_norm": 3.6825177669525146, + "learning_rate": 2.8225420096494873e-09, + "loss": 1.1293, + "step": 15832 + }, + { + "epoch": 0.9927268167283215, + "grad_norm": 3.287724256515503, + "learning_rate": 2.7745018404934374e-09, + "loss": 1.0134, + "step": 15833 + }, + { + "epoch": 0.9927895165841119, + "grad_norm": 3.1537442207336426, + "learning_rate": 2.726873957352716e-09, + "loss": 1.0847, + "step": 15834 + }, + { + "epoch": 0.9928522164399022, + "grad_norm": 3.416447401046753, + "learning_rate": 2.6796583621924165e-09, + "loss": 1.0084, + "step": 15835 + }, + { + "epoch": 0.9929149162956925, + "grad_norm": 3.4567184448242188, + "learning_rate": 2.6328550569587607e-09, + "loss": 1.0925, + "step": 15836 + }, + { + "epoch": 0.9929776161514828, + "grad_norm": 3.3225395679473877, + "learning_rate": 2.5864640435835362e-09, + "loss": 1.0941, + "step": 15837 + }, + { + "epoch": 0.9930403160072732, + "grad_norm": 3.372939109802246, + "learning_rate": 2.5404853239796578e-09, + "loss": 1.1439, + "step": 15838 + }, + { + "epoch": 0.9931030158630635, + "grad_norm": 3.518390655517578, + "learning_rate": 2.4949189000411654e-09, + "loss": 1.1678, + "step": 15839 + }, + { + "epoch": 0.9931657157188538, + "grad_norm": 3.390504837036133, + "learning_rate": 2.4497647736498877e-09, + "loss": 0.9626, + "step": 15840 + }, + { + "epoch": 0.9932284155746441, + "grad_norm": 3.2542874813079834, + "learning_rate": 2.405022946667668e-09, + "loss": 0.9342, + "step": 15841 + }, + { + "epoch": 0.9932911154304345, + "grad_norm": 3.638631820678711, + "learning_rate": 2.3606934209374764e-09, + "loss": 1.1298, + "step": 15842 + }, + { + "epoch": 0.9933538152862248, + "grad_norm": 3.4110264778137207, + "learning_rate": 2.3167761982900716e-09, + "loss": 1.0603, + "step": 15843 + }, + { + "epoch": 0.9934165151420151, + "grad_norm": 3.864391326904297, + "learning_rate": 2.273271280534006e-09, + "loss": 1.0181, + "step": 15844 + }, + { + "epoch": 0.9934792149978056, + "grad_norm": 3.3637685775756836, + "learning_rate": 2.2301786694656214e-09, + "loss": 1.0245, + "step": 15845 + }, + { + "epoch": 0.9935419148535959, + "grad_norm": 3.2054250240325928, + "learning_rate": 2.1874983668612736e-09, + "loss": 1.1798, + "step": 15846 + }, + { + 
"epoch": 0.9936046147093862, + "grad_norm": 3.655038356781006, + "learning_rate": 2.145230374481777e-09, + "loss": 1.2326, + "step": 15847 + }, + { + "epoch": 0.9936673145651765, + "grad_norm": 3.645397663116455, + "learning_rate": 2.1033746940679613e-09, + "loss": 1.0426, + "step": 15848 + }, + { + "epoch": 0.9937300144209669, + "grad_norm": 3.4289584159851074, + "learning_rate": 2.0619313273484432e-09, + "loss": 1.1194, + "step": 15849 + }, + { + "epoch": 0.9937927142767572, + "grad_norm": 3.7400963306427, + "learning_rate": 2.0209002760318565e-09, + "loss": 0.837, + "step": 15850 + }, + { + "epoch": 0.9938554141325475, + "grad_norm": 3.254143238067627, + "learning_rate": 1.9802815418101805e-09, + "loss": 1.0011, + "step": 15851 + }, + { + "epoch": 0.9939181139883378, + "grad_norm": 3.578530788421631, + "learning_rate": 1.9400751263576324e-09, + "loss": 1.1077, + "step": 15852 + }, + { + "epoch": 0.9939808138441282, + "grad_norm": 3.4945411682128906, + "learning_rate": 1.9002810313328847e-09, + "loss": 1.0126, + "step": 15853 + }, + { + "epoch": 0.9940435136999185, + "grad_norm": 3.2835443019866943, + "learning_rate": 1.8608992583779572e-09, + "loss": 0.9793, + "step": 15854 + }, + { + "epoch": 0.9941062135557088, + "grad_norm": 3.329955816268921, + "learning_rate": 1.8219298091148863e-09, + "loss": 1.0102, + "step": 15855 + }, + { + "epoch": 0.9941689134114992, + "grad_norm": 3.234790325164795, + "learning_rate": 1.783372685153495e-09, + "loss": 1.1988, + "step": 15856 + }, + { + "epoch": 0.9942316132672895, + "grad_norm": 3.938215494155884, + "learning_rate": 1.7452278880814022e-09, + "loss": 0.9973, + "step": 15857 + }, + { + "epoch": 0.9942943131230798, + "grad_norm": 3.1199302673339844, + "learning_rate": 1.7074954194729044e-09, + "loss": 1.1576, + "step": 15858 + }, + { + "epoch": 0.9943570129788701, + "grad_norm": 3.3450629711151123, + "learning_rate": 1.670175280884534e-09, + "loss": 1.0784, + "step": 15859 + }, + { + "epoch": 0.9944197128346605, + "grad_norm": 3.305253267288208, + "learning_rate": 1.63326747385395e-09, + "loss": 1.1049, + "step": 15860 + }, + { + "epoch": 0.9944824126904508, + "grad_norm": 4.086877822875977, + "learning_rate": 1.5967719999043784e-09, + "loss": 1.0175, + "step": 15861 + }, + { + "epoch": 0.9945451125462411, + "grad_norm": 3.3823745250701904, + "learning_rate": 1.5606888605412818e-09, + "loss": 1.0911, + "step": 15862 + }, + { + "epoch": 0.9946078124020314, + "grad_norm": 3.458181619644165, + "learning_rate": 1.5250180572512484e-09, + "loss": 1.0971, + "step": 15863 + }, + { + "epoch": 0.9946705122578218, + "grad_norm": 3.2341229915618896, + "learning_rate": 1.4897595915053242e-09, + "loss": 1.1205, + "step": 15864 + }, + { + "epoch": 0.9947332121136121, + "grad_norm": 3.0954644680023193, + "learning_rate": 1.4549134647601215e-09, + "loss": 1.0465, + "step": 15865 + }, + { + "epoch": 0.9947959119694024, + "grad_norm": 3.2756636142730713, + "learning_rate": 1.4204796784500485e-09, + "loss": 1.2371, + "step": 15866 + }, + { + "epoch": 0.9948586118251928, + "grad_norm": 3.2137303352355957, + "learning_rate": 1.3864582339961908e-09, + "loss": 1.1067, + "step": 15867 + }, + { + "epoch": 0.9949213116809832, + "grad_norm": 3.9782092571258545, + "learning_rate": 1.3528491328007597e-09, + "loss": 1.073, + "step": 15868 + }, + { + "epoch": 0.9949840115367735, + "grad_norm": 3.398864269256592, + "learning_rate": 1.3196523762504243e-09, + "loss": 0.9529, + "step": 15869 + }, + { + "epoch": 0.9950467113925638, + "grad_norm": 3.4965615272521973, + 
"learning_rate": 1.2868679657151994e-09, + "loss": 1.2237, + "step": 15870 + }, + { + "epoch": 0.9951094112483542, + "grad_norm": 3.6538965702056885, + "learning_rate": 1.2544959025462266e-09, + "loss": 1.0912, + "step": 15871 + }, + { + "epoch": 0.9951721111041445, + "grad_norm": 3.4360036849975586, + "learning_rate": 1.222536188077994e-09, + "loss": 1.0213, + "step": 15872 + }, + { + "epoch": 0.9952348109599348, + "grad_norm": 3.4340226650238037, + "learning_rate": 1.1909888236283363e-09, + "loss": 1.0513, + "step": 15873 + }, + { + "epoch": 0.9952975108157252, + "grad_norm": 3.568080425262451, + "learning_rate": 1.159853810500655e-09, + "loss": 1.0163, + "step": 15874 + }, + { + "epoch": 0.9953602106715155, + "grad_norm": 3.294785499572754, + "learning_rate": 1.1291311499761482e-09, + "loss": 1.0601, + "step": 15875 + }, + { + "epoch": 0.9954229105273058, + "grad_norm": 3.1172168254852295, + "learning_rate": 1.09882084332269e-09, + "loss": 0.8654, + "step": 15876 + }, + { + "epoch": 0.9954856103830961, + "grad_norm": 3.1740715503692627, + "learning_rate": 1.0689228917915017e-09, + "loss": 1.0442, + "step": 15877 + }, + { + "epoch": 0.9955483102388865, + "grad_norm": 3.545466661453247, + "learning_rate": 1.039437296613821e-09, + "loss": 1.1602, + "step": 15878 + }, + { + "epoch": 0.9956110100946768, + "grad_norm": 3.546431303024292, + "learning_rate": 1.0103640590064524e-09, + "loss": 1.0803, + "step": 15879 + }, + { + "epoch": 0.9956737099504671, + "grad_norm": 3.4448869228363037, + "learning_rate": 9.817031801684362e-10, + "loss": 1.028, + "step": 15880 + }, + { + "epoch": 0.9957364098062574, + "grad_norm": 3.465238332748413, + "learning_rate": 9.534546612810502e-10, + "loss": 1.1009, + "step": 15881 + }, + { + "epoch": 0.9957991096620478, + "grad_norm": 3.326427936553955, + "learning_rate": 9.256185035111387e-10, + "loss": 1.0506, + "step": 15882 + }, + { + "epoch": 0.9958618095178381, + "grad_norm": 3.1269683837890625, + "learning_rate": 8.981947080044518e-10, + "loss": 1.1808, + "step": 15883 + }, + { + "epoch": 0.9959245093736284, + "grad_norm": 3.4316561222076416, + "learning_rate": 8.711832758934169e-10, + "loss": 0.9926, + "step": 15884 + }, + { + "epoch": 0.9959872092294187, + "grad_norm": 3.39951229095459, + "learning_rate": 8.445842082904776e-10, + "loss": 1.0457, + "step": 15885 + }, + { + "epoch": 0.9960499090852091, + "grad_norm": 3.6921916007995605, + "learning_rate": 8.183975062947546e-10, + "loss": 1.1073, + "step": 15886 + }, + { + "epoch": 0.9961126089409994, + "grad_norm": 3.3913490772247314, + "learning_rate": 7.926231709842747e-10, + "loss": 0.9376, + "step": 15887 + }, + { + "epoch": 0.9961753087967897, + "grad_norm": 2.9814741611480713, + "learning_rate": 7.672612034226312e-10, + "loss": 1.1237, + "step": 15888 + }, + { + "epoch": 0.99623800865258, + "grad_norm": 3.263929605484009, + "learning_rate": 7.423116046556544e-10, + "loss": 1.0904, + "step": 15889 + }, + { + "epoch": 0.9963007085083704, + "grad_norm": 3.4978439807891846, + "learning_rate": 7.177743757136313e-10, + "loss": 1.0032, + "step": 15890 + }, + { + "epoch": 0.9963634083641607, + "grad_norm": 3.4226725101470947, + "learning_rate": 6.936495176057545e-10, + "loss": 1.0944, + "step": 15891 + }, + { + "epoch": 0.9964261082199511, + "grad_norm": 3.4285426139831543, + "learning_rate": 6.699370313290044e-10, + "loss": 1.1026, + "step": 15892 + }, + { + "epoch": 0.9964888080757415, + "grad_norm": 3.473910093307495, + "learning_rate": 6.466369178614873e-10, + "loss": 1.1121, + "step": 15893 + }, + { + 
"epoch": 0.9965515079315318, + "grad_norm": 3.316312313079834, + "learning_rate": 6.237491781624361e-10, + "loss": 1.0308, + "step": 15894 + }, + { + "epoch": 0.9966142077873221, + "grad_norm": 3.033876419067383, + "learning_rate": 6.012738131766505e-10, + "loss": 1.087, + "step": 15895 + }, + { + "epoch": 0.9966769076431125, + "grad_norm": 3.6091818809509277, + "learning_rate": 5.792108238311667e-10, + "loss": 1.1373, + "step": 15896 + }, + { + "epoch": 0.9967396074989028, + "grad_norm": 3.3569369316101074, + "learning_rate": 5.575602110363675e-10, + "loss": 0.9088, + "step": 15897 + }, + { + "epoch": 0.9968023073546931, + "grad_norm": 3.3272268772125244, + "learning_rate": 5.363219756837624e-10, + "loss": 1.0927, + "step": 15898 + }, + { + "epoch": 0.9968650072104834, + "grad_norm": 3.350053548812866, + "learning_rate": 5.154961186493168e-10, + "loss": 1.1433, + "step": 15899 + }, + { + "epoch": 0.9969277070662738, + "grad_norm": 3.6312453746795654, + "learning_rate": 4.95082640793454e-10, + "loss": 0.9806, + "step": 15900 + }, + { + "epoch": 0.9969904069220641, + "grad_norm": 3.3721282482147217, + "learning_rate": 4.750815429566125e-10, + "loss": 0.8813, + "step": 15901 + }, + { + "epoch": 0.9970531067778544, + "grad_norm": 3.43097186088562, + "learning_rate": 4.5549282596479836e-10, + "loss": 1.1635, + "step": 15902 + }, + { + "epoch": 0.9971158066336447, + "grad_norm": 3.50581693649292, + "learning_rate": 4.3631649062403356e-10, + "loss": 0.9564, + "step": 15903 + }, + { + "epoch": 0.9971785064894351, + "grad_norm": 3.8132309913635254, + "learning_rate": 4.175525377270173e-10, + "loss": 0.9415, + "step": 15904 + }, + { + "epoch": 0.9972412063452254, + "grad_norm": 3.3441972732543945, + "learning_rate": 3.992009680464648e-10, + "loss": 0.9729, + "step": 15905 + }, + { + "epoch": 0.9973039062010157, + "grad_norm": 4.043051719665527, + "learning_rate": 3.812617823395481e-10, + "loss": 0.9573, + "step": 15906 + }, + { + "epoch": 0.997366606056806, + "grad_norm": 4.1553544998168945, + "learning_rate": 3.637349813467861e-10, + "loss": 1.068, + "step": 15907 + }, + { + "epoch": 0.9974293059125964, + "grad_norm": 3.223410129547119, + "learning_rate": 3.466205657898236e-10, + "loss": 1.2035, + "step": 15908 + }, + { + "epoch": 0.9974920057683867, + "grad_norm": 3.424510955810547, + "learning_rate": 3.2991853637476257e-10, + "loss": 1.2088, + "step": 15909 + }, + { + "epoch": 0.997554705624177, + "grad_norm": 3.2600293159484863, + "learning_rate": 3.136288937910514e-10, + "loss": 1.1132, + "step": 15910 + }, + { + "epoch": 0.9976174054799674, + "grad_norm": 4.061476707458496, + "learning_rate": 2.9775163870926494e-10, + "loss": 1.0616, + "step": 15911 + }, + { + "epoch": 0.9976801053357577, + "grad_norm": 3.1747233867645264, + "learning_rate": 2.822867717855449e-10, + "loss": 1.0485, + "step": 15912 + }, + { + "epoch": 0.997742805191548, + "grad_norm": 3.3969242572784424, + "learning_rate": 2.67234293656049e-10, + "loss": 1.0768, + "step": 15913 + }, + { + "epoch": 0.9978055050473383, + "grad_norm": 3.253164052963257, + "learning_rate": 2.525942049436125e-10, + "loss": 1.1539, + "step": 15914 + }, + { + "epoch": 0.9978682049031288, + "grad_norm": 3.327260971069336, + "learning_rate": 2.3836650624997627e-10, + "loss": 1.035, + "step": 15915 + }, + { + "epoch": 0.9979309047589191, + "grad_norm": 3.7320706844329834, + "learning_rate": 2.2455119816355842e-10, + "loss": 0.8923, + "step": 15916 + }, + { + "epoch": 0.9979936046147094, + "grad_norm": 3.2592570781707764, + "learning_rate": 
2.111482812527932e-10, + "loss": 0.9122, + "step": 15917 + }, + { + "epoch": 0.9980563044704998, + "grad_norm": 3.206636428833008, + "learning_rate": 1.9815775607057163e-10, + "loss": 1.0942, + "step": 15918 + }, + { + "epoch": 0.9981190043262901, + "grad_norm": 3.4921483993530273, + "learning_rate": 1.8557962315424172e-10, + "loss": 0.9707, + "step": 15919 + }, + { + "epoch": 0.9981817041820804, + "grad_norm": 3.4125723838806152, + "learning_rate": 1.7341388302005714e-10, + "loss": 1.0971, + "step": 15920 + }, + { + "epoch": 0.9982444040378707, + "grad_norm": 3.8317995071411133, + "learning_rate": 1.616605361720591e-10, + "loss": 1.2329, + "step": 15921 + }, + { + "epoch": 0.9983071038936611, + "grad_norm": 3.538464069366455, + "learning_rate": 1.5031958309319472e-10, + "loss": 1.017, + "step": 15922 + }, + { + "epoch": 0.9983698037494514, + "grad_norm": 3.5555880069732666, + "learning_rate": 1.3939102425197803e-10, + "loss": 1.0532, + "step": 15923 + }, + { + "epoch": 0.9984325036052417, + "grad_norm": 3.726567506790161, + "learning_rate": 1.2887486009915962e-10, + "loss": 1.0653, + "step": 15924 + }, + { + "epoch": 0.998495203461032, + "grad_norm": 3.489579916000366, + "learning_rate": 1.1877109106772643e-10, + "loss": 0.9003, + "step": 15925 + }, + { + "epoch": 0.9985579033168224, + "grad_norm": 3.7029850482940674, + "learning_rate": 1.0907971757512237e-10, + "loss": 1.2922, + "step": 15926 + }, + { + "epoch": 0.9986206031726127, + "grad_norm": 3.215035915374756, + "learning_rate": 9.980074002102769e-11, + "loss": 0.9379, + "step": 15927 + }, + { + "epoch": 0.998683303028403, + "grad_norm": 3.723724842071533, + "learning_rate": 9.093415878735912e-11, + "loss": 1.0633, + "step": 15928 + }, + { + "epoch": 0.9987460028841934, + "grad_norm": 3.395115852355957, + "learning_rate": 8.247997424049026e-11, + "loss": 1.0509, + "step": 15929 + }, + { + "epoch": 0.9988087027399837, + "grad_norm": 3.2451982498168945, + "learning_rate": 7.443818672903114e-11, + "loss": 0.9376, + "step": 15930 + }, + { + "epoch": 0.998871402595774, + "grad_norm": 3.096423864364624, + "learning_rate": 6.680879658493845e-11, + "loss": 1.0705, + "step": 15931 + }, + { + "epoch": 0.9989341024515643, + "grad_norm": 3.4159018993377686, + "learning_rate": 5.959180412129506e-11, + "loss": 1.0766, + "step": 15932 + }, + { + "epoch": 0.9989968023073547, + "grad_norm": 3.179603338241577, + "learning_rate": 5.278720963786121e-11, + "loss": 1.0702, + "step": 15933 + }, + { + "epoch": 0.999059502163145, + "grad_norm": 3.7588143348693848, + "learning_rate": 4.639501341330288e-11, + "loss": 1.1311, + "step": 15934 + }, + { + "epoch": 0.9991222020189353, + "grad_norm": 3.7132725715637207, + "learning_rate": 4.041521571296336e-11, + "loss": 1.0197, + "step": 15935 + }, + { + "epoch": 0.9991849018747256, + "grad_norm": 3.9740657806396484, + "learning_rate": 3.4847816781091725e-11, + "loss": 1.1583, + "step": 15936 + }, + { + "epoch": 0.999247601730516, + "grad_norm": 3.415534496307373, + "learning_rate": 2.969281684972458e-11, + "loss": 1.0742, + "step": 15937 + }, + { + "epoch": 0.9993103015863064, + "grad_norm": 3.338090658187866, + "learning_rate": 2.495021613091453e-11, + "loss": 1.0259, + "step": 15938 + }, + { + "epoch": 0.9993730014420967, + "grad_norm": 3.047762155532837, + "learning_rate": 2.0620014818950596e-11, + "loss": 1.031, + "step": 15939 + }, + { + "epoch": 0.9994357012978871, + "grad_norm": 3.456256866455078, + "learning_rate": 1.6702213094799137e-11, + "loss": 0.9808, + "step": 15940 + }, + { + "epoch": 
0.9994984011536774, + "grad_norm": 4.122440338134766, + "learning_rate": 1.3196811117222042e-11, + "loss": 1.0891, + "step": 15941 + }, + { + "epoch": 0.9995611010094677, + "grad_norm": 3.0063397884368896, + "learning_rate": 1.0103809032768753e-11, + "loss": 1.1616, + "step": 15942 + }, + { + "epoch": 0.999623800865258, + "grad_norm": 3.397578477859497, + "learning_rate": 7.423206968004693e-12, + "loss": 1.1976, + "step": 15943 + }, + { + "epoch": 0.9996865007210484, + "grad_norm": 3.233900785446167, + "learning_rate": 5.155005033952165e-12, + "loss": 1.2213, + "step": 15944 + }, + { + "epoch": 0.9997492005768387, + "grad_norm": 3.460611581802368, + "learning_rate": 3.299203323869904e-12, + "loss": 1.1186, + "step": 15945 + }, + { + "epoch": 0.999811900432629, + "grad_norm": 3.3574094772338867, + "learning_rate": 1.8558019143632976e-12, + "loss": 0.8771, + "step": 15946 + }, + { + "epoch": 0.9998746002884193, + "grad_norm": 3.6878631114959717, + "learning_rate": 8.248008653843898e-13, + "loss": 1.0145, + "step": 15947 + }, + { + "epoch": 0.9999373001442097, + "grad_norm": 3.4921257495880127, + "learning_rate": 2.0620021801143196e-13, + "loss": 1.0187, + "step": 15948 + }, + { + "epoch": 1.0, + "grad_norm": 3.4830031394958496, + "learning_rate": 0.0, + "loss": 1.0728, + "step": 15949 + }, + { + "epoch": 1.0, + "step": 15949, + "total_flos": 3.2419969157778386e+18, + "train_loss": 1.1513992259439476, + "train_runtime": 143129.4422, + "train_samples_per_second": 1.783, + "train_steps_per_second": 0.111 + } + ], + "logging_steps": 1.0, + "max_steps": 15949, + "num_input_tokens_seen": 0, + "num_train_epochs": 1, + "save_steps": 1000, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": true + }, + "attributes": {} + } + }, + "total_flos": 3.2419969157778386e+18, + "train_batch_size": 4, + "trial_name": null, + "trial_params": null +}