diff --git "a/checkpoint-1280/trainer_state.json" "b/checkpoint-1280/trainer_state.json" new file mode 100644--- /dev/null +++ "b/checkpoint-1280/trainer_state.json" @@ -0,0 +1,9066 @@ +{ + "best_global_step": null, + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 0.20122226807365048, + "eval_steps": 160, + "global_step": 1280, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.00015720489693253945, + "grad_norm": 1.3751904964447021, + "learning_rate": 0.0, + "loss": 3.5741, + "step": 1 + }, + { + "epoch": 0.00015720489693253945, + "eval_loss": 3.4173049926757812, + "eval_runtime": 2315.7248, + "eval_samples_per_second": 3.998, + "eval_steps_per_second": 1.999, + "step": 1 + }, + { + "epoch": 0.0003144097938650789, + "grad_norm": 1.231239676475525, + "learning_rate": 5e-06, + "loss": 3.3021, + "step": 2 + }, + { + "epoch": 0.00047161469079761836, + "grad_norm": 1.3657807111740112, + "learning_rate": 1e-05, + "loss": 3.6333, + "step": 3 + }, + { + "epoch": 0.0006288195877301578, + "grad_norm": 1.3117496967315674, + "learning_rate": 1.5e-05, + "loss": 3.3731, + "step": 4 + }, + { + "epoch": 0.0007860244846626972, + "grad_norm": 1.4118576049804688, + "learning_rate": 2e-05, + "loss": 3.612, + "step": 5 + }, + { + "epoch": 0.0009432293815952367, + "grad_norm": 1.3155895471572876, + "learning_rate": 2.5e-05, + "loss": 3.3296, + "step": 6 + }, + { + "epoch": 0.001100434278527776, + "grad_norm": 1.2847192287445068, + "learning_rate": 3e-05, + "loss": 3.2168, + "step": 7 + }, + { + "epoch": 0.0012576391754603156, + "grad_norm": 1.1421078443527222, + "learning_rate": 3.5e-05, + "loss": 3.085, + "step": 8 + }, + { + "epoch": 0.0014148440723928551, + "grad_norm": 0.9923035502433777, + "learning_rate": 4e-05, + "loss": 3.0472, + "step": 9 + }, + { + "epoch": 0.0015720489693253944, + "grad_norm": 0.795043408870697, + "learning_rate": 4.5e-05, + "loss": 2.6666, + "step": 10 + }, + { + "epoch": 0.001729253866257934, + "grad_norm": 0.5987974405288696, + "learning_rate": 5e-05, + "loss": 2.473, + "step": 11 + }, + { + "epoch": 0.0018864587631904734, + "grad_norm": 0.4488905668258667, + "learning_rate": 4.9999999236547564e-05, + "loss": 2.3731, + "step": 12 + }, + { + "epoch": 0.002043663660123013, + "grad_norm": 0.3517301380634308, + "learning_rate": 4.999999694619029e-05, + "loss": 2.2158, + "step": 13 + }, + { + "epoch": 0.002200868557055552, + "grad_norm": 0.3045121431350708, + "learning_rate": 4.999999312892831e-05, + "loss": 2.3351, + "step": 14 + }, + { + "epoch": 0.002358073453988092, + "grad_norm": 0.24488244950771332, + "learning_rate": 4.9999987784761884e-05, + "loss": 2.2693, + "step": 15 + }, + { + "epoch": 0.0025152783509206312, + "grad_norm": 0.22892728447914124, + "learning_rate": 4.999998091369132e-05, + "loss": 2.1006, + "step": 16 + }, + { + "epoch": 0.0026724832478531705, + "grad_norm": 0.23219206929206848, + "learning_rate": 4.999997251571704e-05, + "loss": 2.215, + "step": 17 + }, + { + "epoch": 0.0028296881447857102, + "grad_norm": 0.24427154660224915, + "learning_rate": 4.999996259083956e-05, + "loss": 2.1708, + "step": 18 + }, + { + "epoch": 0.0029868930417182495, + "grad_norm": 0.2640205919742584, + "learning_rate": 4.999995113905947e-05, + "loss": 2.1709, + "step": 19 + }, + { + "epoch": 0.003144097938650789, + "grad_norm": 0.26644033193588257, + "learning_rate": 4.999993816037749e-05, + "loss": 2.1733, + "step": 20 + }, + { + "epoch": 0.0033013028355833285, + "grad_norm": 
0.2621535062789917, + "learning_rate": 4.9999923654794414e-05, + "loss": 2.0059, + "step": 21 + }, + { + "epoch": 0.003458507732515868, + "grad_norm": 0.2586187422275543, + "learning_rate": 4.999990762231111e-05, + "loss": 2.0336, + "step": 22 + }, + { + "epoch": 0.003615712629448407, + "grad_norm": 0.26732271909713745, + "learning_rate": 4.9999890062928566e-05, + "loss": 2.0566, + "step": 23 + }, + { + "epoch": 0.003772917526380947, + "grad_norm": 0.2357867807149887, + "learning_rate": 4.999987097664787e-05, + "loss": 1.9529, + "step": 24 + }, + { + "epoch": 0.003930122423313486, + "grad_norm": 0.2297009825706482, + "learning_rate": 4.999985036347016e-05, + "loss": 2.0369, + "step": 25 + }, + { + "epoch": 0.004087327320246026, + "grad_norm": 0.20529747009277344, + "learning_rate": 4.9999828223396705e-05, + "loss": 1.9781, + "step": 26 + }, + { + "epoch": 0.004244532217178565, + "grad_norm": 0.18342873454093933, + "learning_rate": 4.999980455642887e-05, + "loss": 1.9986, + "step": 27 + }, + { + "epoch": 0.004401737114111104, + "grad_norm": 0.16487397253513336, + "learning_rate": 4.999977936256809e-05, + "loss": 1.9063, + "step": 28 + }, + { + "epoch": 0.004558942011043644, + "grad_norm": 0.1762266606092453, + "learning_rate": 4.99997526418159e-05, + "loss": 1.9517, + "step": 29 + }, + { + "epoch": 0.004716146907976184, + "grad_norm": 0.16371938586235046, + "learning_rate": 4.999972439417394e-05, + "loss": 1.7734, + "step": 30 + }, + { + "epoch": 0.004873351804908723, + "grad_norm": 0.17309769988059998, + "learning_rate": 4.999969461964392e-05, + "loss": 1.8732, + "step": 31 + }, + { + "epoch": 0.0050305567018412625, + "grad_norm": 0.15772338211536407, + "learning_rate": 4.9999663318227683e-05, + "loss": 1.7537, + "step": 32 + }, + { + "epoch": 0.005187761598773802, + "grad_norm": 0.17521986365318298, + "learning_rate": 4.9999630489927126e-05, + "loss": 2.0077, + "step": 33 + }, + { + "epoch": 0.005344966495706341, + "grad_norm": 0.15462292730808258, + "learning_rate": 4.999959613474425e-05, + "loss": 1.8576, + "step": 34 + }, + { + "epoch": 0.005502171392638881, + "grad_norm": 0.15280336141586304, + "learning_rate": 4.999956025268117e-05, + "loss": 1.862, + "step": 35 + }, + { + "epoch": 0.0056593762895714205, + "grad_norm": 0.14518432319164276, + "learning_rate": 4.999952284374006e-05, + "loss": 1.8893, + "step": 36 + }, + { + "epoch": 0.005816581186503959, + "grad_norm": 0.16087624430656433, + "learning_rate": 4.999948390792321e-05, + "loss": 1.8658, + "step": 37 + }, + { + "epoch": 0.005973786083436499, + "grad_norm": 0.17504698038101196, + "learning_rate": 4.999944344523301e-05, + "loss": 1.7647, + "step": 38 + }, + { + "epoch": 0.006130990980369039, + "grad_norm": 0.17786233127117157, + "learning_rate": 4.999940145567191e-05, + "loss": 1.8133, + "step": 39 + }, + { + "epoch": 0.006288195877301578, + "grad_norm": 0.1628972887992859, + "learning_rate": 4.999935793924249e-05, + "loss": 1.7731, + "step": 40 + }, + { + "epoch": 0.006445400774234117, + "grad_norm": 0.13461466133594513, + "learning_rate": 4.9999312895947406e-05, + "loss": 1.7558, + "step": 41 + }, + { + "epoch": 0.006602605671166657, + "grad_norm": 0.12960125505924225, + "learning_rate": 4.99992663257894e-05, + "loss": 1.7639, + "step": 42 + }, + { + "epoch": 0.006759810568099196, + "grad_norm": 0.10991287231445312, + "learning_rate": 4.9999218228771324e-05, + "loss": 1.7538, + "step": 43 + }, + { + "epoch": 0.006917015465031736, + "grad_norm": 0.11583230644464493, + "learning_rate": 4.999916860489612e-05, + "loss": 1.715, + 
"step": 44 + }, + { + "epoch": 0.007074220361964275, + "grad_norm": 0.10344280302524567, + "learning_rate": 4.999911745416681e-05, + "loss": 1.6907, + "step": 45 + }, + { + "epoch": 0.007231425258896814, + "grad_norm": 0.10546118766069412, + "learning_rate": 4.999906477658651e-05, + "loss": 1.7294, + "step": 46 + }, + { + "epoch": 0.007388630155829354, + "grad_norm": 0.11775675415992737, + "learning_rate": 4.9999010572158465e-05, + "loss": 1.7146, + "step": 47 + }, + { + "epoch": 0.007545835052761894, + "grad_norm": 0.11109112203121185, + "learning_rate": 4.999895484088596e-05, + "loss": 1.6939, + "step": 48 + }, + { + "epoch": 0.007703039949694433, + "grad_norm": 0.1116517186164856, + "learning_rate": 4.999889758277242e-05, + "loss": 1.7271, + "step": 49 + }, + { + "epoch": 0.007860244846626972, + "grad_norm": 0.11245547980070114, + "learning_rate": 4.999883879782132e-05, + "loss": 1.7333, + "step": 50 + }, + { + "epoch": 0.008017449743559512, + "grad_norm": 0.1150551363825798, + "learning_rate": 4.999877848603626e-05, + "loss": 1.7036, + "step": 51 + }, + { + "epoch": 0.008174654640492052, + "grad_norm": 0.10856381803750992, + "learning_rate": 4.999871664742093e-05, + "loss": 1.7493, + "step": 52 + }, + { + "epoch": 0.008331859537424591, + "grad_norm": 0.10760089010000229, + "learning_rate": 4.9998653281979095e-05, + "loss": 1.6292, + "step": 53 + }, + { + "epoch": 0.00848906443435713, + "grad_norm": 0.0932115837931633, + "learning_rate": 4.9998588389714634e-05, + "loss": 1.6608, + "step": 54 + }, + { + "epoch": 0.00864626933128967, + "grad_norm": 0.09837482124567032, + "learning_rate": 4.9998521970631504e-05, + "loss": 1.7834, + "step": 55 + }, + { + "epoch": 0.008803474228222209, + "grad_norm": 0.08872833847999573, + "learning_rate": 4.9998454024733775e-05, + "loss": 1.6484, + "step": 56 + }, + { + "epoch": 0.008960679125154749, + "grad_norm": 0.08829163759946823, + "learning_rate": 4.9998384552025577e-05, + "loss": 1.5913, + "step": 57 + }, + { + "epoch": 0.009117884022087288, + "grad_norm": 0.09087682515382767, + "learning_rate": 4.999831355251117e-05, + "loss": 1.6809, + "step": 58 + }, + { + "epoch": 0.009275088919019828, + "grad_norm": 0.08675853163003922, + "learning_rate": 4.9998241026194884e-05, + "loss": 1.6519, + "step": 59 + }, + { + "epoch": 0.009432293815952368, + "grad_norm": 0.08463481813669205, + "learning_rate": 4.999816697308114e-05, + "loss": 1.6234, + "step": 60 + }, + { + "epoch": 0.009589498712884906, + "grad_norm": 0.08403950184583664, + "learning_rate": 4.999809139317448e-05, + "loss": 1.6533, + "step": 61 + }, + { + "epoch": 0.009746703609817445, + "grad_norm": 0.08155622333288193, + "learning_rate": 4.99980142864795e-05, + "loss": 1.6726, + "step": 62 + }, + { + "epoch": 0.009903908506749985, + "grad_norm": 0.08056480437517166, + "learning_rate": 4.999793565300093e-05, + "loss": 1.5881, + "step": 63 + }, + { + "epoch": 0.010061113403682525, + "grad_norm": 0.07879023998975754, + "learning_rate": 4.999785549274355e-05, + "loss": 1.5568, + "step": 64 + }, + { + "epoch": 0.010218318300615065, + "grad_norm": 0.07828455418348312, + "learning_rate": 4.9997773805712265e-05, + "loss": 1.6464, + "step": 65 + }, + { + "epoch": 0.010375523197547604, + "grad_norm": 0.08054805546998978, + "learning_rate": 4.9997690591912075e-05, + "loss": 1.6213, + "step": 66 + }, + { + "epoch": 0.010532728094480142, + "grad_norm": 0.07610727101564407, + "learning_rate": 4.999760585134805e-05, + "loss": 1.5729, + "step": 67 + }, + { + "epoch": 0.010689932991412682, + "grad_norm": 
0.07693428546190262, + "learning_rate": 4.999751958402537e-05, + "loss": 1.5444, + "step": 68 + }, + { + "epoch": 0.010847137888345222, + "grad_norm": 0.0810319185256958, + "learning_rate": 4.99974317899493e-05, + "loss": 1.7045, + "step": 69 + }, + { + "epoch": 0.011004342785277762, + "grad_norm": 0.07729896157979965, + "learning_rate": 4.9997342469125205e-05, + "loss": 1.6268, + "step": 70 + }, + { + "epoch": 0.011161547682210301, + "grad_norm": 0.07730107754468918, + "learning_rate": 4.999725162155855e-05, + "loss": 1.658, + "step": 71 + }, + { + "epoch": 0.011318752579142841, + "grad_norm": 0.08072328567504883, + "learning_rate": 4.9997159247254864e-05, + "loss": 1.5045, + "step": 72 + }, + { + "epoch": 0.011475957476075379, + "grad_norm": 0.08120577782392502, + "learning_rate": 4.9997065346219805e-05, + "loss": 1.568, + "step": 73 + }, + { + "epoch": 0.011633162373007919, + "grad_norm": 0.08131498098373413, + "learning_rate": 4.99969699184591e-05, + "loss": 1.6035, + "step": 74 + }, + { + "epoch": 0.011790367269940458, + "grad_norm": 0.08395873010158539, + "learning_rate": 4.9996872963978584e-05, + "loss": 1.5844, + "step": 75 + }, + { + "epoch": 0.011947572166872998, + "grad_norm": 0.08502068370580673, + "learning_rate": 4.999677448278417e-05, + "loss": 1.6661, + "step": 76 + }, + { + "epoch": 0.012104777063805538, + "grad_norm": 0.08467952907085419, + "learning_rate": 4.999667447488188e-05, + "loss": 1.5537, + "step": 77 + }, + { + "epoch": 0.012261981960738078, + "grad_norm": 0.19682182371616364, + "learning_rate": 4.999657294027782e-05, + "loss": 1.5051, + "step": 78 + }, + { + "epoch": 0.012419186857670617, + "grad_norm": 0.08586428314447403, + "learning_rate": 4.999646987897818e-05, + "loss": 1.565, + "step": 79 + }, + { + "epoch": 0.012576391754603155, + "grad_norm": 0.08156823366880417, + "learning_rate": 4.999636529098928e-05, + "loss": 1.6627, + "step": 80 + }, + { + "epoch": 0.012733596651535695, + "grad_norm": 0.08715341240167618, + "learning_rate": 4.9996259176317486e-05, + "loss": 1.5862, + "step": 81 + }, + { + "epoch": 0.012890801548468235, + "grad_norm": 0.09664586186408997, + "learning_rate": 4.999615153496928e-05, + "loss": 1.5741, + "step": 82 + }, + { + "epoch": 0.013048006445400774, + "grad_norm": 0.08438891172409058, + "learning_rate": 4.999604236695125e-05, + "loss": 1.5933, + "step": 83 + }, + { + "epoch": 0.013205211342333314, + "grad_norm": 0.08333732932806015, + "learning_rate": 4.999593167227006e-05, + "loss": 1.5904, + "step": 84 + }, + { + "epoch": 0.013362416239265854, + "grad_norm": 0.07945791631937027, + "learning_rate": 4.9995819450932455e-05, + "loss": 1.5763, + "step": 85 + }, + { + "epoch": 0.013519621136198392, + "grad_norm": 0.07682961225509644, + "learning_rate": 4.9995705702945304e-05, + "loss": 1.5197, + "step": 86 + }, + { + "epoch": 0.013676826033130932, + "grad_norm": 0.07547677308320999, + "learning_rate": 4.999559042831555e-05, + "loss": 1.6825, + "step": 87 + }, + { + "epoch": 0.013834030930063471, + "grad_norm": 0.07293456047773361, + "learning_rate": 4.999547362705025e-05, + "loss": 1.5466, + "step": 88 + }, + { + "epoch": 0.013991235826996011, + "grad_norm": 0.07730914652347565, + "learning_rate": 4.999535529915651e-05, + "loss": 1.5775, + "step": 89 + }, + { + "epoch": 0.01414844072392855, + "grad_norm": 0.07689664512872696, + "learning_rate": 4.9995235444641565e-05, + "loss": 1.5881, + "step": 90 + }, + { + "epoch": 0.01430564562086109, + "grad_norm": 0.07754997909069061, + "learning_rate": 4.999511406351275e-05, + "loss": 1.5037, 
+ "step": 91 + }, + { + "epoch": 0.014462850517793628, + "grad_norm": 0.07229866087436676, + "learning_rate": 4.999499115577746e-05, + "loss": 1.5077, + "step": 92 + }, + { + "epoch": 0.014620055414726168, + "grad_norm": 0.07491567730903625, + "learning_rate": 4.9994866721443215e-05, + "loss": 1.5461, + "step": 93 + }, + { + "epoch": 0.014777260311658708, + "grad_norm": 0.07258685678243637, + "learning_rate": 4.9994740760517605e-05, + "loss": 1.5516, + "step": 94 + }, + { + "epoch": 0.014934465208591248, + "grad_norm": 0.07643327116966248, + "learning_rate": 4.9994613273008334e-05, + "loss": 1.6223, + "step": 95 + }, + { + "epoch": 0.015091670105523787, + "grad_norm": 0.0740588903427124, + "learning_rate": 4.999448425892318e-05, + "loss": 1.5322, + "step": 96 + }, + { + "epoch": 0.015248875002456327, + "grad_norm": 0.44172239303588867, + "learning_rate": 4.999435371827003e-05, + "loss": 1.5498, + "step": 97 + }, + { + "epoch": 0.015406079899388867, + "grad_norm": 0.0756363570690155, + "learning_rate": 4.999422165105684e-05, + "loss": 1.559, + "step": 98 + }, + { + "epoch": 0.015563284796321405, + "grad_norm": 0.07251248508691788, + "learning_rate": 4.99940880572917e-05, + "loss": 1.5903, + "step": 99 + }, + { + "epoch": 0.015720489693253945, + "grad_norm": 0.06931837648153305, + "learning_rate": 4.999395293698275e-05, + "loss": 1.4849, + "step": 100 + }, + { + "epoch": 0.015877694590186484, + "grad_norm": 0.07403590530157089, + "learning_rate": 4.9993816290138254e-05, + "loss": 1.5191, + "step": 101 + }, + { + "epoch": 0.016034899487119024, + "grad_norm": 0.07027724385261536, + "learning_rate": 4.999367811676655e-05, + "loss": 1.5655, + "step": 102 + }, + { + "epoch": 0.016192104384051564, + "grad_norm": 0.07320379465818405, + "learning_rate": 4.9993538416876093e-05, + "loss": 1.4869, + "step": 103 + }, + { + "epoch": 0.016349309280984103, + "grad_norm": 0.0726180374622345, + "learning_rate": 4.9993397190475396e-05, + "loss": 1.4629, + "step": 104 + }, + { + "epoch": 0.016506514177916643, + "grad_norm": 0.07542011886835098, + "learning_rate": 4.999325443757309e-05, + "loss": 1.5976, + "step": 105 + }, + { + "epoch": 0.016663719074849183, + "grad_norm": 0.07440067082643509, + "learning_rate": 4.9993110158177895e-05, + "loss": 1.5469, + "step": 106 + }, + { + "epoch": 0.016820923971781723, + "grad_norm": 0.07547372579574585, + "learning_rate": 4.999296435229863e-05, + "loss": 1.5328, + "step": 107 + }, + { + "epoch": 0.01697812886871426, + "grad_norm": 0.07532137632369995, + "learning_rate": 4.999281701994419e-05, + "loss": 1.6742, + "step": 108 + }, + { + "epoch": 0.0171353337656468, + "grad_norm": 0.07249438762664795, + "learning_rate": 4.999266816112358e-05, + "loss": 1.4799, + "step": 109 + }, + { + "epoch": 0.01729253866257934, + "grad_norm": 0.07399806380271912, + "learning_rate": 4.999251777584589e-05, + "loss": 1.5438, + "step": 110 + }, + { + "epoch": 0.017449743559511878, + "grad_norm": 0.08135057240724564, + "learning_rate": 4.99923658641203e-05, + "loss": 1.5608, + "step": 111 + }, + { + "epoch": 0.017606948456444418, + "grad_norm": 0.07508935779333115, + "learning_rate": 4.99922124259561e-05, + "loss": 1.5894, + "step": 112 + }, + { + "epoch": 0.017764153353376957, + "grad_norm": 0.07432372123003006, + "learning_rate": 4.999205746136265e-05, + "loss": 1.4818, + "step": 113 + }, + { + "epoch": 0.017921358250309497, + "grad_norm": 0.07694194465875626, + "learning_rate": 4.999190097034942e-05, + "loss": 1.5629, + "step": 114 + }, + { + "epoch": 0.018078563147242037, + "grad_norm": 
0.07384433597326279, + "learning_rate": 4.999174295292597e-05, + "loss": 1.4829, + "step": 115 + }, + { + "epoch": 0.018235768044174577, + "grad_norm": 0.07152919471263885, + "learning_rate": 4.999158340910195e-05, + "loss": 1.4748, + "step": 116 + }, + { + "epoch": 0.018392972941107116, + "grad_norm": 0.07719701528549194, + "learning_rate": 4.999142233888709e-05, + "loss": 1.5524, + "step": 117 + }, + { + "epoch": 0.018550177838039656, + "grad_norm": 0.07540587335824966, + "learning_rate": 4.999125974229125e-05, + "loss": 1.4661, + "step": 118 + }, + { + "epoch": 0.018707382734972196, + "grad_norm": 0.0787581130862236, + "learning_rate": 4.9991095619324344e-05, + "loss": 1.6455, + "step": 119 + }, + { + "epoch": 0.018864587631904736, + "grad_norm": 0.07454577833414078, + "learning_rate": 4.999092996999641e-05, + "loss": 1.5083, + "step": 120 + }, + { + "epoch": 0.019021792528837272, + "grad_norm": 0.0751076266169548, + "learning_rate": 4.9990762794317545e-05, + "loss": 1.4874, + "step": 121 + }, + { + "epoch": 0.01917899742576981, + "grad_norm": 0.07733119279146194, + "learning_rate": 4.999059409229798e-05, + "loss": 1.6308, + "step": 122 + }, + { + "epoch": 0.01933620232270235, + "grad_norm": 0.07897089421749115, + "learning_rate": 4.999042386394802e-05, + "loss": 1.5906, + "step": 123 + }, + { + "epoch": 0.01949340721963489, + "grad_norm": 0.07758141309022903, + "learning_rate": 4.999025210927804e-05, + "loss": 1.5604, + "step": 124 + }, + { + "epoch": 0.01965061211656743, + "grad_norm": 0.07845707982778549, + "learning_rate": 4.9990078828298544e-05, + "loss": 1.5901, + "step": 125 + }, + { + "epoch": 0.01980781701349997, + "grad_norm": 0.0772818773984909, + "learning_rate": 4.998990402102012e-05, + "loss": 1.4516, + "step": 126 + }, + { + "epoch": 0.01996502191043251, + "grad_norm": 0.07795504480600357, + "learning_rate": 4.998972768745344e-05, + "loss": 1.4642, + "step": 127 + }, + { + "epoch": 0.02012222680736505, + "grad_norm": 0.0784008800983429, + "learning_rate": 4.998954982760926e-05, + "loss": 1.5936, + "step": 128 + }, + { + "epoch": 0.02027943170429759, + "grad_norm": 0.07791212201118469, + "learning_rate": 4.9989370441498465e-05, + "loss": 1.4705, + "step": 129 + }, + { + "epoch": 0.02043663660123013, + "grad_norm": 0.07785367220640182, + "learning_rate": 4.9989189529132004e-05, + "loss": 1.5085, + "step": 130 + }, + { + "epoch": 0.02059384149816267, + "grad_norm": 0.07916689664125443, + "learning_rate": 4.9989007090520925e-05, + "loss": 1.5365, + "step": 131 + }, + { + "epoch": 0.02075104639509521, + "grad_norm": 0.0775083601474762, + "learning_rate": 4.9988823125676367e-05, + "loss": 1.5286, + "step": 132 + }, + { + "epoch": 0.020908251292027745, + "grad_norm": 0.08110442757606506, + "learning_rate": 4.998863763460956e-05, + "loss": 1.5779, + "step": 133 + }, + { + "epoch": 0.021065456188960285, + "grad_norm": 0.0814640000462532, + "learning_rate": 4.998845061733185e-05, + "loss": 1.4778, + "step": 134 + }, + { + "epoch": 0.021222661085892824, + "grad_norm": 0.08069492131471634, + "learning_rate": 4.998826207385465e-05, + "loss": 1.5317, + "step": 135 + }, + { + "epoch": 0.021379865982825364, + "grad_norm": 0.07377774268388748, + "learning_rate": 4.998807200418948e-05, + "loss": 1.5258, + "step": 136 + }, + { + "epoch": 0.021537070879757904, + "grad_norm": 0.0787922590970993, + "learning_rate": 4.9987880408347945e-05, + "loss": 1.5185, + "step": 137 + }, + { + "epoch": 0.021694275776690444, + "grad_norm": 0.07662995159626007, + "learning_rate": 4.9987687286341745e-05, + 
"loss": 1.4637, + "step": 138 + }, + { + "epoch": 0.021851480673622983, + "grad_norm": 0.08528955280780792, + "learning_rate": 4.9987492638182676e-05, + "loss": 1.4776, + "step": 139 + }, + { + "epoch": 0.022008685570555523, + "grad_norm": 0.08089053630828857, + "learning_rate": 4.9987296463882626e-05, + "loss": 1.5885, + "step": 140 + }, + { + "epoch": 0.022165890467488063, + "grad_norm": 0.08029694855213165, + "learning_rate": 4.998709876345358e-05, + "loss": 1.4557, + "step": 141 + }, + { + "epoch": 0.022323095364420602, + "grad_norm": 0.07918502390384674, + "learning_rate": 4.9986899536907614e-05, + "loss": 1.4285, + "step": 142 + }, + { + "epoch": 0.022480300261353142, + "grad_norm": 0.0813126415014267, + "learning_rate": 4.998669878425689e-05, + "loss": 1.5958, + "step": 143 + }, + { + "epoch": 0.022637505158285682, + "grad_norm": 0.07935188710689545, + "learning_rate": 4.998649650551368e-05, + "loss": 1.5249, + "step": 144 + }, + { + "epoch": 0.02279471005521822, + "grad_norm": 0.08163304626941681, + "learning_rate": 4.9986292700690324e-05, + "loss": 1.483, + "step": 145 + }, + { + "epoch": 0.022951914952150758, + "grad_norm": 0.08277447521686554, + "learning_rate": 4.998608736979928e-05, + "loss": 1.6212, + "step": 146 + }, + { + "epoch": 0.023109119849083298, + "grad_norm": 0.08285827934741974, + "learning_rate": 4.9985880512853076e-05, + "loss": 1.4495, + "step": 147 + }, + { + "epoch": 0.023266324746015837, + "grad_norm": 0.082750603556633, + "learning_rate": 4.998567212986437e-05, + "loss": 1.4335, + "step": 148 + }, + { + "epoch": 0.023423529642948377, + "grad_norm": 0.07986058294773102, + "learning_rate": 4.998546222084587e-05, + "loss": 1.4704, + "step": 149 + }, + { + "epoch": 0.023580734539880917, + "grad_norm": 0.08105576783418655, + "learning_rate": 4.9985250785810396e-05, + "loss": 1.5183, + "step": 150 + }, + { + "epoch": 0.023737939436813457, + "grad_norm": 0.08202917128801346, + "learning_rate": 4.9985037824770866e-05, + "loss": 1.5423, + "step": 151 + }, + { + "epoch": 0.023895144333745996, + "grad_norm": 0.08937894552946091, + "learning_rate": 4.998482333774029e-05, + "loss": 1.5731, + "step": 152 + }, + { + "epoch": 0.024052349230678536, + "grad_norm": 0.08333728462457657, + "learning_rate": 4.9984607324731766e-05, + "loss": 1.5133, + "step": 153 + }, + { + "epoch": 0.024209554127611076, + "grad_norm": 0.08529175072908401, + "learning_rate": 4.998438978575849e-05, + "loss": 1.516, + "step": 154 + }, + { + "epoch": 0.024366759024543615, + "grad_norm": 0.08508963882923126, + "learning_rate": 4.998417072083374e-05, + "loss": 1.5646, + "step": 155 + }, + { + "epoch": 0.024523963921476155, + "grad_norm": 0.08971578627824783, + "learning_rate": 4.99839501299709e-05, + "loss": 1.4714, + "step": 156 + }, + { + "epoch": 0.024681168818408695, + "grad_norm": 0.08380109816789627, + "learning_rate": 4.998372801318345e-05, + "loss": 1.4476, + "step": 157 + }, + { + "epoch": 0.024838373715341235, + "grad_norm": 0.08533143252134323, + "learning_rate": 4.9983504370484945e-05, + "loss": 1.4866, + "step": 158 + }, + { + "epoch": 0.02499557861227377, + "grad_norm": 0.08318709582090378, + "learning_rate": 4.998327920188905e-05, + "loss": 1.5274, + "step": 159 + }, + { + "epoch": 0.02515278350920631, + "grad_norm": 0.08486370742321014, + "learning_rate": 4.9983052507409525e-05, + "loss": 1.4713, + "step": 160 + }, + { + "epoch": 0.02515278350920631, + "eval_loss": 1.5136528015136719, + "eval_runtime": 2318.8971, + "eval_samples_per_second": 3.992, + "eval_steps_per_second": 1.996, + 
"step": 160 + }, + { + "epoch": 0.02530998840613885, + "grad_norm": 0.08242359757423401, + "learning_rate": 4.9982824287060195e-05, + "loss": 1.5069, + "step": 161 + }, + { + "epoch": 0.02546719330307139, + "grad_norm": 0.08547423779964447, + "learning_rate": 4.9982594540855014e-05, + "loss": 1.4973, + "step": 162 + }, + { + "epoch": 0.02562439820000393, + "grad_norm": 0.08345580101013184, + "learning_rate": 4.9982363268808016e-05, + "loss": 1.5078, + "step": 163 + }, + { + "epoch": 0.02578160309693647, + "grad_norm": 0.0830339640378952, + "learning_rate": 4.9982130470933316e-05, + "loss": 1.4098, + "step": 164 + }, + { + "epoch": 0.02593880799386901, + "grad_norm": 0.08568515628576279, + "learning_rate": 4.998189614724514e-05, + "loss": 1.4628, + "step": 165 + }, + { + "epoch": 0.02609601289080155, + "grad_norm": 0.08261829614639282, + "learning_rate": 4.998166029775779e-05, + "loss": 1.4492, + "step": 166 + }, + { + "epoch": 0.02625321778773409, + "grad_norm": 0.08944887667894363, + "learning_rate": 4.998142292248569e-05, + "loss": 1.5633, + "step": 167 + }, + { + "epoch": 0.02641042268466663, + "grad_norm": 0.08632911741733551, + "learning_rate": 4.998118402144332e-05, + "loss": 1.5106, + "step": 168 + }, + { + "epoch": 0.026567627581599168, + "grad_norm": 0.08733859658241272, + "learning_rate": 4.998094359464528e-05, + "loss": 1.5607, + "step": 169 + }, + { + "epoch": 0.026724832478531708, + "grad_norm": 0.08667927235364914, + "learning_rate": 4.9980701642106245e-05, + "loss": 1.4544, + "step": 170 + }, + { + "epoch": 0.026882037375464244, + "grad_norm": 0.08655022084712982, + "learning_rate": 4.9980458163841006e-05, + "loss": 1.5264, + "step": 171 + }, + { + "epoch": 0.027039242272396784, + "grad_norm": 0.08899988234043121, + "learning_rate": 4.9980213159864426e-05, + "loss": 1.4778, + "step": 172 + }, + { + "epoch": 0.027196447169329323, + "grad_norm": 0.09411856532096863, + "learning_rate": 4.997996663019147e-05, + "loss": 1.5269, + "step": 173 + }, + { + "epoch": 0.027353652066261863, + "grad_norm": 0.087191641330719, + "learning_rate": 4.997971857483719e-05, + "loss": 1.5166, + "step": 174 + }, + { + "epoch": 0.027510856963194403, + "grad_norm": 0.08959636092185974, + "learning_rate": 4.997946899381675e-05, + "loss": 1.5503, + "step": 175 + }, + { + "epoch": 0.027668061860126943, + "grad_norm": 0.0951187014579773, + "learning_rate": 4.997921788714537e-05, + "loss": 1.4879, + "step": 176 + }, + { + "epoch": 0.027825266757059482, + "grad_norm": 0.09324768930673599, + "learning_rate": 4.997896525483841e-05, + "loss": 1.5714, + "step": 177 + }, + { + "epoch": 0.027982471653992022, + "grad_norm": 0.08633986115455627, + "learning_rate": 4.997871109691129e-05, + "loss": 1.4198, + "step": 178 + }, + { + "epoch": 0.028139676550924562, + "grad_norm": 0.08947525173425674, + "learning_rate": 4.9978455413379535e-05, + "loss": 1.4702, + "step": 179 + }, + { + "epoch": 0.0282968814478571, + "grad_norm": 0.09275490790605545, + "learning_rate": 4.9978198204258766e-05, + "loss": 1.5252, + "step": 180 + }, + { + "epoch": 0.02845408634478964, + "grad_norm": 0.08761609345674515, + "learning_rate": 4.9977939469564676e-05, + "loss": 1.505, + "step": 181 + }, + { + "epoch": 0.02861129124172218, + "grad_norm": 0.08683087676763535, + "learning_rate": 4.997767920931308e-05, + "loss": 1.5059, + "step": 182 + }, + { + "epoch": 0.02876849613865472, + "grad_norm": 0.08931361883878708, + "learning_rate": 4.997741742351988e-05, + "loss": 1.5003, + "step": 183 + }, + { + "epoch": 0.028925701035587257, + 
"grad_norm": 0.08820109069347382, + "learning_rate": 4.997715411220105e-05, + "loss": 1.5132, + "step": 184 + }, + { + "epoch": 0.029082905932519797, + "grad_norm": 0.09284964948892593, + "learning_rate": 4.997688927537268e-05, + "loss": 1.4561, + "step": 185 + }, + { + "epoch": 0.029240110829452336, + "grad_norm": 0.09472864121198654, + "learning_rate": 4.997662291305094e-05, + "loss": 1.4729, + "step": 186 + }, + { + "epoch": 0.029397315726384876, + "grad_norm": 0.08725330233573914, + "learning_rate": 4.997635502525211e-05, + "loss": 1.3994, + "step": 187 + }, + { + "epoch": 0.029554520623317416, + "grad_norm": 0.09085626900196075, + "learning_rate": 4.9976085611992536e-05, + "loss": 1.4695, + "step": 188 + }, + { + "epoch": 0.029711725520249956, + "grad_norm": 0.09322400391101837, + "learning_rate": 4.9975814673288684e-05, + "loss": 1.4753, + "step": 189 + }, + { + "epoch": 0.029868930417182495, + "grad_norm": 0.08927160501480103, + "learning_rate": 4.99755422091571e-05, + "loss": 1.4465, + "step": 190 + }, + { + "epoch": 0.030026135314115035, + "grad_norm": 0.09317070990800858, + "learning_rate": 4.997526821961442e-05, + "loss": 1.5124, + "step": 191 + }, + { + "epoch": 0.030183340211047575, + "grad_norm": 0.08911167085170746, + "learning_rate": 4.9974992704677385e-05, + "loss": 1.4515, + "step": 192 + }, + { + "epoch": 0.030340545107980114, + "grad_norm": 0.09432853013277054, + "learning_rate": 4.997471566436282e-05, + "loss": 1.4623, + "step": 193 + }, + { + "epoch": 0.030497750004912654, + "grad_norm": 0.09417332708835602, + "learning_rate": 4.997443709868764e-05, + "loss": 1.5103, + "step": 194 + }, + { + "epoch": 0.030654954901845194, + "grad_norm": 0.09564542025327682, + "learning_rate": 4.997415700766887e-05, + "loss": 1.4929, + "step": 195 + }, + { + "epoch": 0.030812159798777734, + "grad_norm": 0.09101004898548126, + "learning_rate": 4.997387539132361e-05, + "loss": 1.4225, + "step": 196 + }, + { + "epoch": 0.03096936469571027, + "grad_norm": 0.09196274727582932, + "learning_rate": 4.997359224966906e-05, + "loss": 1.4701, + "step": 197 + }, + { + "epoch": 0.03112656959264281, + "grad_norm": 0.09573279321193695, + "learning_rate": 4.997330758272251e-05, + "loss": 1.4425, + "step": 198 + }, + { + "epoch": 0.03128377448957535, + "grad_norm": 0.09180758893489838, + "learning_rate": 4.9973021390501354e-05, + "loss": 1.4426, + "step": 199 + }, + { + "epoch": 0.03144097938650789, + "grad_norm": 0.09583238512277603, + "learning_rate": 4.997273367302306e-05, + "loss": 1.5158, + "step": 200 + }, + { + "epoch": 0.03159818428344043, + "grad_norm": 0.09394747018814087, + "learning_rate": 4.997244443030521e-05, + "loss": 1.4306, + "step": 201 + }, + { + "epoch": 0.03175538918037297, + "grad_norm": 0.09470199793577194, + "learning_rate": 4.9972153662365474e-05, + "loss": 1.5286, + "step": 202 + }, + { + "epoch": 0.031912594077305505, + "grad_norm": 0.09274959564208984, + "learning_rate": 4.997186136922161e-05, + "loss": 1.4803, + "step": 203 + }, + { + "epoch": 0.03206979897423805, + "grad_norm": 0.09344369918107986, + "learning_rate": 4.997156755089145e-05, + "loss": 1.5449, + "step": 204 + }, + { + "epoch": 0.032227003871170584, + "grad_norm": 0.09794919937849045, + "learning_rate": 4.997127220739296e-05, + "loss": 1.4383, + "step": 205 + }, + { + "epoch": 0.03238420876810313, + "grad_norm": 0.09698093682527542, + "learning_rate": 4.997097533874418e-05, + "loss": 1.4462, + "step": 206 + }, + { + "epoch": 0.032541413665035664, + "grad_norm": 0.09690559655427933, + "learning_rate": 
4.997067694496323e-05, + "loss": 1.4735, + "step": 207 + }, + { + "epoch": 0.03269861856196821, + "grad_norm": 0.09657544642686844, + "learning_rate": 4.9970377026068336e-05, + "loss": 1.5672, + "step": 208 + }, + { + "epoch": 0.03285582345890074, + "grad_norm": 0.09483659267425537, + "learning_rate": 4.9970075582077825e-05, + "loss": 1.4931, + "step": 209 + }, + { + "epoch": 0.033013028355833286, + "grad_norm": 0.09744243323802948, + "learning_rate": 4.9969772613010104e-05, + "loss": 1.4638, + "step": 210 + }, + { + "epoch": 0.03317023325276582, + "grad_norm": 0.09521006047725677, + "learning_rate": 4.9969468118883665e-05, + "loss": 1.4127, + "step": 211 + }, + { + "epoch": 0.033327438149698366, + "grad_norm": 0.09646004438400269, + "learning_rate": 4.996916209971713e-05, + "loss": 1.5139, + "step": 212 + }, + { + "epoch": 0.0334846430466309, + "grad_norm": 0.09292810410261154, + "learning_rate": 4.996885455552916e-05, + "loss": 1.4399, + "step": 213 + }, + { + "epoch": 0.033641847943563445, + "grad_norm": 0.09986516088247299, + "learning_rate": 4.996854548633857e-05, + "loss": 1.4637, + "step": 214 + }, + { + "epoch": 0.03379905284049598, + "grad_norm": 0.09723702073097229, + "learning_rate": 4.996823489216421e-05, + "loss": 1.5673, + "step": 215 + }, + { + "epoch": 0.03395625773742852, + "grad_norm": 0.09608977288007736, + "learning_rate": 4.996792277302507e-05, + "loss": 1.4428, + "step": 216 + }, + { + "epoch": 0.03411346263436106, + "grad_norm": 0.09329380095005035, + "learning_rate": 4.99676091289402e-05, + "loss": 1.3892, + "step": 217 + }, + { + "epoch": 0.0342706675312936, + "grad_norm": 0.0959913358092308, + "learning_rate": 4.996729395992875e-05, + "loss": 1.5219, + "step": 218 + }, + { + "epoch": 0.03442787242822614, + "grad_norm": 0.09832671284675598, + "learning_rate": 4.996697726600999e-05, + "loss": 1.5259, + "step": 219 + }, + { + "epoch": 0.03458507732515868, + "grad_norm": 0.10061636567115784, + "learning_rate": 4.996665904720325e-05, + "loss": 1.5216, + "step": 220 + }, + { + "epoch": 0.03474228222209122, + "grad_norm": 0.09742400050163269, + "learning_rate": 4.9966339303527965e-05, + "loss": 1.3819, + "step": 221 + }, + { + "epoch": 0.034899487119023756, + "grad_norm": 0.09629969298839569, + "learning_rate": 4.996601803500367e-05, + "loss": 1.5341, + "step": 222 + }, + { + "epoch": 0.0350566920159563, + "grad_norm": 0.09776200354099274, + "learning_rate": 4.996569524164998e-05, + "loss": 1.5054, + "step": 223 + }, + { + "epoch": 0.035213896912888835, + "grad_norm": 0.1008530780673027, + "learning_rate": 4.996537092348661e-05, + "loss": 1.5333, + "step": 224 + }, + { + "epoch": 0.03537110180982138, + "grad_norm": 0.09749735891819, + "learning_rate": 4.996504508053338e-05, + "loss": 1.3899, + "step": 225 + }, + { + "epoch": 0.035528306706753915, + "grad_norm": 0.10522401332855225, + "learning_rate": 4.9964717712810175e-05, + "loss": 1.5413, + "step": 226 + }, + { + "epoch": 0.03568551160368646, + "grad_norm": 0.09566272795200348, + "learning_rate": 4.9964388820336996e-05, + "loss": 1.435, + "step": 227 + }, + { + "epoch": 0.035842716500618994, + "grad_norm": 0.10133984684944153, + "learning_rate": 4.996405840313393e-05, + "loss": 1.445, + "step": 228 + }, + { + "epoch": 0.03599992139755153, + "grad_norm": 0.09702739119529724, + "learning_rate": 4.996372646122116e-05, + "loss": 1.4287, + "step": 229 + }, + { + "epoch": 0.036157126294484074, + "grad_norm": 0.1012992411851883, + "learning_rate": 4.996339299461896e-05, + "loss": 1.382, + "step": 230 + }, + { + "epoch": 
0.03631433119141661, + "grad_norm": 0.09877166152000427, + "learning_rate": 4.99630580033477e-05, + "loss": 1.5729, + "step": 231 + }, + { + "epoch": 0.03647153608834915, + "grad_norm": 0.1033129170536995, + "learning_rate": 4.996272148742783e-05, + "loss": 1.4754, + "step": 232 + }, + { + "epoch": 0.03662874098528169, + "grad_norm": 0.09901215881109238, + "learning_rate": 4.9962383446879914e-05, + "loss": 1.5153, + "step": 233 + }, + { + "epoch": 0.03678594588221423, + "grad_norm": 0.10241983830928802, + "learning_rate": 4.996204388172458e-05, + "loss": 1.5131, + "step": 234 + }, + { + "epoch": 0.03694315077914677, + "grad_norm": 0.09574593603610992, + "learning_rate": 4.9961702791982594e-05, + "loss": 1.5285, + "step": 235 + }, + { + "epoch": 0.03710035567607931, + "grad_norm": 0.10309838503599167, + "learning_rate": 4.996136017767477e-05, + "loss": 1.5751, + "step": 236 + }, + { + "epoch": 0.03725756057301185, + "grad_norm": 0.09928470849990845, + "learning_rate": 4.996101603882204e-05, + "loss": 1.5108, + "step": 237 + }, + { + "epoch": 0.03741476546994439, + "grad_norm": 0.10514767467975616, + "learning_rate": 4.996067037544542e-05, + "loss": 1.4206, + "step": 238 + }, + { + "epoch": 0.03757197036687693, + "grad_norm": 0.10411518812179565, + "learning_rate": 4.996032318756601e-05, + "loss": 1.5628, + "step": 239 + }, + { + "epoch": 0.03772917526380947, + "grad_norm": 0.0989808738231659, + "learning_rate": 4.9959974475205045e-05, + "loss": 1.4444, + "step": 240 + }, + { + "epoch": 0.03788638016074201, + "grad_norm": 0.10069911926984787, + "learning_rate": 4.9959624238383804e-05, + "loss": 1.4805, + "step": 241 + }, + { + "epoch": 0.038043585057674544, + "grad_norm": 0.10637518763542175, + "learning_rate": 4.995927247712367e-05, + "loss": 1.5289, + "step": 242 + }, + { + "epoch": 0.03820078995460709, + "grad_norm": 0.10085684061050415, + "learning_rate": 4.995891919144614e-05, + "loss": 1.5288, + "step": 243 + }, + { + "epoch": 0.03835799485153962, + "grad_norm": 0.09989017248153687, + "learning_rate": 4.995856438137279e-05, + "loss": 1.5444, + "step": 244 + }, + { + "epoch": 0.038515199748472166, + "grad_norm": 0.10382463037967682, + "learning_rate": 4.9958208046925294e-05, + "loss": 1.4621, + "step": 245 + }, + { + "epoch": 0.0386724046454047, + "grad_norm": 0.10208063572645187, + "learning_rate": 4.99578501881254e-05, + "loss": 1.5003, + "step": 246 + }, + { + "epoch": 0.038829609542337246, + "grad_norm": 0.1028011366724968, + "learning_rate": 4.9957490804994977e-05, + "loss": 1.516, + "step": 247 + }, + { + "epoch": 0.03898681443926978, + "grad_norm": 0.10475701838731766, + "learning_rate": 4.995712989755598e-05, + "loss": 1.5333, + "step": 248 + }, + { + "epoch": 0.039144019336202325, + "grad_norm": 0.1038154736161232, + "learning_rate": 4.995676746583044e-05, + "loss": 1.4779, + "step": 249 + }, + { + "epoch": 0.03930122423313486, + "grad_norm": 0.10413440316915512, + "learning_rate": 4.99564035098405e-05, + "loss": 1.5241, + "step": 250 + }, + { + "epoch": 0.039458429130067404, + "grad_norm": 0.09869382530450821, + "learning_rate": 4.995603802960838e-05, + "loss": 1.442, + "step": 251 + }, + { + "epoch": 0.03961563402699994, + "grad_norm": 0.10138234496116638, + "learning_rate": 4.995567102515641e-05, + "loss": 1.5393, + "step": 252 + }, + { + "epoch": 0.039772838923932484, + "grad_norm": 0.10225867480039597, + "learning_rate": 4.995530249650701e-05, + "loss": 1.4516, + "step": 253 + }, + { + "epoch": 0.03993004382086502, + "grad_norm": 0.09942895174026489, + "learning_rate": 
4.995493244368268e-05, + "loss": 1.4543, + "step": 254 + }, + { + "epoch": 0.040087248717797556, + "grad_norm": 0.11218860745429993, + "learning_rate": 4.995456086670602e-05, + "loss": 1.4985, + "step": 255 + }, + { + "epoch": 0.0402444536147301, + "grad_norm": 0.10839337855577469, + "learning_rate": 4.9954187765599736e-05, + "loss": 1.4805, + "step": 256 + }, + { + "epoch": 0.040401658511662636, + "grad_norm": 0.10317599028348923, + "learning_rate": 4.9953813140386595e-05, + "loss": 1.4412, + "step": 257 + }, + { + "epoch": 0.04055886340859518, + "grad_norm": 0.10285656154155731, + "learning_rate": 4.99534369910895e-05, + "loss": 1.476, + "step": 258 + }, + { + "epoch": 0.040716068305527715, + "grad_norm": 0.10330680012702942, + "learning_rate": 4.995305931773141e-05, + "loss": 1.5157, + "step": 259 + }, + { + "epoch": 0.04087327320246026, + "grad_norm": 0.1086694598197937, + "learning_rate": 4.99526801203354e-05, + "loss": 1.4999, + "step": 260 + }, + { + "epoch": 0.041030478099392795, + "grad_norm": 0.10800144821405411, + "learning_rate": 4.995229939892464e-05, + "loss": 1.4764, + "step": 261 + }, + { + "epoch": 0.04118768299632534, + "grad_norm": 0.10645303875207901, + "learning_rate": 4.9951917153522355e-05, + "loss": 1.4404, + "step": 262 + }, + { + "epoch": 0.041344887893257874, + "grad_norm": 0.10440964996814728, + "learning_rate": 4.9951533384151906e-05, + "loss": 1.3678, + "step": 263 + }, + { + "epoch": 0.04150209279019042, + "grad_norm": 0.10993078351020813, + "learning_rate": 4.995114809083673e-05, + "loss": 1.5064, + "step": 264 + }, + { + "epoch": 0.041659297687122954, + "grad_norm": 0.10710245370864868, + "learning_rate": 4.9950761273600366e-05, + "loss": 1.4134, + "step": 265 + }, + { + "epoch": 0.04181650258405549, + "grad_norm": 0.11030582338571548, + "learning_rate": 4.995037293246644e-05, + "loss": 1.5299, + "step": 266 + }, + { + "epoch": 0.04197370748098803, + "grad_norm": 0.1058267131447792, + "learning_rate": 4.994998306745866e-05, + "loss": 1.3654, + "step": 267 + }, + { + "epoch": 0.04213091237792057, + "grad_norm": 0.10541702806949615, + "learning_rate": 4.994959167860084e-05, + "loss": 1.4297, + "step": 268 + }, + { + "epoch": 0.04228811727485311, + "grad_norm": 0.11085420846939087, + "learning_rate": 4.994919876591689e-05, + "loss": 1.4876, + "step": 269 + }, + { + "epoch": 0.04244532217178565, + "grad_norm": 0.11054470390081406, + "learning_rate": 4.994880432943081e-05, + "loss": 1.574, + "step": 270 + }, + { + "epoch": 0.04260252706871819, + "grad_norm": 0.11234510689973831, + "learning_rate": 4.994840836916668e-05, + "loss": 1.5079, + "step": 271 + }, + { + "epoch": 0.04275973196565073, + "grad_norm": 0.11040106415748596, + "learning_rate": 4.994801088514869e-05, + "loss": 1.5091, + "step": 272 + }, + { + "epoch": 0.04291693686258327, + "grad_norm": 0.10639887303113937, + "learning_rate": 4.994761187740111e-05, + "loss": 1.4495, + "step": 273 + }, + { + "epoch": 0.04307414175951581, + "grad_norm": 0.11268071085214615, + "learning_rate": 4.994721134594833e-05, + "loss": 1.5057, + "step": 274 + }, + { + "epoch": 0.04323134665644835, + "grad_norm": 0.10079260170459747, + "learning_rate": 4.994680929081479e-05, + "loss": 1.4145, + "step": 275 + }, + { + "epoch": 0.04338855155338089, + "grad_norm": 0.11474710702896118, + "learning_rate": 4.994640571202506e-05, + "loss": 1.5061, + "step": 276 + }, + { + "epoch": 0.04354575645031343, + "grad_norm": 0.10946876555681229, + "learning_rate": 4.994600060960377e-05, + "loss": 1.5306, + "step": 277 + }, + { + "epoch": 
0.04370296134724597, + "grad_norm": 0.11192137002944946, + "learning_rate": 4.994559398357569e-05, + "loss": 1.5347, + "step": 278 + }, + { + "epoch": 0.0438601662441785, + "grad_norm": 0.10744784027338028, + "learning_rate": 4.994518583396564e-05, + "loss": 1.4686, + "step": 279 + }, + { + "epoch": 0.044017371141111046, + "grad_norm": 0.11113352328538895, + "learning_rate": 4.9944776160798544e-05, + "loss": 1.4101, + "step": 280 + }, + { + "epoch": 0.04417457603804358, + "grad_norm": 0.11456230282783508, + "learning_rate": 4.994436496409943e-05, + "loss": 1.4036, + "step": 281 + }, + { + "epoch": 0.044331780934976125, + "grad_norm": 0.11608672887086868, + "learning_rate": 4.994395224389342e-05, + "loss": 1.4949, + "step": 282 + }, + { + "epoch": 0.04448898583190866, + "grad_norm": 0.1232326403260231, + "learning_rate": 4.9943538000205705e-05, + "loss": 1.5501, + "step": 283 + }, + { + "epoch": 0.044646190728841205, + "grad_norm": 0.11791515350341797, + "learning_rate": 4.994312223306159e-05, + "loss": 1.4542, + "step": 284 + }, + { + "epoch": 0.04480339562577374, + "grad_norm": 0.11657550930976868, + "learning_rate": 4.9942704942486476e-05, + "loss": 1.4724, + "step": 285 + }, + { + "epoch": 0.044960600522706284, + "grad_norm": 0.11560262739658356, + "learning_rate": 4.994228612850584e-05, + "loss": 1.4036, + "step": 286 + }, + { + "epoch": 0.04511780541963882, + "grad_norm": 0.10999175906181335, + "learning_rate": 4.994186579114527e-05, + "loss": 1.4489, + "step": 287 + }, + { + "epoch": 0.045275010316571364, + "grad_norm": 0.11586826294660568, + "learning_rate": 4.9941443930430436e-05, + "loss": 1.5486, + "step": 288 + }, + { + "epoch": 0.0454322152135039, + "grad_norm": 0.11349951475858688, + "learning_rate": 4.994102054638711e-05, + "loss": 1.5698, + "step": 289 + }, + { + "epoch": 0.04558942011043644, + "grad_norm": 0.11978698521852493, + "learning_rate": 4.9940595639041134e-05, + "loss": 1.3933, + "step": 290 + }, + { + "epoch": 0.04574662500736898, + "grad_norm": 0.11438622325658798, + "learning_rate": 4.994016920841846e-05, + "loss": 1.5005, + "step": 291 + }, + { + "epoch": 0.045903829904301516, + "grad_norm": 0.11395915597677231, + "learning_rate": 4.9939741254545155e-05, + "loss": 1.4521, + "step": 292 + }, + { + "epoch": 0.04606103480123406, + "grad_norm": 0.11659599095582962, + "learning_rate": 4.993931177744734e-05, + "loss": 1.5166, + "step": 293 + }, + { + "epoch": 0.046218239698166595, + "grad_norm": 0.11053171753883362, + "learning_rate": 4.9938880777151254e-05, + "loss": 1.4459, + "step": 294 + }, + { + "epoch": 0.04637544459509914, + "grad_norm": 0.11428084224462509, + "learning_rate": 4.993844825368321e-05, + "loss": 1.4448, + "step": 295 + }, + { + "epoch": 0.046532649492031675, + "grad_norm": 0.10734150558710098, + "learning_rate": 4.993801420706964e-05, + "loss": 1.3388, + "step": 296 + }, + { + "epoch": 0.04668985438896422, + "grad_norm": 0.11137369275093079, + "learning_rate": 4.993757863733703e-05, + "loss": 1.4155, + "step": 297 + }, + { + "epoch": 0.046847059285896754, + "grad_norm": 0.1221408098936081, + "learning_rate": 4.993714154451202e-05, + "loss": 1.4884, + "step": 298 + }, + { + "epoch": 0.0470042641828293, + "grad_norm": 0.11707969009876251, + "learning_rate": 4.993670292862127e-05, + "loss": 1.4605, + "step": 299 + }, + { + "epoch": 0.047161469079761834, + "grad_norm": 0.11751751601696014, + "learning_rate": 4.993626278969158e-05, + "loss": 1.5538, + "step": 300 + }, + { + "epoch": 0.04731867397669438, + "grad_norm": 0.11617731302976608, + 
"learning_rate": 4.993582112774984e-05, + "loss": 1.438, + "step": 301 + }, + { + "epoch": 0.04747587887362691, + "grad_norm": 0.15164637565612793, + "learning_rate": 4.993537794282302e-05, + "loss": 1.4607, + "step": 302 + }, + { + "epoch": 0.047633083770559456, + "grad_norm": 0.12434446811676025, + "learning_rate": 4.9934933234938193e-05, + "loss": 1.4167, + "step": 303 + }, + { + "epoch": 0.04779028866749199, + "grad_norm": 0.12518739700317383, + "learning_rate": 4.993448700412251e-05, + "loss": 1.4003, + "step": 304 + }, + { + "epoch": 0.04794749356442453, + "grad_norm": 0.11146944761276245, + "learning_rate": 4.993403925040323e-05, + "loss": 1.3913, + "step": 305 + }, + { + "epoch": 0.04810469846135707, + "grad_norm": 0.11682326346635818, + "learning_rate": 4.993358997380771e-05, + "loss": 1.3415, + "step": 306 + }, + { + "epoch": 0.04826190335828961, + "grad_norm": 0.1197504773736, + "learning_rate": 4.993313917436336e-05, + "loss": 1.515, + "step": 307 + }, + { + "epoch": 0.04841910825522215, + "grad_norm": 0.14647473394870758, + "learning_rate": 4.993268685209775e-05, + "loss": 1.4529, + "step": 308 + }, + { + "epoch": 0.04857631315215469, + "grad_norm": 0.12431525439023972, + "learning_rate": 4.9932233007038484e-05, + "loss": 1.5426, + "step": 309 + }, + { + "epoch": 0.04873351804908723, + "grad_norm": 0.11715538799762726, + "learning_rate": 4.9931777639213284e-05, + "loss": 1.4615, + "step": 310 + }, + { + "epoch": 0.04889072294601977, + "grad_norm": 0.12391876429319382, + "learning_rate": 4.993132074864997e-05, + "loss": 1.4138, + "step": 311 + }, + { + "epoch": 0.04904792784295231, + "grad_norm": 0.11894181370735168, + "learning_rate": 4.9930862335376444e-05, + "loss": 1.4383, + "step": 312 + }, + { + "epoch": 0.049205132739884846, + "grad_norm": 0.1225295439362526, + "learning_rate": 4.9930402399420695e-05, + "loss": 1.3847, + "step": 313 + }, + { + "epoch": 0.04936233763681739, + "grad_norm": 0.11435995995998383, + "learning_rate": 4.9929940940810825e-05, + "loss": 1.4254, + "step": 314 + }, + { + "epoch": 0.049519542533749926, + "grad_norm": 0.11988761276006699, + "learning_rate": 4.9929477959575024e-05, + "loss": 1.4787, + "step": 315 + }, + { + "epoch": 0.04967674743068247, + "grad_norm": 0.11983373016119003, + "learning_rate": 4.992901345574155e-05, + "loss": 1.4341, + "step": 316 + }, + { + "epoch": 0.049833952327615005, + "grad_norm": 0.13395054638385773, + "learning_rate": 4.992854742933878e-05, + "loss": 1.4315, + "step": 317 + }, + { + "epoch": 0.04999115722454754, + "grad_norm": 0.12578143179416656, + "learning_rate": 4.9928079880395186e-05, + "loss": 1.4143, + "step": 318 + }, + { + "epoch": 0.050148362121480085, + "grad_norm": 0.1401878446340561, + "learning_rate": 4.992761080893932e-05, + "loss": 1.4665, + "step": 319 + }, + { + "epoch": 0.05030556701841262, + "grad_norm": 0.13048145174980164, + "learning_rate": 4.9927140214999826e-05, + "loss": 1.4266, + "step": 320 + }, + { + "epoch": 0.05030556701841262, + "eval_loss": 1.450086236000061, + "eval_runtime": 2316.1877, + "eval_samples_per_second": 3.997, + "eval_steps_per_second": 1.999, + "step": 320 + }, + { + "epoch": 0.050462771915345164, + "grad_norm": 0.13121232390403748, + "learning_rate": 4.992666809860545e-05, + "loss": 1.4946, + "step": 321 + }, + { + "epoch": 0.0506199768122777, + "grad_norm": 0.13547195494174957, + "learning_rate": 4.9926194459785015e-05, + "loss": 1.5532, + "step": 322 + }, + { + "epoch": 0.050777181709210244, + "grad_norm": 0.11797169595956802, + "learning_rate": 
4.992571929856747e-05, + "loss": 1.4118, + "step": 323 + }, + { + "epoch": 0.05093438660614278, + "grad_norm": 0.12734922766685486, + "learning_rate": 4.992524261498183e-05, + "loss": 1.4427, + "step": 324 + }, + { + "epoch": 0.05109159150307532, + "grad_norm": 0.12444902211427689, + "learning_rate": 4.99247644090572e-05, + "loss": 1.4369, + "step": 325 + }, + { + "epoch": 0.05124879640000786, + "grad_norm": 0.12244518846273422, + "learning_rate": 4.99242846808228e-05, + "loss": 1.4587, + "step": 326 + }, + { + "epoch": 0.0514060012969404, + "grad_norm": 0.12424397468566895, + "learning_rate": 4.9923803430307916e-05, + "loss": 1.3949, + "step": 327 + }, + { + "epoch": 0.05156320619387294, + "grad_norm": 0.1352718621492386, + "learning_rate": 4.9923320657541944e-05, + "loss": 1.504, + "step": 328 + }, + { + "epoch": 0.05172041109080548, + "grad_norm": 0.12855666875839233, + "learning_rate": 4.992283636255438e-05, + "loss": 1.4271, + "step": 329 + }, + { + "epoch": 0.05187761598773802, + "grad_norm": 0.129829540848732, + "learning_rate": 4.99223505453748e-05, + "loss": 1.455, + "step": 330 + }, + { + "epoch": 0.052034820884670555, + "grad_norm": 0.12780050933361053, + "learning_rate": 4.992186320603286e-05, + "loss": 1.4045, + "step": 331 + }, + { + "epoch": 0.0521920257816031, + "grad_norm": 0.13515712320804596, + "learning_rate": 4.992137434455834e-05, + "loss": 1.4335, + "step": 332 + }, + { + "epoch": 0.052349230678535634, + "grad_norm": 0.15026766061782837, + "learning_rate": 4.99208839609811e-05, + "loss": 1.5386, + "step": 333 + }, + { + "epoch": 0.05250643557546818, + "grad_norm": 0.13422101736068726, + "learning_rate": 4.992039205533108e-05, + "loss": 1.454, + "step": 334 + }, + { + "epoch": 0.05266364047240071, + "grad_norm": 0.13735777139663696, + "learning_rate": 4.991989862763833e-05, + "loss": 1.4415, + "step": 335 + }, + { + "epoch": 0.05282084536933326, + "grad_norm": 0.12985137104988098, + "learning_rate": 4.9919403677932994e-05, + "loss": 1.385, + "step": 336 + }, + { + "epoch": 0.05297805026626579, + "grad_norm": 0.1301167607307434, + "learning_rate": 4.9918907206245285e-05, + "loss": 1.4364, + "step": 337 + }, + { + "epoch": 0.053135255163198336, + "grad_norm": 0.1407599002122879, + "learning_rate": 4.991840921260553e-05, + "loss": 1.4454, + "step": 338 + }, + { + "epoch": 0.05329246006013087, + "grad_norm": 0.12763133645057678, + "learning_rate": 4.9917909697044164e-05, + "loss": 1.4008, + "step": 339 + }, + { + "epoch": 0.053449664957063416, + "grad_norm": 0.1443052589893341, + "learning_rate": 4.991740865959167e-05, + "loss": 1.5184, + "step": 340 + }, + { + "epoch": 0.05360686985399595, + "grad_norm": 0.13496418297290802, + "learning_rate": 4.991690610027866e-05, + "loss": 1.3888, + "step": 341 + }, + { + "epoch": 0.05376407475092849, + "grad_norm": 0.12681293487548828, + "learning_rate": 4.991640201913583e-05, + "loss": 1.42, + "step": 342 + }, + { + "epoch": 0.05392127964786103, + "grad_norm": 0.13178062438964844, + "learning_rate": 4.9915896416193965e-05, + "loss": 1.4178, + "step": 343 + }, + { + "epoch": 0.05407848454479357, + "grad_norm": 0.14452503621578217, + "learning_rate": 4.991538929148394e-05, + "loss": 1.4248, + "step": 344 + }, + { + "epoch": 0.05423568944172611, + "grad_norm": 0.1352955400943756, + "learning_rate": 4.991488064503674e-05, + "loss": 1.4304, + "step": 345 + }, + { + "epoch": 0.05439289433865865, + "grad_norm": 0.14846469461917877, + "learning_rate": 4.991437047688343e-05, + "loss": 1.4784, + "step": 346 + }, + { + "epoch": 
0.05455009923559119, + "grad_norm": 0.12475849688053131, + "learning_rate": 4.9913858787055156e-05, + "loss": 1.4131, + "step": 347 + }, + { + "epoch": 0.054707304132523726, + "grad_norm": 0.13835409283638, + "learning_rate": 4.991334557558318e-05, + "loss": 1.4913, + "step": 348 + }, + { + "epoch": 0.05486450902945627, + "grad_norm": 0.13921529054641724, + "learning_rate": 4.991283084249885e-05, + "loss": 1.3713, + "step": 349 + }, + { + "epoch": 0.055021713926388806, + "grad_norm": 0.13188250362873077, + "learning_rate": 4.9912314587833586e-05, + "loss": 1.3608, + "step": 350 + }, + { + "epoch": 0.05517891882332135, + "grad_norm": 0.12457428872585297, + "learning_rate": 4.991179681161895e-05, + "loss": 1.4427, + "step": 351 + }, + { + "epoch": 0.055336123720253885, + "grad_norm": 0.12452542781829834, + "learning_rate": 4.9911277513886535e-05, + "loss": 1.4179, + "step": 352 + }, + { + "epoch": 0.05549332861718643, + "grad_norm": 0.14799195528030396, + "learning_rate": 4.9910756694668074e-05, + "loss": 1.4532, + "step": 353 + }, + { + "epoch": 0.055650533514118965, + "grad_norm": 0.13485541939735413, + "learning_rate": 4.991023435399538e-05, + "loss": 1.4114, + "step": 354 + }, + { + "epoch": 0.0558077384110515, + "grad_norm": 0.1422443389892578, + "learning_rate": 4.990971049190034e-05, + "loss": 1.377, + "step": 355 + }, + { + "epoch": 0.055964943307984044, + "grad_norm": 0.12994804978370667, + "learning_rate": 4.990918510841496e-05, + "loss": 1.4474, + "step": 356 + }, + { + "epoch": 0.05612214820491658, + "grad_norm": 0.1429785192012787, + "learning_rate": 4.990865820357133e-05, + "loss": 1.4435, + "step": 357 + }, + { + "epoch": 0.056279353101849124, + "grad_norm": 0.12979790568351746, + "learning_rate": 4.9908129777401625e-05, + "loss": 1.4039, + "step": 358 + }, + { + "epoch": 0.05643655799878166, + "grad_norm": 0.1332644671201706, + "learning_rate": 4.990759982993812e-05, + "loss": 1.4377, + "step": 359 + }, + { + "epoch": 0.0565937628957142, + "grad_norm": 0.13796579837799072, + "learning_rate": 4.99070683612132e-05, + "loss": 1.3951, + "step": 360 + }, + { + "epoch": 0.05675096779264674, + "grad_norm": 0.14315246045589447, + "learning_rate": 4.9906535371259294e-05, + "loss": 1.4042, + "step": 361 + }, + { + "epoch": 0.05690817268957928, + "grad_norm": 0.1463768631219864, + "learning_rate": 4.9906000860108974e-05, + "loss": 1.461, + "step": 362 + }, + { + "epoch": 0.05706537758651182, + "grad_norm": 0.14041170477867126, + "learning_rate": 4.9905464827794884e-05, + "loss": 1.4147, + "step": 363 + }, + { + "epoch": 0.05722258248344436, + "grad_norm": 0.19242697954177856, + "learning_rate": 4.990492727434976e-05, + "loss": 1.3435, + "step": 364 + }, + { + "epoch": 0.0573797873803769, + "grad_norm": 0.1556611955165863, + "learning_rate": 4.990438819980644e-05, + "loss": 1.4075, + "step": 365 + }, + { + "epoch": 0.05753699227730944, + "grad_norm": 0.13157570362091064, + "learning_rate": 4.990384760419784e-05, + "loss": 1.3334, + "step": 366 + }, + { + "epoch": 0.05769419717424198, + "grad_norm": 0.17953743040561676, + "learning_rate": 4.990330548755698e-05, + "loss": 1.4609, + "step": 367 + }, + { + "epoch": 0.057851402071174514, + "grad_norm": 0.14179491996765137, + "learning_rate": 4.990276184991697e-05, + "loss": 1.4344, + "step": 368 + }, + { + "epoch": 0.05800860696810706, + "grad_norm": 0.16522593796253204, + "learning_rate": 4.9902216691311024e-05, + "loss": 1.3794, + "step": 369 + }, + { + "epoch": 0.05816581186503959, + "grad_norm": 0.12736016511917114, + "learning_rate": 
4.9901670011772425e-05, + "loss": 1.4167, + "step": 370 + }, + { + "epoch": 0.058323016761972137, + "grad_norm": 0.15869787335395813, + "learning_rate": 4.990112181133456e-05, + "loss": 1.4293, + "step": 371 + }, + { + "epoch": 0.05848022165890467, + "grad_norm": 0.14410504698753357, + "learning_rate": 4.990057209003093e-05, + "loss": 1.4357, + "step": 372 + }, + { + "epoch": 0.058637426555837216, + "grad_norm": 0.1567080020904541, + "learning_rate": 4.9900020847895086e-05, + "loss": 1.4146, + "step": 373 + }, + { + "epoch": 0.05879463145276975, + "grad_norm": 0.1430107057094574, + "learning_rate": 4.989946808496071e-05, + "loss": 1.3415, + "step": 374 + }, + { + "epoch": 0.058951836349702295, + "grad_norm": 0.146332785487175, + "learning_rate": 4.989891380126156e-05, + "loss": 1.496, + "step": 375 + }, + { + "epoch": 0.05910904124663483, + "grad_norm": 0.13674487173557281, + "learning_rate": 4.989835799683149e-05, + "loss": 1.3611, + "step": 376 + }, + { + "epoch": 0.059266246143567375, + "grad_norm": 0.1321984827518463, + "learning_rate": 4.989780067170444e-05, + "loss": 1.4695, + "step": 377 + }, + { + "epoch": 0.05942345104049991, + "grad_norm": 0.1535942554473877, + "learning_rate": 4.9897241825914464e-05, + "loss": 1.3564, + "step": 378 + }, + { + "epoch": 0.059580655937432454, + "grad_norm": 0.1538037806749344, + "learning_rate": 4.989668145949568e-05, + "loss": 1.3502, + "step": 379 + }, + { + "epoch": 0.05973786083436499, + "grad_norm": 0.15744829177856445, + "learning_rate": 4.989611957248232e-05, + "loss": 1.4318, + "step": 380 + }, + { + "epoch": 0.05989506573129753, + "grad_norm": 0.17178332805633545, + "learning_rate": 4.98955561649087e-05, + "loss": 1.4643, + "step": 381 + }, + { + "epoch": 0.06005227062823007, + "grad_norm": 0.15913072228431702, + "learning_rate": 4.989499123680923e-05, + "loss": 1.487, + "step": 382 + }, + { + "epoch": 0.060209475525162606, + "grad_norm": 0.15134060382843018, + "learning_rate": 4.9894424788218415e-05, + "loss": 1.4705, + "step": 383 + }, + { + "epoch": 0.06036668042209515, + "grad_norm": 0.13704389333724976, + "learning_rate": 4.989385681917085e-05, + "loss": 1.4756, + "step": 384 + }, + { + "epoch": 0.060523885319027686, + "grad_norm": 0.14025503396987915, + "learning_rate": 4.989328732970122e-05, + "loss": 1.443, + "step": 385 + }, + { + "epoch": 0.06068109021596023, + "grad_norm": 0.1822325438261032, + "learning_rate": 4.9892716319844325e-05, + "loss": 1.3996, + "step": 386 + }, + { + "epoch": 0.060838295112892765, + "grad_norm": 0.15639656782150269, + "learning_rate": 4.989214378963502e-05, + "loss": 1.3656, + "step": 387 + }, + { + "epoch": 0.06099550000982531, + "grad_norm": 0.15097728371620178, + "learning_rate": 4.989156973910828e-05, + "loss": 1.4055, + "step": 388 + }, + { + "epoch": 0.061152704906757845, + "grad_norm": 0.18977142870426178, + "learning_rate": 4.989099416829917e-05, + "loss": 1.4472, + "step": 389 + }, + { + "epoch": 0.06130990980369039, + "grad_norm": 0.1596304178237915, + "learning_rate": 4.989041707724284e-05, + "loss": 1.4373, + "step": 390 + }, + { + "epoch": 0.061467114700622924, + "grad_norm": 0.171820729970932, + "learning_rate": 4.988983846597454e-05, + "loss": 1.468, + "step": 391 + }, + { + "epoch": 0.06162431959755547, + "grad_norm": 0.14266176521778107, + "learning_rate": 4.98892583345296e-05, + "loss": 1.4037, + "step": 392 + }, + { + "epoch": 0.061781524494488003, + "grad_norm": 0.13375528156757355, + "learning_rate": 4.988867668294346e-05, + "loss": 1.437, + "step": 393 + }, + { + "epoch": 
0.06193872939142054, + "grad_norm": 0.13332228362560272, + "learning_rate": 4.988809351125165e-05, + "loss": 1.3892, + "step": 394 + }, + { + "epoch": 0.06209593428835308, + "grad_norm": 0.17180980741977692, + "learning_rate": 4.988750881948977e-05, + "loss": 1.3494, + "step": 395 + }, + { + "epoch": 0.06225313918528562, + "grad_norm": 0.1419111043214798, + "learning_rate": 4.988692260769355e-05, + "loss": 1.3748, + "step": 396 + }, + { + "epoch": 0.06241034408221816, + "grad_norm": 0.17256620526313782, + "learning_rate": 4.9886334875898776e-05, + "loss": 1.3549, + "step": 397 + }, + { + "epoch": 0.0625675489791507, + "grad_norm": 0.2243422418832779, + "learning_rate": 4.988574562414137e-05, + "loss": 1.4465, + "step": 398 + }, + { + "epoch": 0.06272475387608324, + "grad_norm": 0.15700723230838776, + "learning_rate": 4.9885154852457294e-05, + "loss": 1.4477, + "step": 399 + }, + { + "epoch": 0.06288195877301578, + "grad_norm": 0.14497259259223938, + "learning_rate": 4.988456256088264e-05, + "loss": 1.3861, + "step": 400 + }, + { + "epoch": 0.06303916366994831, + "grad_norm": 0.14747034013271332, + "learning_rate": 4.988396874945359e-05, + "loss": 1.4206, + "step": 401 + }, + { + "epoch": 0.06319636856688086, + "grad_norm": 0.17671054601669312, + "learning_rate": 4.98833734182064e-05, + "loss": 1.2475, + "step": 402 + }, + { + "epoch": 0.0633535734638134, + "grad_norm": 0.16974316537380219, + "learning_rate": 4.9882776567177446e-05, + "loss": 1.4955, + "step": 403 + }, + { + "epoch": 0.06351077836074594, + "grad_norm": 0.15419775247573853, + "learning_rate": 4.988217819640317e-05, + "loss": 1.4209, + "step": 404 + }, + { + "epoch": 0.06366798325767847, + "grad_norm": 0.13987664878368378, + "learning_rate": 4.988157830592012e-05, + "loss": 1.456, + "step": 405 + }, + { + "epoch": 0.06382518815461101, + "grad_norm": 0.24560455977916718, + "learning_rate": 4.988097689576493e-05, + "loss": 1.3567, + "step": 406 + }, + { + "epoch": 0.06398239305154356, + "grad_norm": 0.13870076835155487, + "learning_rate": 4.9880373965974334e-05, + "loss": 1.3752, + "step": 407 + }, + { + "epoch": 0.0641395979484761, + "grad_norm": 0.16167718172073364, + "learning_rate": 4.987976951658517e-05, + "loss": 1.4766, + "step": 408 + }, + { + "epoch": 0.06429680284540863, + "grad_norm": 0.1700398474931717, + "learning_rate": 4.9879163547634346e-05, + "loss": 1.427, + "step": 409 + }, + { + "epoch": 0.06445400774234117, + "grad_norm": 0.15502458810806274, + "learning_rate": 4.987855605915887e-05, + "loss": 1.3965, + "step": 410 + }, + { + "epoch": 0.06461121263927372, + "grad_norm": 0.14834032952785492, + "learning_rate": 4.987794705119584e-05, + "loss": 1.4399, + "step": 411 + }, + { + "epoch": 0.06476841753620625, + "grad_norm": 0.22443649172782898, + "learning_rate": 4.987733652378246e-05, + "loss": 1.3736, + "step": 412 + }, + { + "epoch": 0.06492562243313879, + "grad_norm": 0.14396560192108154, + "learning_rate": 4.9876724476956015e-05, + "loss": 1.4648, + "step": 413 + }, + { + "epoch": 0.06508282733007133, + "grad_norm": 0.15352006256580353, + "learning_rate": 4.987611091075389e-05, + "loss": 1.4988, + "step": 414 + }, + { + "epoch": 0.06524003222700388, + "grad_norm": 0.13210074603557587, + "learning_rate": 4.987549582521356e-05, + "loss": 1.3705, + "step": 415 + }, + { + "epoch": 0.06539723712393641, + "grad_norm": 0.16056782007217407, + "learning_rate": 4.98748792203726e-05, + "loss": 1.3388, + "step": 416 + }, + { + "epoch": 0.06555444202086895, + "grad_norm": 0.18992343544960022, + "learning_rate": 
4.9874261096268647e-05, + "loss": 1.3842, + "step": 417 + }, + { + "epoch": 0.06571164691780149, + "grad_norm": 0.1789916455745697, + "learning_rate": 4.9873641452939466e-05, + "loss": 1.3622, + "step": 418 + }, + { + "epoch": 0.06586885181473402, + "grad_norm": 0.21043789386749268, + "learning_rate": 4.9873020290422915e-05, + "loss": 1.3477, + "step": 419 + }, + { + "epoch": 0.06602605671166657, + "grad_norm": 0.15355254709720612, + "learning_rate": 4.987239760875691e-05, + "loss": 1.3643, + "step": 420 + }, + { + "epoch": 0.06618326160859911, + "grad_norm": 0.1433190107345581, + "learning_rate": 4.9871773407979496e-05, + "loss": 1.3753, + "step": 421 + }, + { + "epoch": 0.06634046650553165, + "grad_norm": 0.17479249835014343, + "learning_rate": 4.987114768812879e-05, + "loss": 1.3809, + "step": 422 + }, + { + "epoch": 0.06649767140246418, + "grad_norm": 0.186944842338562, + "learning_rate": 4.987052044924302e-05, + "loss": 1.3616, + "step": 423 + }, + { + "epoch": 0.06665487629939673, + "grad_norm": 0.15202952921390533, + "learning_rate": 4.986989169136048e-05, + "loss": 1.4479, + "step": 424 + }, + { + "epoch": 0.06681208119632927, + "grad_norm": 0.16295532882213593, + "learning_rate": 4.9869261414519575e-05, + "loss": 1.3713, + "step": 425 + }, + { + "epoch": 0.0669692860932618, + "grad_norm": 0.19577625393867493, + "learning_rate": 4.986862961875881e-05, + "loss": 1.4199, + "step": 426 + }, + { + "epoch": 0.06712649099019434, + "grad_norm": 0.22768542170524597, + "learning_rate": 4.986799630411677e-05, + "loss": 1.3529, + "step": 427 + }, + { + "epoch": 0.06728369588712689, + "grad_norm": 0.25184011459350586, + "learning_rate": 4.986736147063212e-05, + "loss": 1.3944, + "step": 428 + }, + { + "epoch": 0.06744090078405943, + "grad_norm": 0.15565118193626404, + "learning_rate": 4.986672511834366e-05, + "loss": 1.4505, + "step": 429 + }, + { + "epoch": 0.06759810568099196, + "grad_norm": 0.16559922695159912, + "learning_rate": 4.986608724729024e-05, + "loss": 1.3742, + "step": 430 + }, + { + "epoch": 0.0677553105779245, + "grad_norm": 0.14826242625713348, + "learning_rate": 4.986544785751081e-05, + "loss": 1.4008, + "step": 431 + }, + { + "epoch": 0.06791251547485704, + "grad_norm": 0.16543184220790863, + "learning_rate": 4.986480694904444e-05, + "loss": 1.3433, + "step": 432 + }, + { + "epoch": 0.06806972037178959, + "grad_norm": 0.15332931280136108, + "learning_rate": 4.986416452193027e-05, + "loss": 1.4459, + "step": 433 + }, + { + "epoch": 0.06822692526872212, + "grad_norm": 0.18880733847618103, + "learning_rate": 4.986352057620752e-05, + "loss": 1.3902, + "step": 434 + }, + { + "epoch": 0.06838413016565466, + "grad_norm": 0.1513829231262207, + "learning_rate": 4.986287511191554e-05, + "loss": 1.3485, + "step": 435 + }, + { + "epoch": 0.0685413350625872, + "grad_norm": 0.15241704881191254, + "learning_rate": 4.9862228129093745e-05, + "loss": 1.3051, + "step": 436 + }, + { + "epoch": 0.06869853995951974, + "grad_norm": 0.1956702321767807, + "learning_rate": 4.986157962778165e-05, + "loss": 1.4647, + "step": 437 + }, + { + "epoch": 0.06885574485645228, + "grad_norm": 0.2027936428785324, + "learning_rate": 4.9860929608018866e-05, + "loss": 1.3602, + "step": 438 + }, + { + "epoch": 0.06901294975338482, + "grad_norm": 0.1623186320066452, + "learning_rate": 4.986027806984509e-05, + "loss": 1.4154, + "step": 439 + }, + { + "epoch": 0.06917015465031735, + "grad_norm": 0.16111283004283905, + "learning_rate": 4.985962501330011e-05, + "loss": 1.4311, + "step": 440 + }, + { + "epoch": 
0.0693273595472499, + "grad_norm": 0.16754299402236938, + "learning_rate": 4.985897043842382e-05, + "loss": 1.349, + "step": 441 + }, + { + "epoch": 0.06948456444418244, + "grad_norm": 0.1766330897808075, + "learning_rate": 4.985831434525621e-05, + "loss": 1.3714, + "step": 442 + }, + { + "epoch": 0.06964176934111498, + "grad_norm": 0.1742810308933258, + "learning_rate": 4.985765673383733e-05, + "loss": 1.4161, + "step": 443 + }, + { + "epoch": 0.06979897423804751, + "grad_norm": 0.17025281488895416, + "learning_rate": 4.985699760420736e-05, + "loss": 1.3925, + "step": 444 + }, + { + "epoch": 0.06995617913498005, + "grad_norm": 0.19201375544071198, + "learning_rate": 4.985633695640655e-05, + "loss": 1.4158, + "step": 445 + }, + { + "epoch": 0.0701133840319126, + "grad_norm": 0.1636267751455307, + "learning_rate": 4.985567479047524e-05, + "loss": 1.4071, + "step": 446 + }, + { + "epoch": 0.07027058892884513, + "grad_norm": 0.19676333665847778, + "learning_rate": 4.9855011106453894e-05, + "loss": 1.3449, + "step": 447 + }, + { + "epoch": 0.07042779382577767, + "grad_norm": 0.17712907493114471, + "learning_rate": 4.985434590438303e-05, + "loss": 1.3421, + "step": 448 + }, + { + "epoch": 0.07058499872271021, + "grad_norm": 0.18515101075172424, + "learning_rate": 4.985367918430329e-05, + "loss": 1.4051, + "step": 449 + }, + { + "epoch": 0.07074220361964276, + "grad_norm": 0.17168915271759033, + "learning_rate": 4.985301094625538e-05, + "loss": 1.3093, + "step": 450 + }, + { + "epoch": 0.0708994085165753, + "grad_norm": 0.1891397386789322, + "learning_rate": 4.9852341190280127e-05, + "loss": 1.3075, + "step": 451 + }, + { + "epoch": 0.07105661341350783, + "grad_norm": 0.17731457948684692, + "learning_rate": 4.985166991641843e-05, + "loss": 1.3986, + "step": 452 + }, + { + "epoch": 0.07121381831044037, + "grad_norm": 0.18817296624183655, + "learning_rate": 4.985099712471129e-05, + "loss": 1.3531, + "step": 453 + }, + { + "epoch": 0.07137102320737292, + "grad_norm": 0.1782791018486023, + "learning_rate": 4.9850322815199795e-05, + "loss": 1.4064, + "step": 454 + }, + { + "epoch": 0.07152822810430545, + "grad_norm": 0.18053874373435974, + "learning_rate": 4.984964698792514e-05, + "loss": 1.4607, + "step": 455 + }, + { + "epoch": 0.07168543300123799, + "grad_norm": 0.286338746547699, + "learning_rate": 4.984896964292858e-05, + "loss": 1.3036, + "step": 456 + }, + { + "epoch": 0.07184263789817052, + "grad_norm": 0.2560707926750183, + "learning_rate": 4.98482907802515e-05, + "loss": 1.3428, + "step": 457 + }, + { + "epoch": 0.07199984279510306, + "grad_norm": 0.19296897947788239, + "learning_rate": 4.984761039993537e-05, + "loss": 1.3502, + "step": 458 + }, + { + "epoch": 0.07215704769203561, + "grad_norm": 0.19685949385166168, + "learning_rate": 4.9846928502021725e-05, + "loss": 1.4015, + "step": 459 + }, + { + "epoch": 0.07231425258896815, + "grad_norm": 0.1548481583595276, + "learning_rate": 4.984624508655223e-05, + "loss": 1.3698, + "step": 460 + }, + { + "epoch": 0.07247145748590068, + "grad_norm": 0.16076034307479858, + "learning_rate": 4.984556015356862e-05, + "loss": 1.3627, + "step": 461 + }, + { + "epoch": 0.07262866238283322, + "grad_norm": 0.18571603298187256, + "learning_rate": 4.9844873703112726e-05, + "loss": 1.3506, + "step": 462 + }, + { + "epoch": 0.07278586727976577, + "grad_norm": 0.1540035605430603, + "learning_rate": 4.984418573522648e-05, + "loss": 1.4483, + "step": 463 + }, + { + "epoch": 0.0729430721766983, + "grad_norm": 0.1730145364999771, + "learning_rate": 
4.984349624995188e-05, + "loss": 1.3678, + "step": 464 + }, + { + "epoch": 0.07310027707363084, + "grad_norm": 0.26254212856292725, + "learning_rate": 4.984280524733107e-05, + "loss": 1.401, + "step": 465 + }, + { + "epoch": 0.07325748197056338, + "grad_norm": 0.2079063057899475, + "learning_rate": 4.984211272740623e-05, + "loss": 1.3655, + "step": 466 + }, + { + "epoch": 0.07341468686749593, + "grad_norm": 0.21711499989032745, + "learning_rate": 4.9841418690219653e-05, + "loss": 1.4011, + "step": 467 + }, + { + "epoch": 0.07357189176442847, + "grad_norm": 0.18226252496242523, + "learning_rate": 4.984072313581375e-05, + "loss": 1.4213, + "step": 468 + }, + { + "epoch": 0.073729096661361, + "grad_norm": 0.1463780552148819, + "learning_rate": 4.9840026064230984e-05, + "loss": 1.4519, + "step": 469 + }, + { + "epoch": 0.07388630155829354, + "grad_norm": 0.18232892453670502, + "learning_rate": 4.983932747551394e-05, + "loss": 1.3657, + "step": 470 + }, + { + "epoch": 0.07404350645522607, + "grad_norm": 0.19644559919834137, + "learning_rate": 4.9838627369705285e-05, + "loss": 1.3988, + "step": 471 + }, + { + "epoch": 0.07420071135215862, + "grad_norm": 0.16292576491832733, + "learning_rate": 4.983792574684776e-05, + "loss": 1.4369, + "step": 472 + }, + { + "epoch": 0.07435791624909116, + "grad_norm": 0.2244543433189392, + "learning_rate": 4.983722260698425e-05, + "loss": 1.4269, + "step": 473 + }, + { + "epoch": 0.0745151211460237, + "grad_norm": 0.2582489848136902, + "learning_rate": 4.9836517950157666e-05, + "loss": 1.3986, + "step": 474 + }, + { + "epoch": 0.07467232604295623, + "grad_norm": 0.15564194321632385, + "learning_rate": 4.983581177641108e-05, + "loss": 1.3871, + "step": 475 + }, + { + "epoch": 0.07482953093988878, + "grad_norm": 0.2301008552312851, + "learning_rate": 4.9835104085787596e-05, + "loss": 1.3572, + "step": 476 + }, + { + "epoch": 0.07498673583682132, + "grad_norm": 0.21603424847126007, + "learning_rate": 4.9834394878330444e-05, + "loss": 1.3803, + "step": 477 + }, + { + "epoch": 0.07514394073375386, + "grad_norm": 0.16744717955589294, + "learning_rate": 4.9833684154082937e-05, + "loss": 1.4233, + "step": 478 + }, + { + "epoch": 0.07530114563068639, + "grad_norm": 0.23016415536403656, + "learning_rate": 4.98329719130885e-05, + "loss": 1.3962, + "step": 479 + }, + { + "epoch": 0.07545835052761894, + "grad_norm": 0.19687114655971527, + "learning_rate": 4.983225815539061e-05, + "loss": 1.3667, + "step": 480 + }, + { + "epoch": 0.07545835052761894, + "eval_loss": 1.3748993873596191, + "eval_runtime": 2315.5952, + "eval_samples_per_second": 3.998, + "eval_steps_per_second": 1.999, + "step": 480 + }, + { + "epoch": 0.07561555542455148, + "grad_norm": 0.1833205670118332, + "learning_rate": 4.9831542881032884e-05, + "loss": 1.4365, + "step": 481 + }, + { + "epoch": 0.07577276032148401, + "grad_norm": 0.17124423384666443, + "learning_rate": 4.983082609005899e-05, + "loss": 1.3641, + "step": 482 + }, + { + "epoch": 0.07592996521841655, + "grad_norm": 0.17352670431137085, + "learning_rate": 4.9830107782512715e-05, + "loss": 1.3415, + "step": 483 + }, + { + "epoch": 0.07608717011534909, + "grad_norm": 0.20768220722675323, + "learning_rate": 4.982938795843793e-05, + "loss": 1.3261, + "step": 484 + }, + { + "epoch": 0.07624437501228164, + "grad_norm": 0.21459853649139404, + "learning_rate": 4.982866661787859e-05, + "loss": 1.4185, + "step": 485 + }, + { + "epoch": 0.07640157990921417, + "grad_norm": 0.26912233233451843, + "learning_rate": 4.982794376087877e-05, + "loss": 1.3941, + 
"step": 486 + }, + { + "epoch": 0.07655878480614671, + "grad_norm": 0.28497114777565, + "learning_rate": 4.982721938748261e-05, + "loss": 1.3201, + "step": 487 + }, + { + "epoch": 0.07671598970307925, + "grad_norm": 0.15378472208976746, + "learning_rate": 4.982649349773435e-05, + "loss": 1.3615, + "step": 488 + }, + { + "epoch": 0.0768731946000118, + "grad_norm": 0.16169893741607666, + "learning_rate": 4.982576609167831e-05, + "loss": 1.3342, + "step": 489 + }, + { + "epoch": 0.07703039949694433, + "grad_norm": 0.24693650007247925, + "learning_rate": 4.982503716935896e-05, + "loss": 1.3788, + "step": 490 + }, + { + "epoch": 0.07718760439387687, + "grad_norm": 0.1769181787967682, + "learning_rate": 4.982430673082077e-05, + "loss": 1.3664, + "step": 491 + }, + { + "epoch": 0.0773448092908094, + "grad_norm": 0.26325106620788574, + "learning_rate": 4.982357477610839e-05, + "loss": 1.3173, + "step": 492 + }, + { + "epoch": 0.07750201418774195, + "grad_norm": 0.2063319832086563, + "learning_rate": 4.9822841305266506e-05, + "loss": 1.4125, + "step": 493 + }, + { + "epoch": 0.07765921908467449, + "grad_norm": 0.29141879081726074, + "learning_rate": 4.982210631833992e-05, + "loss": 1.3596, + "step": 494 + }, + { + "epoch": 0.07781642398160703, + "grad_norm": 0.18967591226100922, + "learning_rate": 4.982136981537352e-05, + "loss": 1.4128, + "step": 495 + }, + { + "epoch": 0.07797362887853956, + "grad_norm": 0.2291795313358307, + "learning_rate": 4.9820631796412287e-05, + "loss": 1.3772, + "step": 496 + }, + { + "epoch": 0.0781308337754721, + "grad_norm": 0.200834721326828, + "learning_rate": 4.98198922615013e-05, + "loss": 1.369, + "step": 497 + }, + { + "epoch": 0.07828803867240465, + "grad_norm": 0.22960609197616577, + "learning_rate": 4.9819151210685736e-05, + "loss": 1.3979, + "step": 498 + }, + { + "epoch": 0.07844524356933719, + "grad_norm": 0.17247427999973297, + "learning_rate": 4.981840864401084e-05, + "loss": 1.3927, + "step": 499 + }, + { + "epoch": 0.07860244846626972, + "grad_norm": 0.2623608112335205, + "learning_rate": 4.981766456152198e-05, + "loss": 1.3919, + "step": 500 + }, + { + "epoch": 0.07875965336320226, + "grad_norm": 0.19911788403987885, + "learning_rate": 4.981691896326459e-05, + "loss": 1.3925, + "step": 501 + }, + { + "epoch": 0.07891685826013481, + "grad_norm": 0.24869734048843384, + "learning_rate": 4.9816171849284205e-05, + "loss": 1.3562, + "step": 502 + }, + { + "epoch": 0.07907406315706735, + "grad_norm": 0.31372350454330444, + "learning_rate": 4.981542321962647e-05, + "loss": 1.3211, + "step": 503 + }, + { + "epoch": 0.07923126805399988, + "grad_norm": 0.21760910749435425, + "learning_rate": 4.981467307433709e-05, + "loss": 1.3042, + "step": 504 + }, + { + "epoch": 0.07938847295093242, + "grad_norm": 0.2469843477010727, + "learning_rate": 4.9813921413461906e-05, + "loss": 1.2831, + "step": 505 + }, + { + "epoch": 0.07954567784786497, + "grad_norm": 0.24319148063659668, + "learning_rate": 4.981316823704681e-05, + "loss": 1.2703, + "step": 506 + }, + { + "epoch": 0.0797028827447975, + "grad_norm": 0.19718031585216522, + "learning_rate": 4.98124135451378e-05, + "loss": 1.3258, + "step": 507 + }, + { + "epoch": 0.07986008764173004, + "grad_norm": 0.17459236085414886, + "learning_rate": 4.981165733778098e-05, + "loss": 1.4248, + "step": 508 + }, + { + "epoch": 0.08001729253866258, + "grad_norm": 0.17684616148471832, + "learning_rate": 4.981089961502253e-05, + "loss": 1.3939, + "step": 509 + }, + { + "epoch": 0.08017449743559511, + "grad_norm": 0.17499729990959167, + 
"learning_rate": 4.981014037690874e-05, + "loss": 1.4156, + "step": 510 + }, + { + "epoch": 0.08033170233252766, + "grad_norm": 0.1901170015335083, + "learning_rate": 4.9809379623485964e-05, + "loss": 1.4209, + "step": 511 + }, + { + "epoch": 0.0804889072294602, + "grad_norm": 0.18230682611465454, + "learning_rate": 4.980861735480067e-05, + "loss": 1.4607, + "step": 512 + }, + { + "epoch": 0.08064611212639274, + "grad_norm": 0.22843636572360992, + "learning_rate": 4.9807853570899427e-05, + "loss": 1.3671, + "step": 513 + }, + { + "epoch": 0.08080331702332527, + "grad_norm": 0.2288489192724228, + "learning_rate": 4.980708827182887e-05, + "loss": 1.3657, + "step": 514 + }, + { + "epoch": 0.08096052192025782, + "grad_norm": 0.19647593796253204, + "learning_rate": 4.980632145763575e-05, + "loss": 1.4079, + "step": 515 + }, + { + "epoch": 0.08111772681719036, + "grad_norm": 0.20980435609817505, + "learning_rate": 4.98055531283669e-05, + "loss": 1.3746, + "step": 516 + }, + { + "epoch": 0.0812749317141229, + "grad_norm": 0.19381123781204224, + "learning_rate": 4.980478328406923e-05, + "loss": 1.3986, + "step": 517 + }, + { + "epoch": 0.08143213661105543, + "grad_norm": 0.2224361151456833, + "learning_rate": 4.980401192478979e-05, + "loss": 1.3082, + "step": 518 + }, + { + "epoch": 0.08158934150798797, + "grad_norm": 0.20567384362220764, + "learning_rate": 4.9803239050575664e-05, + "loss": 1.4417, + "step": 519 + }, + { + "epoch": 0.08174654640492052, + "grad_norm": 0.22890503704547882, + "learning_rate": 4.9802464661474074e-05, + "loss": 1.3034, + "step": 520 + }, + { + "epoch": 0.08190375130185305, + "grad_norm": 0.23220910131931305, + "learning_rate": 4.9801688757532304e-05, + "loss": 1.3705, + "step": 521 + }, + { + "epoch": 0.08206095619878559, + "grad_norm": 0.29084959626197815, + "learning_rate": 4.980091133879775e-05, + "loss": 1.3246, + "step": 522 + }, + { + "epoch": 0.08221816109571813, + "grad_norm": 0.15776456892490387, + "learning_rate": 4.9800132405317895e-05, + "loss": 1.4311, + "step": 523 + }, + { + "epoch": 0.08237536599265068, + "grad_norm": 0.2636071443557739, + "learning_rate": 4.9799351957140314e-05, + "loss": 1.3265, + "step": 524 + }, + { + "epoch": 0.08253257088958321, + "grad_norm": 0.20042134821414948, + "learning_rate": 4.979856999431266e-05, + "loss": 1.3257, + "step": 525 + }, + { + "epoch": 0.08268977578651575, + "grad_norm": 0.24039289355278015, + "learning_rate": 4.9797786516882714e-05, + "loss": 1.3999, + "step": 526 + }, + { + "epoch": 0.08284698068344828, + "grad_norm": 0.16932524740695953, + "learning_rate": 4.9797001524898315e-05, + "loss": 1.4113, + "step": 527 + }, + { + "epoch": 0.08300418558038083, + "grad_norm": 0.2101370096206665, + "learning_rate": 4.97962150184074e-05, + "loss": 1.3973, + "step": 528 + }, + { + "epoch": 0.08316139047731337, + "grad_norm": 0.20983585715293884, + "learning_rate": 4.979542699745803e-05, + "loss": 1.3255, + "step": 529 + }, + { + "epoch": 0.08331859537424591, + "grad_norm": 0.20477800071239471, + "learning_rate": 4.97946374620983e-05, + "loss": 1.4349, + "step": 530 + }, + { + "epoch": 0.08347580027117844, + "grad_norm": 0.22637289762496948, + "learning_rate": 4.979384641237647e-05, + "loss": 1.3263, + "step": 531 + }, + { + "epoch": 0.08363300516811098, + "grad_norm": 0.20332221686840057, + "learning_rate": 4.9793053848340835e-05, + "loss": 1.3411, + "step": 532 + }, + { + "epoch": 0.08379021006504353, + "grad_norm": 0.22744616866111755, + "learning_rate": 4.979225977003979e-05, + "loss": 1.4042, + "step": 533 + }, + { 
+ "epoch": 0.08394741496197607, + "grad_norm": 0.20091576874256134, + "learning_rate": 4.979146417752185e-05, + "loss": 1.3218, + "step": 534 + }, + { + "epoch": 0.0841046198589086, + "grad_norm": 0.2225920408964157, + "learning_rate": 4.9790667070835604e-05, + "loss": 1.4223, + "step": 535 + }, + { + "epoch": 0.08426182475584114, + "grad_norm": 0.20447570085525513, + "learning_rate": 4.9789868450029745e-05, + "loss": 1.3884, + "step": 536 + }, + { + "epoch": 0.08441902965277369, + "grad_norm": 0.22765719890594482, + "learning_rate": 4.9789068315153035e-05, + "loss": 1.3575, + "step": 537 + }, + { + "epoch": 0.08457623454970623, + "grad_norm": 0.18886259198188782, + "learning_rate": 4.9788266666254343e-05, + "loss": 1.2737, + "step": 538 + }, + { + "epoch": 0.08473343944663876, + "grad_norm": 0.26551586389541626, + "learning_rate": 4.978746350338264e-05, + "loss": 1.3867, + "step": 539 + }, + { + "epoch": 0.0848906443435713, + "grad_norm": 0.29268744587898254, + "learning_rate": 4.9786658826586975e-05, + "loss": 1.4266, + "step": 540 + }, + { + "epoch": 0.08504784924050385, + "grad_norm": 0.2537211775779724, + "learning_rate": 4.97858526359165e-05, + "loss": 1.3402, + "step": 541 + }, + { + "epoch": 0.08520505413743638, + "grad_norm": 0.20287925004959106, + "learning_rate": 4.978504493142045e-05, + "loss": 1.3148, + "step": 542 + }, + { + "epoch": 0.08536225903436892, + "grad_norm": 0.18584851920604706, + "learning_rate": 4.978423571314814e-05, + "loss": 1.3293, + "step": 543 + }, + { + "epoch": 0.08551946393130146, + "grad_norm": 0.1944153755903244, + "learning_rate": 4.978342498114903e-05, + "loss": 1.4084, + "step": 544 + }, + { + "epoch": 0.08567666882823399, + "grad_norm": 0.18139739334583282, + "learning_rate": 4.978261273547261e-05, + "loss": 1.2734, + "step": 545 + }, + { + "epoch": 0.08583387372516654, + "grad_norm": 0.20824116468429565, + "learning_rate": 4.97817989761685e-05, + "loss": 1.3346, + "step": 546 + }, + { + "epoch": 0.08599107862209908, + "grad_norm": 0.16180047392845154, + "learning_rate": 4.978098370328639e-05, + "loss": 1.4547, + "step": 547 + }, + { + "epoch": 0.08614828351903162, + "grad_norm": 0.17156392335891724, + "learning_rate": 4.978016691687609e-05, + "loss": 1.366, + "step": 548 + }, + { + "epoch": 0.08630548841596415, + "grad_norm": 0.17913401126861572, + "learning_rate": 4.977934861698746e-05, + "loss": 1.2771, + "step": 549 + }, + { + "epoch": 0.0864626933128967, + "grad_norm": 0.17393502593040466, + "learning_rate": 4.977852880367051e-05, + "loss": 1.3061, + "step": 550 + }, + { + "epoch": 0.08661989820982924, + "grad_norm": 0.21741637587547302, + "learning_rate": 4.97777074769753e-05, + "loss": 1.3232, + "step": 551 + }, + { + "epoch": 0.08677710310676177, + "grad_norm": 0.26123344898223877, + "learning_rate": 4.977688463695198e-05, + "loss": 1.2678, + "step": 552 + }, + { + "epoch": 0.08693430800369431, + "grad_norm": 0.2508600354194641, + "learning_rate": 4.9776060283650826e-05, + "loss": 1.4543, + "step": 553 + }, + { + "epoch": 0.08709151290062686, + "grad_norm": 0.18527132272720337, + "learning_rate": 4.977523441712217e-05, + "loss": 1.3359, + "step": 554 + }, + { + "epoch": 0.0872487177975594, + "grad_norm": 0.24495406448841095, + "learning_rate": 4.977440703741646e-05, + "loss": 1.2892, + "step": 555 + }, + { + "epoch": 0.08740592269449193, + "grad_norm": 0.22759339213371277, + "learning_rate": 4.9773578144584235e-05, + "loss": 1.2212, + "step": 556 + }, + { + "epoch": 0.08756312759142447, + "grad_norm": 0.1627693474292755, + "learning_rate": 
4.977274773867611e-05, + "loss": 1.3461, + "step": 557 + }, + { + "epoch": 0.087720332488357, + "grad_norm": 0.2068985551595688, + "learning_rate": 4.9771915819742804e-05, + "loss": 1.3348, + "step": 558 + }, + { + "epoch": 0.08787753738528956, + "grad_norm": 0.19731195271015167, + "learning_rate": 4.9771082387835135e-05, + "loss": 1.3727, + "step": 559 + }, + { + "epoch": 0.08803474228222209, + "grad_norm": 0.26571184396743774, + "learning_rate": 4.977024744300399e-05, + "loss": 1.3911, + "step": 560 + }, + { + "epoch": 0.08819194717915463, + "grad_norm": 0.23141519725322723, + "learning_rate": 4.976941098530039e-05, + "loss": 1.3978, + "step": 561 + }, + { + "epoch": 0.08834915207608716, + "grad_norm": 0.2507224380970001, + "learning_rate": 4.97685730147754e-05, + "loss": 1.3017, + "step": 562 + }, + { + "epoch": 0.08850635697301971, + "grad_norm": 0.2453109323978424, + "learning_rate": 4.976773353148022e-05, + "loss": 1.2977, + "step": 563 + }, + { + "epoch": 0.08866356186995225, + "grad_norm": 0.2600953280925751, + "learning_rate": 4.9766892535466105e-05, + "loss": 1.4015, + "step": 564 + }, + { + "epoch": 0.08882076676688479, + "grad_norm": 0.19863371551036835, + "learning_rate": 4.9766050026784416e-05, + "loss": 1.3593, + "step": 565 + }, + { + "epoch": 0.08897797166381732, + "grad_norm": 0.2115338146686554, + "learning_rate": 4.976520600548663e-05, + "loss": 1.2928, + "step": 566 + }, + { + "epoch": 0.08913517656074987, + "grad_norm": 0.18994684517383575, + "learning_rate": 4.976436047162429e-05, + "loss": 1.3506, + "step": 567 + }, + { + "epoch": 0.08929238145768241, + "grad_norm": 0.22891771793365479, + "learning_rate": 4.976351342524903e-05, + "loss": 1.4449, + "step": 568 + }, + { + "epoch": 0.08944958635461495, + "grad_norm": 0.19313135743141174, + "learning_rate": 4.976266486641259e-05, + "loss": 1.2916, + "step": 569 + }, + { + "epoch": 0.08960679125154748, + "grad_norm": 0.17697346210479736, + "learning_rate": 4.976181479516679e-05, + "loss": 1.3696, + "step": 570 + }, + { + "epoch": 0.08976399614848002, + "grad_norm": 0.22902925312519073, + "learning_rate": 4.976096321156356e-05, + "loss": 1.3688, + "step": 571 + }, + { + "epoch": 0.08992120104541257, + "grad_norm": 0.25305554270744324, + "learning_rate": 4.97601101156549e-05, + "loss": 1.3057, + "step": 572 + }, + { + "epoch": 0.0900784059423451, + "grad_norm": 0.23255370557308197, + "learning_rate": 4.975925550749293e-05, + "loss": 1.3571, + "step": 573 + }, + { + "epoch": 0.09023561083927764, + "grad_norm": 0.25259101390838623, + "learning_rate": 4.9758399387129834e-05, + "loss": 1.3152, + "step": 574 + }, + { + "epoch": 0.09039281573621018, + "grad_norm": 0.26062390208244324, + "learning_rate": 4.97575417546179e-05, + "loss": 1.3042, + "step": 575 + }, + { + "epoch": 0.09055002063314273, + "grad_norm": 0.16536732017993927, + "learning_rate": 4.9756682610009515e-05, + "loss": 1.2797, + "step": 576 + }, + { + "epoch": 0.09070722553007526, + "grad_norm": 0.19088499248027802, + "learning_rate": 4.9755821953357144e-05, + "loss": 1.3774, + "step": 577 + }, + { + "epoch": 0.0908644304270078, + "grad_norm": 0.2181147336959839, + "learning_rate": 4.975495978471336e-05, + "loss": 1.3364, + "step": 578 + }, + { + "epoch": 0.09102163532394034, + "grad_norm": 0.18012750148773193, + "learning_rate": 4.975409610413082e-05, + "loss": 1.3852, + "step": 579 + }, + { + "epoch": 0.09117884022087289, + "grad_norm": 0.18108834326267242, + "learning_rate": 4.975323091166227e-05, + "loss": 1.3214, + "step": 580 + }, + { + "epoch": 
0.09133604511780542, + "grad_norm": 0.25102898478507996, + "learning_rate": 4.975236420736056e-05, + "loss": 1.3199, + "step": 581 + }, + { + "epoch": 0.09149325001473796, + "grad_norm": 0.20121383666992188, + "learning_rate": 4.9751495991278626e-05, + "loss": 1.3328, + "step": 582 + }, + { + "epoch": 0.0916504549116705, + "grad_norm": 0.24183815717697144, + "learning_rate": 4.975062626346948e-05, + "loss": 1.3881, + "step": 583 + }, + { + "epoch": 0.09180765980860303, + "grad_norm": 0.23274902999401093, + "learning_rate": 4.974975502398626e-05, + "loss": 1.3674, + "step": 584 + }, + { + "epoch": 0.09196486470553558, + "grad_norm": 0.224375382065773, + "learning_rate": 4.9748882272882165e-05, + "loss": 1.362, + "step": 585 + }, + { + "epoch": 0.09212206960246812, + "grad_norm": 0.2743482291698456, + "learning_rate": 4.97480080102105e-05, + "loss": 1.3028, + "step": 586 + }, + { + "epoch": 0.09227927449940065, + "grad_norm": 0.30631452798843384, + "learning_rate": 4.974713223602467e-05, + "loss": 1.3541, + "step": 587 + }, + { + "epoch": 0.09243647939633319, + "grad_norm": 0.1999395489692688, + "learning_rate": 4.9746254950378166e-05, + "loss": 1.3515, + "step": 588 + }, + { + "epoch": 0.09259368429326574, + "grad_norm": 0.3005799353122711, + "learning_rate": 4.974537615332455e-05, + "loss": 1.3872, + "step": 589 + }, + { + "epoch": 0.09275088919019828, + "grad_norm": 0.21795117855072021, + "learning_rate": 4.9744495844917524e-05, + "loss": 1.2804, + "step": 590 + }, + { + "epoch": 0.09290809408713081, + "grad_norm": 0.2832283675670624, + "learning_rate": 4.9743614025210825e-05, + "loss": 1.3209, + "step": 591 + }, + { + "epoch": 0.09306529898406335, + "grad_norm": 0.21391350030899048, + "learning_rate": 4.9742730694258334e-05, + "loss": 1.3041, + "step": 592 + }, + { + "epoch": 0.0932225038809959, + "grad_norm": 0.21651242673397064, + "learning_rate": 4.974184585211399e-05, + "loss": 1.2529, + "step": 593 + }, + { + "epoch": 0.09337970877792844, + "grad_norm": 0.22796374559402466, + "learning_rate": 4.974095949883183e-05, + "loss": 1.3999, + "step": 594 + }, + { + "epoch": 0.09353691367486097, + "grad_norm": 0.21013247966766357, + "learning_rate": 4.9740071634466e-05, + "loss": 1.3626, + "step": 595 + }, + { + "epoch": 0.09369411857179351, + "grad_norm": 0.31589969992637634, + "learning_rate": 4.973918225907073e-05, + "loss": 1.4096, + "step": 596 + }, + { + "epoch": 0.09385132346872604, + "grad_norm": 0.2923184931278229, + "learning_rate": 4.973829137270033e-05, + "loss": 1.2116, + "step": 597 + }, + { + "epoch": 0.0940085283656586, + "grad_norm": 0.2147187739610672, + "learning_rate": 4.9737398975409224e-05, + "loss": 1.3909, + "step": 598 + }, + { + "epoch": 0.09416573326259113, + "grad_norm": 0.20287127792835236, + "learning_rate": 4.9736505067251896e-05, + "loss": 1.3621, + "step": 599 + }, + { + "epoch": 0.09432293815952367, + "grad_norm": 0.24703876674175262, + "learning_rate": 4.9735609648282965e-05, + "loss": 1.3525, + "step": 600 + }, + { + "epoch": 0.0944801430564562, + "grad_norm": 0.25060412287712097, + "learning_rate": 4.97347127185571e-05, + "loss": 1.36, + "step": 601 + }, + { + "epoch": 0.09463734795338875, + "grad_norm": 0.214557945728302, + "learning_rate": 4.9733814278129096e-05, + "loss": 1.4372, + "step": 602 + }, + { + "epoch": 0.09479455285032129, + "grad_norm": 0.1984785795211792, + "learning_rate": 4.9732914327053825e-05, + "loss": 1.3191, + "step": 603 + }, + { + "epoch": 0.09495175774725383, + "grad_norm": 0.2099440097808838, + "learning_rate": 
4.9732012865386244e-05, + "loss": 1.313, + "step": 604 + }, + { + "epoch": 0.09510896264418636, + "grad_norm": 0.20393683016300201, + "learning_rate": 4.9731109893181423e-05, + "loss": 1.3465, + "step": 605 + }, + { + "epoch": 0.09526616754111891, + "grad_norm": 0.25346165895462036, + "learning_rate": 4.97302054104945e-05, + "loss": 1.3379, + "step": 606 + }, + { + "epoch": 0.09542337243805145, + "grad_norm": 0.21876423060894012, + "learning_rate": 4.9729299417380725e-05, + "loss": 1.2746, + "step": 607 + }, + { + "epoch": 0.09558057733498398, + "grad_norm": 0.21032990515232086, + "learning_rate": 4.9728391913895436e-05, + "loss": 1.3215, + "step": 608 + }, + { + "epoch": 0.09573778223191652, + "grad_norm": 0.2550762891769409, + "learning_rate": 4.9727482900094044e-05, + "loss": 1.3239, + "step": 609 + }, + { + "epoch": 0.09589498712884906, + "grad_norm": 0.31706327199935913, + "learning_rate": 4.972657237603208e-05, + "loss": 1.3467, + "step": 610 + }, + { + "epoch": 0.09605219202578161, + "grad_norm": 0.17176879942417145, + "learning_rate": 4.972566034176516e-05, + "loss": 1.3815, + "step": 611 + }, + { + "epoch": 0.09620939692271414, + "grad_norm": 0.22620820999145508, + "learning_rate": 4.972474679734898e-05, + "loss": 1.2593, + "step": 612 + }, + { + "epoch": 0.09636660181964668, + "grad_norm": 0.18735802173614502, + "learning_rate": 4.9723831742839334e-05, + "loss": 1.424, + "step": 613 + }, + { + "epoch": 0.09652380671657922, + "grad_norm": 0.2582910656929016, + "learning_rate": 4.972291517829211e-05, + "loss": 1.2741, + "step": 614 + }, + { + "epoch": 0.09668101161351177, + "grad_norm": 0.19907522201538086, + "learning_rate": 4.97219971037633e-05, + "loss": 1.2045, + "step": 615 + }, + { + "epoch": 0.0968382165104443, + "grad_norm": 0.20451949536800385, + "learning_rate": 4.972107751930896e-05, + "loss": 1.3026, + "step": 616 + }, + { + "epoch": 0.09699542140737684, + "grad_norm": 0.29682090878486633, + "learning_rate": 4.972015642498527e-05, + "loss": 1.3789, + "step": 617 + }, + { + "epoch": 0.09715262630430938, + "grad_norm": 0.27210530638694763, + "learning_rate": 4.9719233820848476e-05, + "loss": 1.3968, + "step": 618 + }, + { + "epoch": 0.09730983120124193, + "grad_norm": 0.24241842329502106, + "learning_rate": 4.971830970695493e-05, + "loss": 1.2763, + "step": 619 + }, + { + "epoch": 0.09746703609817446, + "grad_norm": 0.2535828649997711, + "learning_rate": 4.9717384083361075e-05, + "loss": 1.3463, + "step": 620 + }, + { + "epoch": 0.097624240995107, + "grad_norm": 0.22121217846870422, + "learning_rate": 4.971645695012344e-05, + "loss": 1.3384, + "step": 621 + }, + { + "epoch": 0.09778144589203953, + "grad_norm": 0.28840744495391846, + "learning_rate": 4.971552830729866e-05, + "loss": 1.2418, + "step": 622 + }, + { + "epoch": 0.09793865078897207, + "grad_norm": 0.1682664453983307, + "learning_rate": 4.971459815494345e-05, + "loss": 1.3658, + "step": 623 + }, + { + "epoch": 0.09809585568590462, + "grad_norm": 0.24955761432647705, + "learning_rate": 4.971366649311461e-05, + "loss": 1.2372, + "step": 624 + }, + { + "epoch": 0.09825306058283716, + "grad_norm": 0.2756117582321167, + "learning_rate": 4.971273332186906e-05, + "loss": 1.3212, + "step": 625 + }, + { + "epoch": 0.09841026547976969, + "grad_norm": 0.2370867133140564, + "learning_rate": 4.971179864126377e-05, + "loss": 1.2879, + "step": 626 + }, + { + "epoch": 0.09856747037670223, + "grad_norm": 0.20566895604133606, + "learning_rate": 4.9710862451355846e-05, + "loss": 1.4243, + "step": 627 + }, + { + "epoch": 
0.09872467527363478, + "grad_norm": 0.1923399120569229, + "learning_rate": 4.970992475220246e-05, + "loss": 1.2639, + "step": 628 + }, + { + "epoch": 0.09888188017056732, + "grad_norm": 0.17972147464752197, + "learning_rate": 4.9708985543860896e-05, + "loss": 1.3366, + "step": 629 + }, + { + "epoch": 0.09903908506749985, + "grad_norm": 0.1936875432729721, + "learning_rate": 4.97080448263885e-05, + "loss": 1.3496, + "step": 630 + }, + { + "epoch": 0.09919628996443239, + "grad_norm": 0.24409984052181244, + "learning_rate": 4.9707102599842735e-05, + "loss": 1.3268, + "step": 631 + }, + { + "epoch": 0.09935349486136494, + "grad_norm": 0.21084928512573242, + "learning_rate": 4.970615886428115e-05, + "loss": 1.3421, + "step": 632 + }, + { + "epoch": 0.09951069975829747, + "grad_norm": 0.21201804280281067, + "learning_rate": 4.970521361976138e-05, + "loss": 1.3189, + "step": 633 + }, + { + "epoch": 0.09966790465523001, + "grad_norm": 0.2698107063770294, + "learning_rate": 4.9704266866341156e-05, + "loss": 1.2193, + "step": 634 + }, + { + "epoch": 0.09982510955216255, + "grad_norm": 0.27072674036026, + "learning_rate": 4.970331860407831e-05, + "loss": 1.2694, + "step": 635 + }, + { + "epoch": 0.09998231444909508, + "grad_norm": 0.26514896750450134, + "learning_rate": 4.9702368833030754e-05, + "loss": 1.2175, + "step": 636 + }, + { + "epoch": 0.10013951934602763, + "grad_norm": 0.21645940840244293, + "learning_rate": 4.970141755325649e-05, + "loss": 1.3099, + "step": 637 + }, + { + "epoch": 0.10029672424296017, + "grad_norm": 0.27035385370254517, + "learning_rate": 4.970046476481363e-05, + "loss": 1.2723, + "step": 638 + }, + { + "epoch": 0.1004539291398927, + "grad_norm": 0.20999298989772797, + "learning_rate": 4.969951046776036e-05, + "loss": 1.369, + "step": 639 + }, + { + "epoch": 0.10061113403682524, + "grad_norm": 0.18554192781448364, + "learning_rate": 4.969855466215497e-05, + "loss": 1.3483, + "step": 640 + }, + { + "epoch": 0.10061113403682524, + "eval_loss": 1.314468502998352, + "eval_runtime": 2275.7115, + "eval_samples_per_second": 4.068, + "eval_steps_per_second": 2.034, + "step": 640 + }, + { + "epoch": 0.10076833893375779, + "grad_norm": 0.19117292761802673, + "learning_rate": 4.969759734805582e-05, + "loss": 1.3538, + "step": 641 + }, + { + "epoch": 0.10092554383069033, + "grad_norm": 0.21971918642520905, + "learning_rate": 4.969663852552141e-05, + "loss": 1.2827, + "step": 642 + }, + { + "epoch": 0.10108274872762286, + "grad_norm": 0.2663845121860504, + "learning_rate": 4.969567819461027e-05, + "loss": 1.3332, + "step": 643 + }, + { + "epoch": 0.1012399536245554, + "grad_norm": 0.23752686381340027, + "learning_rate": 4.9694716355381076e-05, + "loss": 1.2675, + "step": 644 + }, + { + "epoch": 0.10139715852148795, + "grad_norm": 0.1558876782655716, + "learning_rate": 4.9693753007892565e-05, + "loss": 1.3356, + "step": 645 + }, + { + "epoch": 0.10155436341842049, + "grad_norm": 0.2064114212989807, + "learning_rate": 4.969278815220356e-05, + "loss": 1.3261, + "step": 646 + }, + { + "epoch": 0.10171156831535302, + "grad_norm": 0.2371819168329239, + "learning_rate": 4.969182178837302e-05, + "loss": 1.2706, + "step": 647 + }, + { + "epoch": 0.10186877321228556, + "grad_norm": 0.22757107019424438, + "learning_rate": 4.969085391645994e-05, + "loss": 1.4035, + "step": 648 + }, + { + "epoch": 0.1020259781092181, + "grad_norm": 0.16831007599830627, + "learning_rate": 4.968988453652345e-05, + "loss": 1.3006, + "step": 649 + }, + { + "epoch": 0.10218318300615065, + "grad_norm": 
0.1719575822353363, + "learning_rate": 4.968891364862275e-05, + "loss": 1.2439, + "step": 650 + }, + { + "epoch": 0.10234038790308318, + "grad_norm": 0.27235090732574463, + "learning_rate": 4.9687941252817144e-05, + "loss": 1.3065, + "step": 651 + }, + { + "epoch": 0.10249759280001572, + "grad_norm": 0.25622984766960144, + "learning_rate": 4.968696734916601e-05, + "loss": 1.2908, + "step": 652 + }, + { + "epoch": 0.10265479769694826, + "grad_norm": 0.22526390850543976, + "learning_rate": 4.968599193772885e-05, + "loss": 1.3081, + "step": 653 + }, + { + "epoch": 0.1028120025938808, + "grad_norm": 0.2552133798599243, + "learning_rate": 4.968501501856522e-05, + "loss": 1.3292, + "step": 654 + }, + { + "epoch": 0.10296920749081334, + "grad_norm": 0.26533272862434387, + "learning_rate": 4.96840365917348e-05, + "loss": 1.3571, + "step": 655 + }, + { + "epoch": 0.10312641238774588, + "grad_norm": 0.29065170884132385, + "learning_rate": 4.968305665729732e-05, + "loss": 1.2799, + "step": 656 + }, + { + "epoch": 0.10328361728467841, + "grad_norm": 0.27552661299705505, + "learning_rate": 4.968207521531267e-05, + "loss": 1.2262, + "step": 657 + }, + { + "epoch": 0.10344082218161096, + "grad_norm": 1.929308533668518, + "learning_rate": 4.9681092265840775e-05, + "loss": 1.2027, + "step": 658 + }, + { + "epoch": 0.1035980270785435, + "grad_norm": 0.2610799968242645, + "learning_rate": 4.968010780894167e-05, + "loss": 1.3527, + "step": 659 + }, + { + "epoch": 0.10375523197547604, + "grad_norm": 0.28388604521751404, + "learning_rate": 4.967912184467547e-05, + "loss": 1.2989, + "step": 660 + }, + { + "epoch": 0.10391243687240857, + "grad_norm": 0.21056891977787018, + "learning_rate": 4.9678134373102415e-05, + "loss": 1.2748, + "step": 661 + }, + { + "epoch": 0.10406964176934111, + "grad_norm": 0.268331378698349, + "learning_rate": 4.967714539428281e-05, + "loss": 1.3712, + "step": 662 + }, + { + "epoch": 0.10422684666627366, + "grad_norm": 0.28430554270744324, + "learning_rate": 4.967615490827705e-05, + "loss": 1.3641, + "step": 663 + }, + { + "epoch": 0.1043840515632062, + "grad_norm": 0.254165917634964, + "learning_rate": 4.9675162915145636e-05, + "loss": 1.3042, + "step": 664 + }, + { + "epoch": 0.10454125646013873, + "grad_norm": 0.19123367965221405, + "learning_rate": 4.967416941494914e-05, + "loss": 1.3613, + "step": 665 + }, + { + "epoch": 0.10469846135707127, + "grad_norm": 0.20710323750972748, + "learning_rate": 4.967317440774828e-05, + "loss": 1.2815, + "step": 666 + }, + { + "epoch": 0.10485566625400382, + "grad_norm": 0.2143716812133789, + "learning_rate": 4.967217789360379e-05, + "loss": 1.3136, + "step": 667 + }, + { + "epoch": 0.10501287115093635, + "grad_norm": 0.2556392550468445, + "learning_rate": 4.967117987257654e-05, + "loss": 1.384, + "step": 668 + }, + { + "epoch": 0.10517007604786889, + "grad_norm": 0.28254854679107666, + "learning_rate": 4.9670180344727505e-05, + "loss": 1.3218, + "step": 669 + }, + { + "epoch": 0.10532728094480143, + "grad_norm": 0.24643027782440186, + "learning_rate": 4.9669179310117706e-05, + "loss": 1.278, + "step": 670 + }, + { + "epoch": 0.10548448584173396, + "grad_norm": 0.34323665499687195, + "learning_rate": 4.9668176768808304e-05, + "loss": 1.2511, + "step": 671 + }, + { + "epoch": 0.10564169073866651, + "grad_norm": 0.2499508410692215, + "learning_rate": 4.966717272086052e-05, + "loss": 1.338, + "step": 672 + }, + { + "epoch": 0.10579889563559905, + "grad_norm": 0.2145325094461441, + "learning_rate": 4.966616716633567e-05, + "loss": 1.3304, + "step": 673 
+ }, + { + "epoch": 0.10595610053253159, + "grad_norm": 0.19230923056602478, + "learning_rate": 4.9665160105295185e-05, + "loss": 1.3535, + "step": 674 + }, + { + "epoch": 0.10611330542946412, + "grad_norm": 0.20243465900421143, + "learning_rate": 4.966415153780056e-05, + "loss": 1.3118, + "step": 675 + }, + { + "epoch": 0.10627051032639667, + "grad_norm": 0.24927914142608643, + "learning_rate": 4.966314146391341e-05, + "loss": 1.3136, + "step": 676 + }, + { + "epoch": 0.10642771522332921, + "grad_norm": 0.21791934967041016, + "learning_rate": 4.9662129883695406e-05, + "loss": 1.3314, + "step": 677 + }, + { + "epoch": 0.10658492012026174, + "grad_norm": 0.24318841099739075, + "learning_rate": 4.966111679720835e-05, + "loss": 1.3929, + "step": 678 + }, + { + "epoch": 0.10674212501719428, + "grad_norm": 0.2829376757144928, + "learning_rate": 4.966010220451411e-05, + "loss": 1.3232, + "step": 679 + }, + { + "epoch": 0.10689932991412683, + "grad_norm": 0.2353716641664505, + "learning_rate": 4.965908610567465e-05, + "loss": 1.2851, + "step": 680 + }, + { + "epoch": 0.10705653481105937, + "grad_norm": 0.2615984380245209, + "learning_rate": 4.965806850075203e-05, + "loss": 1.2552, + "step": 681 + }, + { + "epoch": 0.1072137397079919, + "grad_norm": 0.23773109912872314, + "learning_rate": 4.965704938980841e-05, + "loss": 1.2961, + "step": 682 + }, + { + "epoch": 0.10737094460492444, + "grad_norm": 0.2622957229614258, + "learning_rate": 4.9656028772906014e-05, + "loss": 1.3073, + "step": 683 + }, + { + "epoch": 0.10752814950185698, + "grad_norm": 0.24974018335342407, + "learning_rate": 4.965500665010721e-05, + "loss": 1.2774, + "step": 684 + }, + { + "epoch": 0.10768535439878953, + "grad_norm": 0.17124338448047638, + "learning_rate": 4.9653983021474395e-05, + "loss": 1.4159, + "step": 685 + }, + { + "epoch": 0.10784255929572206, + "grad_norm": 0.16673363745212555, + "learning_rate": 4.96529578870701e-05, + "loss": 1.3748, + "step": 686 + }, + { + "epoch": 0.1079997641926546, + "grad_norm": 0.25368422269821167, + "learning_rate": 4.965193124695693e-05, + "loss": 1.3958, + "step": 687 + }, + { + "epoch": 0.10815696908958713, + "grad_norm": 0.22910015285015106, + "learning_rate": 4.96509031011976e-05, + "loss": 1.3259, + "step": 688 + }, + { + "epoch": 0.10831417398651969, + "grad_norm": 0.277851939201355, + "learning_rate": 4.96498734498549e-05, + "loss": 1.3254, + "step": 689 + }, + { + "epoch": 0.10847137888345222, + "grad_norm": 0.32443082332611084, + "learning_rate": 4.964884229299172e-05, + "loss": 1.3007, + "step": 690 + }, + { + "epoch": 0.10862858378038476, + "grad_norm": 0.20710885524749756, + "learning_rate": 4.964780963067102e-05, + "loss": 1.3297, + "step": 691 + }, + { + "epoch": 0.1087857886773173, + "grad_norm": 0.25522252917289734, + "learning_rate": 4.96467754629559e-05, + "loss": 1.2487, + "step": 692 + }, + { + "epoch": 0.10894299357424984, + "grad_norm": 0.3286147713661194, + "learning_rate": 4.9645739789909504e-05, + "loss": 1.2255, + "step": 693 + }, + { + "epoch": 0.10910019847118238, + "grad_norm": 0.3795601725578308, + "learning_rate": 4.964470261159509e-05, + "loss": 1.2725, + "step": 694 + }, + { + "epoch": 0.10925740336811492, + "grad_norm": 0.3112131655216217, + "learning_rate": 4.964366392807602e-05, + "loss": 1.252, + "step": 695 + }, + { + "epoch": 0.10941460826504745, + "grad_norm": 0.2891729176044464, + "learning_rate": 4.964262373941571e-05, + "loss": 1.3377, + "step": 696 + }, + { + "epoch": 0.10957181316197999, + "grad_norm": 0.26973745226860046, + 
"learning_rate": 4.96415820456777e-05, + "loss": 1.3186, + "step": 697 + }, + { + "epoch": 0.10972901805891254, + "grad_norm": 0.2832094430923462, + "learning_rate": 4.964053884692562e-05, + "loss": 1.3248, + "step": 698 + }, + { + "epoch": 0.10988622295584508, + "grad_norm": 0.2840999960899353, + "learning_rate": 4.963949414322318e-05, + "loss": 1.2677, + "step": 699 + }, + { + "epoch": 0.11004342785277761, + "grad_norm": 0.2891542911529541, + "learning_rate": 4.963844793463418e-05, + "loss": 1.3274, + "step": 700 + }, + { + "epoch": 0.11020063274971015, + "grad_norm": 0.23569005727767944, + "learning_rate": 4.963740022122252e-05, + "loss": 1.2259, + "step": 701 + }, + { + "epoch": 0.1103578376466427, + "grad_norm": 0.2174285501241684, + "learning_rate": 4.963635100305221e-05, + "loss": 1.2785, + "step": 702 + }, + { + "epoch": 0.11051504254357523, + "grad_norm": 0.2753438651561737, + "learning_rate": 4.96353002801873e-05, + "loss": 1.3263, + "step": 703 + }, + { + "epoch": 0.11067224744050777, + "grad_norm": 0.21094419062137604, + "learning_rate": 4.963424805269198e-05, + "loss": 1.2439, + "step": 704 + }, + { + "epoch": 0.1108294523374403, + "grad_norm": 0.20501388609409332, + "learning_rate": 4.963319432063052e-05, + "loss": 1.3091, + "step": 705 + }, + { + "epoch": 0.11098665723437286, + "grad_norm": 0.2041424810886383, + "learning_rate": 4.963213908406728e-05, + "loss": 1.2951, + "step": 706 + }, + { + "epoch": 0.1111438621313054, + "grad_norm": 0.24955442547798157, + "learning_rate": 4.963108234306669e-05, + "loss": 1.2208, + "step": 707 + }, + { + "epoch": 0.11130106702823793, + "grad_norm": 0.39431118965148926, + "learning_rate": 4.9630024097693314e-05, + "loss": 1.306, + "step": 708 + }, + { + "epoch": 0.11145827192517047, + "grad_norm": 0.24803434312343597, + "learning_rate": 4.962896434801178e-05, + "loss": 1.2951, + "step": 709 + }, + { + "epoch": 0.111615476822103, + "grad_norm": 0.2736116349697113, + "learning_rate": 4.962790309408681e-05, + "loss": 1.3245, + "step": 710 + }, + { + "epoch": 0.11177268171903555, + "grad_norm": 0.24502034485340118, + "learning_rate": 4.9626840335983215e-05, + "loss": 1.2961, + "step": 711 + }, + { + "epoch": 0.11192988661596809, + "grad_norm": 0.24158692359924316, + "learning_rate": 4.962577607376592e-05, + "loss": 1.2387, + "step": 712 + }, + { + "epoch": 0.11208709151290062, + "grad_norm": 0.24977251887321472, + "learning_rate": 4.962471030749991e-05, + "loss": 1.2976, + "step": 713 + }, + { + "epoch": 0.11224429640983316, + "grad_norm": 0.15401019155979156, + "learning_rate": 4.962364303725029e-05, + "loss": 1.2684, + "step": 714 + }, + { + "epoch": 0.11240150130676571, + "grad_norm": 0.2611544132232666, + "learning_rate": 4.962257426308224e-05, + "loss": 1.2928, + "step": 715 + }, + { + "epoch": 0.11255870620369825, + "grad_norm": 0.434600830078125, + "learning_rate": 4.962150398506103e-05, + "loss": 1.3657, + "step": 716 + }, + { + "epoch": 0.11271591110063078, + "grad_norm": 0.2896519601345062, + "learning_rate": 4.9620432203252045e-05, + "loss": 1.3055, + "step": 717 + }, + { + "epoch": 0.11287311599756332, + "grad_norm": 0.1891547590494156, + "learning_rate": 4.961935891772073e-05, + "loss": 1.3355, + "step": 718 + }, + { + "epoch": 0.11303032089449587, + "grad_norm": 0.2223133146762848, + "learning_rate": 4.9618284128532644e-05, + "loss": 1.2939, + "step": 719 + }, + { + "epoch": 0.1131875257914284, + "grad_norm": 0.27313077449798584, + "learning_rate": 4.961720783575343e-05, + "loss": 1.2596, + "step": 720 + }, + { + "epoch": 
0.11334473068836094, + "grad_norm": 0.24807053804397583, + "learning_rate": 4.961613003944883e-05, + "loss": 1.2851, + "step": 721 + }, + { + "epoch": 0.11350193558529348, + "grad_norm": 0.2343195378780365, + "learning_rate": 4.9615050739684656e-05, + "loss": 1.2899, + "step": 722 + }, + { + "epoch": 0.11365914048222601, + "grad_norm": 0.229730024933815, + "learning_rate": 4.961396993652684e-05, + "loss": 1.3118, + "step": 723 + }, + { + "epoch": 0.11381634537915856, + "grad_norm": 0.2397170215845108, + "learning_rate": 4.9612887630041394e-05, + "loss": 1.2148, + "step": 724 + }, + { + "epoch": 0.1139735502760911, + "grad_norm": 0.2167958915233612, + "learning_rate": 4.9611803820294414e-05, + "loss": 1.2597, + "step": 725 + }, + { + "epoch": 0.11413075517302364, + "grad_norm": 0.21318721771240234, + "learning_rate": 4.961071850735209e-05, + "loss": 1.3949, + "step": 726 + }, + { + "epoch": 0.11428796006995617, + "grad_norm": 0.21988382935523987, + "learning_rate": 4.960963169128073e-05, + "loss": 1.3196, + "step": 727 + }, + { + "epoch": 0.11444516496688872, + "grad_norm": 0.17555692791938782, + "learning_rate": 4.96085433721467e-05, + "loss": 1.3661, + "step": 728 + }, + { + "epoch": 0.11460236986382126, + "grad_norm": 0.3545222282409668, + "learning_rate": 4.960745355001647e-05, + "loss": 1.2659, + "step": 729 + }, + { + "epoch": 0.1147595747607538, + "grad_norm": 0.3196569085121155, + "learning_rate": 4.960636222495659e-05, + "loss": 1.2893, + "step": 730 + }, + { + "epoch": 0.11491677965768633, + "grad_norm": 0.2241334766149521, + "learning_rate": 4.960526939703374e-05, + "loss": 1.2155, + "step": 731 + }, + { + "epoch": 0.11507398455461888, + "grad_norm": 0.26543980836868286, + "learning_rate": 4.960417506631465e-05, + "loss": 1.3615, + "step": 732 + }, + { + "epoch": 0.11523118945155142, + "grad_norm": 0.21146585047245026, + "learning_rate": 4.960307923286616e-05, + "loss": 1.3516, + "step": 733 + }, + { + "epoch": 0.11538839434848396, + "grad_norm": 0.18095079064369202, + "learning_rate": 4.960198189675519e-05, + "loss": 1.3581, + "step": 734 + }, + { + "epoch": 0.11554559924541649, + "grad_norm": 0.26687100529670715, + "learning_rate": 4.9600883058048775e-05, + "loss": 1.1971, + "step": 735 + }, + { + "epoch": 0.11570280414234903, + "grad_norm": 0.2271047830581665, + "learning_rate": 4.959978271681402e-05, + "loss": 1.1867, + "step": 736 + }, + { + "epoch": 0.11586000903928158, + "grad_norm": 0.2102867215871811, + "learning_rate": 4.959868087311814e-05, + "loss": 1.2749, + "step": 737 + }, + { + "epoch": 0.11601721393621411, + "grad_norm": 0.2752761244773865, + "learning_rate": 4.9597577527028424e-05, + "loss": 1.1753, + "step": 738 + }, + { + "epoch": 0.11617441883314665, + "grad_norm": 0.22385725378990173, + "learning_rate": 4.959647267861226e-05, + "loss": 1.343, + "step": 739 + }, + { + "epoch": 0.11633162373007919, + "grad_norm": 0.2597412168979645, + "learning_rate": 4.959536632793712e-05, + "loss": 1.2539, + "step": 740 + }, + { + "epoch": 0.11648882862701174, + "grad_norm": 0.27975237369537354, + "learning_rate": 4.959425847507059e-05, + "loss": 1.2883, + "step": 741 + }, + { + "epoch": 0.11664603352394427, + "grad_norm": 0.29127049446105957, + "learning_rate": 4.959314912008033e-05, + "loss": 1.3139, + "step": 742 + }, + { + "epoch": 0.11680323842087681, + "grad_norm": 0.19929318130016327, + "learning_rate": 4.9592038263034094e-05, + "loss": 1.271, + "step": 743 + }, + { + "epoch": 0.11696044331780935, + "grad_norm": 0.23164550960063934, + "learning_rate": 
4.9590925903999716e-05, + "loss": 1.3359, + "step": 744 + }, + { + "epoch": 0.1171176482147419, + "grad_norm": 0.27876612544059753, + "learning_rate": 4.958981204304516e-05, + "loss": 1.2568, + "step": 745 + }, + { + "epoch": 0.11727485311167443, + "grad_norm": 0.2459796965122223, + "learning_rate": 4.9588696680238435e-05, + "loss": 1.2426, + "step": 746 + }, + { + "epoch": 0.11743205800860697, + "grad_norm": 0.2039456069469452, + "learning_rate": 4.958757981564767e-05, + "loss": 1.2681, + "step": 747 + }, + { + "epoch": 0.1175892629055395, + "grad_norm": 0.24796408414840698, + "learning_rate": 4.958646144934108e-05, + "loss": 1.257, + "step": 748 + }, + { + "epoch": 0.11774646780247204, + "grad_norm": 0.2779620289802551, + "learning_rate": 4.958534158138697e-05, + "loss": 1.2933, + "step": 749 + }, + { + "epoch": 0.11790367269940459, + "grad_norm": 0.20878851413726807, + "learning_rate": 4.9584220211853735e-05, + "loss": 1.2902, + "step": 750 + }, + { + "epoch": 0.11806087759633713, + "grad_norm": 0.24720412492752075, + "learning_rate": 4.958309734080987e-05, + "loss": 1.203, + "step": 751 + }, + { + "epoch": 0.11821808249326966, + "grad_norm": 0.287654846906662, + "learning_rate": 4.9581972968323956e-05, + "loss": 1.3141, + "step": 752 + }, + { + "epoch": 0.1183752873902022, + "grad_norm": 0.23071719706058502, + "learning_rate": 4.958084709446466e-05, + "loss": 1.3145, + "step": 753 + }, + { + "epoch": 0.11853249228713475, + "grad_norm": 0.21027110517024994, + "learning_rate": 4.9579719719300746e-05, + "loss": 1.2893, + "step": 754 + }, + { + "epoch": 0.11868969718406729, + "grad_norm": 0.17173202335834503, + "learning_rate": 4.9578590842901066e-05, + "loss": 1.2618, + "step": 755 + }, + { + "epoch": 0.11884690208099982, + "grad_norm": 0.24606984853744507, + "learning_rate": 4.957746046533457e-05, + "loss": 1.1904, + "step": 756 + }, + { + "epoch": 0.11900410697793236, + "grad_norm": 0.248653382062912, + "learning_rate": 4.957632858667031e-05, + "loss": 1.331, + "step": 757 + }, + { + "epoch": 0.11916131187486491, + "grad_norm": 0.1904144436120987, + "learning_rate": 4.9575195206977406e-05, + "loss": 1.3303, + "step": 758 + }, + { + "epoch": 0.11931851677179744, + "grad_norm": 0.39540621638298035, + "learning_rate": 4.9574060326325075e-05, + "loss": 1.3455, + "step": 759 + }, + { + "epoch": 0.11947572166872998, + "grad_norm": 0.20992301404476166, + "learning_rate": 4.957292394478265e-05, + "loss": 1.2911, + "step": 760 + }, + { + "epoch": 0.11963292656566252, + "grad_norm": 0.23418502509593964, + "learning_rate": 4.957178606241951e-05, + "loss": 1.35, + "step": 761 + }, + { + "epoch": 0.11979013146259505, + "grad_norm": 0.24480225145816803, + "learning_rate": 4.957064667930517e-05, + "loss": 1.2138, + "step": 762 + }, + { + "epoch": 0.1199473363595276, + "grad_norm": 0.22909322381019592, + "learning_rate": 4.956950579550922e-05, + "loss": 1.1915, + "step": 763 + }, + { + "epoch": 0.12010454125646014, + "grad_norm": 0.16839763522148132, + "learning_rate": 4.956836341110134e-05, + "loss": 1.234, + "step": 764 + }, + { + "epoch": 0.12026174615339268, + "grad_norm": 0.2291131466627121, + "learning_rate": 4.956721952615129e-05, + "loss": 1.2964, + "step": 765 + }, + { + "epoch": 0.12041895105032521, + "grad_norm": 0.2606765329837799, + "learning_rate": 4.956607414072895e-05, + "loss": 1.2785, + "step": 766 + }, + { + "epoch": 0.12057615594725776, + "grad_norm": 0.24100011587142944, + "learning_rate": 4.956492725490426e-05, + "loss": 1.2389, + "step": 767 + }, + { + "epoch": 
0.1207333608441903, + "grad_norm": 0.2868693172931671, + "learning_rate": 4.956377886874729e-05, + "loss": 1.3852, + "step": 768 + }, + { + "epoch": 0.12089056574112284, + "grad_norm": 0.29049259424209595, + "learning_rate": 4.956262898232816e-05, + "loss": 1.1511, + "step": 769 + }, + { + "epoch": 0.12104777063805537, + "grad_norm": 0.31396448612213135, + "learning_rate": 4.9561477595717106e-05, + "loss": 1.2687, + "step": 770 + }, + { + "epoch": 0.12120497553498792, + "grad_norm": 0.3348733186721802, + "learning_rate": 4.956032470898445e-05, + "loss": 1.1933, + "step": 771 + }, + { + "epoch": 0.12136218043192046, + "grad_norm": 0.2009342461824417, + "learning_rate": 4.955917032220061e-05, + "loss": 1.3299, + "step": 772 + }, + { + "epoch": 0.121519385328853, + "grad_norm": 0.2037377655506134, + "learning_rate": 4.9558014435436084e-05, + "loss": 1.3208, + "step": 773 + }, + { + "epoch": 0.12167659022578553, + "grad_norm": 0.3118877410888672, + "learning_rate": 4.955685704876147e-05, + "loss": 1.1927, + "step": 774 + }, + { + "epoch": 0.12183379512271807, + "grad_norm": 0.21884632110595703, + "learning_rate": 4.955569816224747e-05, + "loss": 1.2661, + "step": 775 + }, + { + "epoch": 0.12199100001965062, + "grad_norm": 0.25817862153053284, + "learning_rate": 4.9554537775964846e-05, + "loss": 1.3077, + "step": 776 + }, + { + "epoch": 0.12214820491658315, + "grad_norm": 0.27827751636505127, + "learning_rate": 4.955337588998449e-05, + "loss": 1.2709, + "step": 777 + }, + { + "epoch": 0.12230540981351569, + "grad_norm": 0.30520737171173096, + "learning_rate": 4.955221250437735e-05, + "loss": 1.2407, + "step": 778 + }, + { + "epoch": 0.12246261471044823, + "grad_norm": 0.21729423105716705, + "learning_rate": 4.9551047619214473e-05, + "loss": 1.3392, + "step": 779 + }, + { + "epoch": 0.12261981960738078, + "grad_norm": 0.2408866286277771, + "learning_rate": 4.954988123456703e-05, + "loss": 1.215, + "step": 780 + }, + { + "epoch": 0.12277702450431331, + "grad_norm": 0.23833869397640228, + "learning_rate": 4.954871335050625e-05, + "loss": 1.3607, + "step": 781 + }, + { + "epoch": 0.12293422940124585, + "grad_norm": 0.27017349004745483, + "learning_rate": 4.954754396710345e-05, + "loss": 1.2662, + "step": 782 + }, + { + "epoch": 0.12309143429817838, + "grad_norm": 0.21869684755802155, + "learning_rate": 4.954637308443007e-05, + "loss": 1.2384, + "step": 783 + }, + { + "epoch": 0.12324863919511093, + "grad_norm": 0.18912911415100098, + "learning_rate": 4.9545200702557615e-05, + "loss": 1.2958, + "step": 784 + }, + { + "epoch": 0.12340584409204347, + "grad_norm": 0.27320876717567444, + "learning_rate": 4.954402682155768e-05, + "loss": 1.2546, + "step": 785 + }, + { + "epoch": 0.12356304898897601, + "grad_norm": 0.2938046455383301, + "learning_rate": 4.954285144150198e-05, + "loss": 1.3451, + "step": 786 + }, + { + "epoch": 0.12372025388590854, + "grad_norm": 0.18271508812904358, + "learning_rate": 4.954167456246229e-05, + "loss": 1.2239, + "step": 787 + }, + { + "epoch": 0.12387745878284108, + "grad_norm": 0.21799346804618835, + "learning_rate": 4.9540496184510495e-05, + "loss": 1.2471, + "step": 788 + }, + { + "epoch": 0.12403466367977363, + "grad_norm": 0.21574997901916504, + "learning_rate": 4.9539316307718564e-05, + "loss": 1.3137, + "step": 789 + }, + { + "epoch": 0.12419186857670617, + "grad_norm": 0.21586358547210693, + "learning_rate": 4.953813493215855e-05, + "loss": 1.2763, + "step": 790 + }, + { + "epoch": 0.1243490734736387, + "grad_norm": 0.2723408043384552, + "learning_rate": 
4.953695205790262e-05, + "loss": 1.4148, + "step": 791 + }, + { + "epoch": 0.12450627837057124, + "grad_norm": 0.29501527547836304, + "learning_rate": 4.9535767685023026e-05, + "loss": 1.3093, + "step": 792 + }, + { + "epoch": 0.12466348326750379, + "grad_norm": 0.2884112000465393, + "learning_rate": 4.9534581813592086e-05, + "loss": 1.3276, + "step": 793 + }, + { + "epoch": 0.12482068816443632, + "grad_norm": 0.24246759712696075, + "learning_rate": 4.9533394443682234e-05, + "loss": 1.3203, + "step": 794 + }, + { + "epoch": 0.12497789306136886, + "grad_norm": 0.23493270576000214, + "learning_rate": 4.9532205575365995e-05, + "loss": 1.2567, + "step": 795 + }, + { + "epoch": 0.1251350979583014, + "grad_norm": 0.26456305384635925, + "learning_rate": 4.953101520871598e-05, + "loss": 1.3194, + "step": 796 + }, + { + "epoch": 0.12529230285523393, + "grad_norm": 0.18891221284866333, + "learning_rate": 4.952982334380489e-05, + "loss": 1.3041, + "step": 797 + }, + { + "epoch": 0.12544950775216648, + "grad_norm": 0.21460258960723877, + "learning_rate": 4.952862998070552e-05, + "loss": 1.2274, + "step": 798 + }, + { + "epoch": 0.125606712649099, + "grad_norm": 0.2832646667957306, + "learning_rate": 4.9527435119490753e-05, + "loss": 1.2009, + "step": 799 + }, + { + "epoch": 0.12576391754603156, + "grad_norm": 0.22183702886104584, + "learning_rate": 4.9526238760233576e-05, + "loss": 1.31, + "step": 800 + }, + { + "epoch": 0.12576391754603156, + "eval_loss": 1.2745521068572998, + "eval_runtime": 2292.1003, + "eval_samples_per_second": 4.039, + "eval_steps_per_second": 2.02, + "step": 800 + }, + { + "epoch": 0.1259211224429641, + "grad_norm": 0.21697020530700684, + "learning_rate": 4.9525040903007046e-05, + "loss": 1.3197, + "step": 801 + }, + { + "epoch": 0.12607832733989663, + "grad_norm": 0.36354196071624756, + "learning_rate": 4.952384154788433e-05, + "loss": 1.1926, + "step": 802 + }, + { + "epoch": 0.12623553223682918, + "grad_norm": 0.27054232358932495, + "learning_rate": 4.952264069493868e-05, + "loss": 1.3199, + "step": 803 + }, + { + "epoch": 0.12639273713376173, + "grad_norm": 0.2425469011068344, + "learning_rate": 4.952143834424344e-05, + "loss": 1.2906, + "step": 804 + }, + { + "epoch": 0.12654994203069425, + "grad_norm": 0.1988941729068756, + "learning_rate": 4.952023449587205e-05, + "loss": 1.3183, + "step": 805 + }, + { + "epoch": 0.1267071469276268, + "grad_norm": 0.2429157942533493, + "learning_rate": 4.951902914989802e-05, + "loss": 1.2497, + "step": 806 + }, + { + "epoch": 0.12686435182455932, + "grad_norm": 0.2704293727874756, + "learning_rate": 4.951782230639499e-05, + "loss": 1.3113, + "step": 807 + }, + { + "epoch": 0.12702155672149187, + "grad_norm": 0.31801360845565796, + "learning_rate": 4.951661396543664e-05, + "loss": 1.2354, + "step": 808 + }, + { + "epoch": 0.12717876161842442, + "grad_norm": 0.21358463168144226, + "learning_rate": 4.951540412709681e-05, + "loss": 1.3512, + "step": 809 + }, + { + "epoch": 0.12733596651535695, + "grad_norm": 0.24300484359264374, + "learning_rate": 4.951419279144936e-05, + "loss": 1.213, + "step": 810 + }, + { + "epoch": 0.1274931714122895, + "grad_norm": 0.39550015330314636, + "learning_rate": 4.951297995856828e-05, + "loss": 1.1872, + "step": 811 + }, + { + "epoch": 0.12765037630922202, + "grad_norm": 0.20150414109230042, + "learning_rate": 4.951176562852765e-05, + "loss": 1.3469, + "step": 812 + }, + { + "epoch": 0.12780758120615457, + "grad_norm": 0.2050725519657135, + "learning_rate": 4.951054980140164e-05, + "loss": 1.259, + "step": 813 
+ }, + { + "epoch": 0.12796478610308712, + "grad_norm": 0.23815183341503143, + "learning_rate": 4.950933247726451e-05, + "loss": 1.2961, + "step": 814 + }, + { + "epoch": 0.12812199100001964, + "grad_norm": 0.28224676847457886, + "learning_rate": 4.95081136561906e-05, + "loss": 1.2631, + "step": 815 + }, + { + "epoch": 0.1282791958969522, + "grad_norm": 0.294791042804718, + "learning_rate": 4.9506893338254353e-05, + "loss": 1.1834, + "step": 816 + }, + { + "epoch": 0.12843640079388474, + "grad_norm": 0.29148972034454346, + "learning_rate": 4.9505671523530306e-05, + "loss": 1.2573, + "step": 817 + }, + { + "epoch": 0.12859360569081726, + "grad_norm": 0.29371243715286255, + "learning_rate": 4.950444821209308e-05, + "loss": 1.4532, + "step": 818 + }, + { + "epoch": 0.12875081058774981, + "grad_norm": 0.2303713709115982, + "learning_rate": 4.9503223404017396e-05, + "loss": 1.2828, + "step": 819 + }, + { + "epoch": 0.12890801548468234, + "grad_norm": 0.24906295537948608, + "learning_rate": 4.9501997099378046e-05, + "loss": 1.2759, + "step": 820 + }, + { + "epoch": 0.1290652203816149, + "grad_norm": 0.1983998864889145, + "learning_rate": 4.950076929824994e-05, + "loss": 1.3111, + "step": 821 + }, + { + "epoch": 0.12922242527854744, + "grad_norm": 0.2079075276851654, + "learning_rate": 4.9499540000708064e-05, + "loss": 1.3416, + "step": 822 + }, + { + "epoch": 0.12937963017547996, + "grad_norm": 0.22548237442970276, + "learning_rate": 4.94983092068275e-05, + "loss": 1.3879, + "step": 823 + }, + { + "epoch": 0.1295368350724125, + "grad_norm": 0.2052278220653534, + "learning_rate": 4.949707691668343e-05, + "loss": 1.3347, + "step": 824 + }, + { + "epoch": 0.12969403996934503, + "grad_norm": 0.21978795528411865, + "learning_rate": 4.949584313035109e-05, + "loss": 1.1345, + "step": 825 + }, + { + "epoch": 0.12985124486627758, + "grad_norm": 0.18930193781852722, + "learning_rate": 4.9494607847905863e-05, + "loss": 1.319, + "step": 826 + }, + { + "epoch": 0.13000844976321013, + "grad_norm": 0.24538543820381165, + "learning_rate": 4.9493371069423176e-05, + "loss": 1.3103, + "step": 827 + }, + { + "epoch": 0.13016565466014265, + "grad_norm": 0.2874930799007416, + "learning_rate": 4.9492132794978586e-05, + "loss": 1.3388, + "step": 828 + }, + { + "epoch": 0.1303228595570752, + "grad_norm": 0.23338377475738525, + "learning_rate": 4.949089302464771e-05, + "loss": 1.2793, + "step": 829 + }, + { + "epoch": 0.13048006445400775, + "grad_norm": 0.23670902848243713, + "learning_rate": 4.948965175850626e-05, + "loss": 1.2808, + "step": 830 + }, + { + "epoch": 0.13063726935094028, + "grad_norm": 0.2617732584476471, + "learning_rate": 4.9488408996630066e-05, + "loss": 1.2641, + "step": 831 + }, + { + "epoch": 0.13079447424787283, + "grad_norm": 0.24584044516086578, + "learning_rate": 4.948716473909502e-05, + "loss": 1.2462, + "step": 832 + }, + { + "epoch": 0.13095167914480535, + "grad_norm": 0.2507297098636627, + "learning_rate": 4.948591898597712e-05, + "loss": 1.2211, + "step": 833 + }, + { + "epoch": 0.1311088840417379, + "grad_norm": 0.25439611077308655, + "learning_rate": 4.948467173735245e-05, + "loss": 1.2762, + "step": 834 + }, + { + "epoch": 0.13126608893867045, + "grad_norm": 0.19934779405593872, + "learning_rate": 4.948342299329719e-05, + "loss": 1.1798, + "step": 835 + }, + { + "epoch": 0.13142329383560297, + "grad_norm": 0.24154123663902283, + "learning_rate": 4.948217275388761e-05, + "loss": 1.2608, + "step": 836 + }, + { + "epoch": 0.13158049873253552, + "grad_norm": 0.2484877109527588, + 
"learning_rate": 4.948092101920006e-05, + "loss": 1.2466, + "step": 837 + }, + { + "epoch": 0.13173770362946804, + "grad_norm": 0.28683343529701233, + "learning_rate": 4.9479667789311e-05, + "loss": 1.1915, + "step": 838 + }, + { + "epoch": 0.1318949085264006, + "grad_norm": 0.21289369463920593, + "learning_rate": 4.9478413064296976e-05, + "loss": 1.2642, + "step": 839 + }, + { + "epoch": 0.13205211342333315, + "grad_norm": 0.22933778166770935, + "learning_rate": 4.947715684423461e-05, + "loss": 1.2182, + "step": 840 + }, + { + "epoch": 0.13220931832026567, + "grad_norm": 0.2507724463939667, + "learning_rate": 4.9475899129200635e-05, + "loss": 1.3089, + "step": 841 + }, + { + "epoch": 0.13236652321719822, + "grad_norm": 0.251770943403244, + "learning_rate": 4.947463991927187e-05, + "loss": 1.3194, + "step": 842 + }, + { + "epoch": 0.13252372811413077, + "grad_norm": 0.2533280849456787, + "learning_rate": 4.947337921452521e-05, + "loss": 1.2141, + "step": 843 + }, + { + "epoch": 0.1326809330110633, + "grad_norm": 0.26309993863105774, + "learning_rate": 4.9472117015037664e-05, + "loss": 1.2265, + "step": 844 + }, + { + "epoch": 0.13283813790799584, + "grad_norm": 0.29711806774139404, + "learning_rate": 4.9470853320886335e-05, + "loss": 1.2538, + "step": 845 + }, + { + "epoch": 0.13299534280492836, + "grad_norm": 0.24551883339881897, + "learning_rate": 4.9469588132148373e-05, + "loss": 1.2927, + "step": 846 + }, + { + "epoch": 0.1331525477018609, + "grad_norm": 0.28027257323265076, + "learning_rate": 4.946832144890108e-05, + "loss": 1.2712, + "step": 847 + }, + { + "epoch": 0.13330975259879346, + "grad_norm": 0.22099149227142334, + "learning_rate": 4.9467053271221804e-05, + "loss": 1.2095, + "step": 848 + }, + { + "epoch": 0.13346695749572599, + "grad_norm": 0.19661381840705872, + "learning_rate": 4.946578359918801e-05, + "loss": 1.2855, + "step": 849 + }, + { + "epoch": 0.13362416239265854, + "grad_norm": 0.22767631709575653, + "learning_rate": 4.946451243287723e-05, + "loss": 1.2932, + "step": 850 + }, + { + "epoch": 0.13378136728959106, + "grad_norm": 0.28008589148521423, + "learning_rate": 4.946323977236712e-05, + "loss": 1.2335, + "step": 851 + }, + { + "epoch": 0.1339385721865236, + "grad_norm": 0.2091825157403946, + "learning_rate": 4.94619656177354e-05, + "loss": 1.3151, + "step": 852 + }, + { + "epoch": 0.13409577708345616, + "grad_norm": 0.1978277713060379, + "learning_rate": 4.946068996905989e-05, + "loss": 1.3359, + "step": 853 + }, + { + "epoch": 0.13425298198038868, + "grad_norm": 0.21397674083709717, + "learning_rate": 4.9459412826418505e-05, + "loss": 1.2998, + "step": 854 + }, + { + "epoch": 0.13441018687732123, + "grad_norm": 0.30490776896476746, + "learning_rate": 4.945813418988925e-05, + "loss": 1.2607, + "step": 855 + }, + { + "epoch": 0.13456739177425378, + "grad_norm": 0.2896914780139923, + "learning_rate": 4.945685405955021e-05, + "loss": 1.2329, + "step": 856 + }, + { + "epoch": 0.1347245966711863, + "grad_norm": 0.1988048106431961, + "learning_rate": 4.945557243547958e-05, + "loss": 1.2877, + "step": 857 + }, + { + "epoch": 0.13488180156811885, + "grad_norm": 0.17888212203979492, + "learning_rate": 4.945428931775563e-05, + "loss": 1.2543, + "step": 858 + }, + { + "epoch": 0.13503900646505138, + "grad_norm": 0.2748056650161743, + "learning_rate": 4.945300470645673e-05, + "loss": 1.3461, + "step": 859 + }, + { + "epoch": 0.13519621136198393, + "grad_norm": 0.23218591511249542, + "learning_rate": 4.945171860166135e-05, + "loss": 1.2878, + "step": 860 + }, + { + "epoch": 
0.13535341625891648, + "grad_norm": 0.33142325282096863, + "learning_rate": 4.9450431003448015e-05, + "loss": 1.294, + "step": 861 + }, + { + "epoch": 0.135510621155849, + "grad_norm": 0.2330816686153412, + "learning_rate": 4.944914191189539e-05, + "loss": 1.3593, + "step": 862 + }, + { + "epoch": 0.13566782605278155, + "grad_norm": 0.23989921808242798, + "learning_rate": 4.9447851327082204e-05, + "loss": 1.2879, + "step": 863 + }, + { + "epoch": 0.13582503094971407, + "grad_norm": 0.21358944475650787, + "learning_rate": 4.944655924908727e-05, + "loss": 1.222, + "step": 864 + }, + { + "epoch": 0.13598223584664662, + "grad_norm": 0.30434924364089966, + "learning_rate": 4.9445265677989515e-05, + "loss": 1.273, + "step": 865 + }, + { + "epoch": 0.13613944074357917, + "grad_norm": 0.22028383612632751, + "learning_rate": 4.944397061386794e-05, + "loss": 1.2494, + "step": 866 + }, + { + "epoch": 0.1362966456405117, + "grad_norm": 0.2354927659034729, + "learning_rate": 4.944267405680164e-05, + "loss": 1.1469, + "step": 867 + }, + { + "epoch": 0.13645385053744424, + "grad_norm": 0.28941988945007324, + "learning_rate": 4.944137600686981e-05, + "loss": 1.1678, + "step": 868 + }, + { + "epoch": 0.1366110554343768, + "grad_norm": 0.2538214325904846, + "learning_rate": 4.944007646415172e-05, + "loss": 1.2636, + "step": 869 + }, + { + "epoch": 0.13676826033130932, + "grad_norm": 0.3719157576560974, + "learning_rate": 4.943877542872676e-05, + "loss": 1.2901, + "step": 870 + }, + { + "epoch": 0.13692546522824187, + "grad_norm": 0.2994091212749481, + "learning_rate": 4.943747290067438e-05, + "loss": 1.2209, + "step": 871 + }, + { + "epoch": 0.1370826701251744, + "grad_norm": 0.23586580157279968, + "learning_rate": 4.9436168880074115e-05, + "loss": 1.2989, + "step": 872 + }, + { + "epoch": 0.13723987502210694, + "grad_norm": 0.193126380443573, + "learning_rate": 4.943486336700564e-05, + "loss": 1.204, + "step": 873 + }, + { + "epoch": 0.1373970799190395, + "grad_norm": 0.18505080044269562, + "learning_rate": 4.943355636154868e-05, + "loss": 1.3247, + "step": 874 + }, + { + "epoch": 0.137554284815972, + "grad_norm": 0.2586881220340729, + "learning_rate": 4.9432247863783064e-05, + "loss": 1.3315, + "step": 875 + }, + { + "epoch": 0.13771148971290456, + "grad_norm": 0.2904506027698517, + "learning_rate": 4.943093787378871e-05, + "loss": 1.2593, + "step": 876 + }, + { + "epoch": 0.13786869460983708, + "grad_norm": 0.2971174120903015, + "learning_rate": 4.9429626391645615e-05, + "loss": 1.2241, + "step": 877 + }, + { + "epoch": 0.13802589950676963, + "grad_norm": 0.42521703243255615, + "learning_rate": 4.9428313417433894e-05, + "loss": 1.2638, + "step": 878 + }, + { + "epoch": 0.13818310440370218, + "grad_norm": 0.2515777349472046, + "learning_rate": 4.9426998951233735e-05, + "loss": 1.3111, + "step": 879 + }, + { + "epoch": 0.1383403093006347, + "grad_norm": 0.25959545373916626, + "learning_rate": 4.942568299312541e-05, + "loss": 1.2505, + "step": 880 + }, + { + "epoch": 0.13849751419756726, + "grad_norm": 0.28090932965278625, + "learning_rate": 4.942436554318931e-05, + "loss": 1.1604, + "step": 881 + }, + { + "epoch": 0.1386547190944998, + "grad_norm": 0.21833541989326477, + "learning_rate": 4.942304660150588e-05, + "loss": 1.2246, + "step": 882 + }, + { + "epoch": 0.13881192399143233, + "grad_norm": 0.26167765259742737, + "learning_rate": 4.9421726168155704e-05, + "loss": 1.2399, + "step": 883 + }, + { + "epoch": 0.13896912888836488, + "grad_norm": 0.23778817057609558, + "learning_rate": 
4.9420404243219395e-05, + "loss": 1.2692, + "step": 884 + }, + { + "epoch": 0.1391263337852974, + "grad_norm": 0.43253734707832336, + "learning_rate": 4.941908082677773e-05, + "loss": 1.2302, + "step": 885 + }, + { + "epoch": 0.13928353868222995, + "grad_norm": 0.2448786050081253, + "learning_rate": 4.94177559189115e-05, + "loss": 1.3163, + "step": 886 + }, + { + "epoch": 0.1394407435791625, + "grad_norm": 0.24711786210536957, + "learning_rate": 4.941642951970165e-05, + "loss": 1.2756, + "step": 887 + }, + { + "epoch": 0.13959794847609502, + "grad_norm": 0.22932004928588867, + "learning_rate": 4.941510162922917e-05, + "loss": 1.3087, + "step": 888 + }, + { + "epoch": 0.13975515337302757, + "grad_norm": 0.24999158084392548, + "learning_rate": 4.941377224757518e-05, + "loss": 1.3328, + "step": 889 + }, + { + "epoch": 0.1399123582699601, + "grad_norm": 0.21222981810569763, + "learning_rate": 4.941244137482088e-05, + "loss": 1.3177, + "step": 890 + }, + { + "epoch": 0.14006956316689265, + "grad_norm": 0.22691656649112701, + "learning_rate": 4.941110901104754e-05, + "loss": 1.2937, + "step": 891 + }, + { + "epoch": 0.1402267680638252, + "grad_norm": 0.3120933771133423, + "learning_rate": 4.940977515633653e-05, + "loss": 1.1604, + "step": 892 + }, + { + "epoch": 0.14038397296075772, + "grad_norm": 0.24279998242855072, + "learning_rate": 4.940843981076934e-05, + "loss": 1.3234, + "step": 893 + }, + { + "epoch": 0.14054117785769027, + "grad_norm": 0.25406959652900696, + "learning_rate": 4.940710297442751e-05, + "loss": 1.3216, + "step": 894 + }, + { + "epoch": 0.14069838275462282, + "grad_norm": 0.29678472876548767, + "learning_rate": 4.940576464739269e-05, + "loss": 1.2706, + "step": 895 + }, + { + "epoch": 0.14085558765155534, + "grad_norm": 0.25185081362724304, + "learning_rate": 4.9404424829746634e-05, + "loss": 1.2456, + "step": 896 + }, + { + "epoch": 0.1410127925484879, + "grad_norm": 0.2171952873468399, + "learning_rate": 4.940308352157115e-05, + "loss": 1.2943, + "step": 897 + }, + { + "epoch": 0.14116999744542041, + "grad_norm": 0.21498677134513855, + "learning_rate": 4.940174072294818e-05, + "loss": 1.3466, + "step": 898 + }, + { + "epoch": 0.14132720234235296, + "grad_norm": 0.2881999611854553, + "learning_rate": 4.940039643395972e-05, + "loss": 1.2322, + "step": 899 + }, + { + "epoch": 0.14148440723928551, + "grad_norm": 0.2709384858608246, + "learning_rate": 4.939905065468789e-05, + "loss": 1.2228, + "step": 900 + }, + { + "epoch": 0.14164161213621804, + "grad_norm": 0.2723088562488556, + "learning_rate": 4.9397703385214875e-05, + "loss": 1.1937, + "step": 901 + }, + { + "epoch": 0.1417988170331506, + "grad_norm": 0.4296363294124603, + "learning_rate": 4.939635462562297e-05, + "loss": 1.2043, + "step": 902 + }, + { + "epoch": 0.1419560219300831, + "grad_norm": 0.3255182206630707, + "learning_rate": 4.939500437599454e-05, + "loss": 1.1563, + "step": 903 + }, + { + "epoch": 0.14211322682701566, + "grad_norm": 0.33772897720336914, + "learning_rate": 4.939365263641206e-05, + "loss": 1.3019, + "step": 904 + }, + { + "epoch": 0.1422704317239482, + "grad_norm": 0.18991219997406006, + "learning_rate": 4.93922994069581e-05, + "loss": 1.3575, + "step": 905 + }, + { + "epoch": 0.14242763662088073, + "grad_norm": 0.23950403928756714, + "learning_rate": 4.939094468771529e-05, + "loss": 1.2512, + "step": 906 + }, + { + "epoch": 0.14258484151781328, + "grad_norm": 0.29783302545547485, + "learning_rate": 4.938958847876637e-05, + "loss": 1.3033, + "step": 907 + }, + { + "epoch": 0.14274204641474583, 
+ "grad_norm": 0.3168744742870331, + "learning_rate": 4.93882307801942e-05, + "loss": 1.1934, + "step": 908 + }, + { + "epoch": 0.14289925131167835, + "grad_norm": 0.22578391432762146, + "learning_rate": 4.9386871592081675e-05, + "loss": 1.3307, + "step": 909 + }, + { + "epoch": 0.1430564562086109, + "grad_norm": 0.32671108841896057, + "learning_rate": 4.9385510914511824e-05, + "loss": 1.2436, + "step": 910 + }, + { + "epoch": 0.14321366110554343, + "grad_norm": 0.2524665296077728, + "learning_rate": 4.938414874756774e-05, + "loss": 1.2611, + "step": 911 + }, + { + "epoch": 0.14337086600247598, + "grad_norm": 0.3576960563659668, + "learning_rate": 4.9382785091332625e-05, + "loss": 1.3721, + "step": 912 + }, + { + "epoch": 0.14352807089940853, + "grad_norm": 0.2915900945663452, + "learning_rate": 4.9381419945889776e-05, + "loss": 1.3539, + "step": 913 + }, + { + "epoch": 0.14368527579634105, + "grad_norm": 0.3168608844280243, + "learning_rate": 4.938005331132256e-05, + "loss": 1.224, + "step": 914 + }, + { + "epoch": 0.1438424806932736, + "grad_norm": 0.24886426329612732, + "learning_rate": 4.937868518771445e-05, + "loss": 1.2299, + "step": 915 + }, + { + "epoch": 0.14399968559020612, + "grad_norm": 0.26588642597198486, + "learning_rate": 4.9377315575149e-05, + "loss": 1.1947, + "step": 916 + }, + { + "epoch": 0.14415689048713867, + "grad_norm": 0.28032201528549194, + "learning_rate": 4.937594447370986e-05, + "loss": 1.3756, + "step": 917 + }, + { + "epoch": 0.14431409538407122, + "grad_norm": 0.3017072081565857, + "learning_rate": 4.937457188348078e-05, + "loss": 1.2723, + "step": 918 + }, + { + "epoch": 0.14447130028100374, + "grad_norm": 0.2926197648048401, + "learning_rate": 4.937319780454559e-05, + "loss": 1.2716, + "step": 919 + }, + { + "epoch": 0.1446285051779363, + "grad_norm": 0.24066713452339172, + "learning_rate": 4.937182223698821e-05, + "loss": 1.2828, + "step": 920 + }, + { + "epoch": 0.14478571007486885, + "grad_norm": 0.30001577734947205, + "learning_rate": 4.937044518089266e-05, + "loss": 1.2407, + "step": 921 + }, + { + "epoch": 0.14494291497180137, + "grad_norm": 0.25927406549453735, + "learning_rate": 4.9369066636343044e-05, + "loss": 1.3004, + "step": 922 + }, + { + "epoch": 0.14510011986873392, + "grad_norm": 0.2542930543422699, + "learning_rate": 4.936768660342355e-05, + "loss": 1.3312, + "step": 923 + }, + { + "epoch": 0.14525732476566644, + "grad_norm": 0.25233832001686096, + "learning_rate": 4.936630508221847e-05, + "loss": 1.1879, + "step": 924 + }, + { + "epoch": 0.145414529662599, + "grad_norm": 0.22136953473091125, + "learning_rate": 4.9364922072812185e-05, + "loss": 1.2649, + "step": 925 + }, + { + "epoch": 0.14557173455953154, + "grad_norm": 0.21759863197803497, + "learning_rate": 4.936353757528916e-05, + "loss": 1.2467, + "step": 926 + }, + { + "epoch": 0.14572893945646406, + "grad_norm": 0.27614825963974, + "learning_rate": 4.936215158973396e-05, + "loss": 1.1901, + "step": 927 + }, + { + "epoch": 0.1458861443533966, + "grad_norm": 0.2502923309803009, + "learning_rate": 4.936076411623124e-05, + "loss": 1.3358, + "step": 928 + }, + { + "epoch": 0.14604334925032914, + "grad_norm": 0.2419285923242569, + "learning_rate": 4.935937515486573e-05, + "loss": 1.24, + "step": 929 + }, + { + "epoch": 0.14620055414726169, + "grad_norm": 0.35315272212028503, + "learning_rate": 4.935798470572226e-05, + "loss": 1.2452, + "step": 930 + }, + { + "epoch": 0.14635775904419424, + "grad_norm": 0.28915464878082275, + "learning_rate": 4.935659276888577e-05, + "loss": 1.3369, + 
"step": 931 + }, + { + "epoch": 0.14651496394112676, + "grad_norm": 0.23898139595985413, + "learning_rate": 4.9355199344441254e-05, + "loss": 1.2328, + "step": 932 + }, + { + "epoch": 0.1466721688380593, + "grad_norm": 0.25197896361351013, + "learning_rate": 4.935380443247384e-05, + "loss": 1.2826, + "step": 933 + }, + { + "epoch": 0.14682937373499186, + "grad_norm": 0.26547369360923767, + "learning_rate": 4.9352408033068695e-05, + "loss": 1.2284, + "step": 934 + }, + { + "epoch": 0.14698657863192438, + "grad_norm": 0.22031289339065552, + "learning_rate": 4.935101014631114e-05, + "loss": 1.2918, + "step": 935 + }, + { + "epoch": 0.14714378352885693, + "grad_norm": 0.2603214979171753, + "learning_rate": 4.9349610772286525e-05, + "loss": 1.1767, + "step": 936 + }, + { + "epoch": 0.14730098842578945, + "grad_norm": 0.29469192028045654, + "learning_rate": 4.934820991108032e-05, + "loss": 1.2845, + "step": 937 + }, + { + "epoch": 0.147458193322722, + "grad_norm": 0.30825692415237427, + "learning_rate": 4.934680756277811e-05, + "loss": 1.1999, + "step": 938 + }, + { + "epoch": 0.14761539821965455, + "grad_norm": 0.25342094898223877, + "learning_rate": 4.934540372746552e-05, + "loss": 1.2285, + "step": 939 + }, + { + "epoch": 0.14777260311658708, + "grad_norm": 0.26036733388900757, + "learning_rate": 4.9343998405228295e-05, + "loss": 1.2367, + "step": 940 + }, + { + "epoch": 0.14792980801351963, + "grad_norm": 0.27401411533355713, + "learning_rate": 4.934259159615228e-05, + "loss": 1.1985, + "step": 941 + }, + { + "epoch": 0.14808701291045215, + "grad_norm": 0.23039095103740692, + "learning_rate": 4.934118330032338e-05, + "loss": 1.2649, + "step": 942 + }, + { + "epoch": 0.1482442178073847, + "grad_norm": 0.29547953605651855, + "learning_rate": 4.933977351782761e-05, + "loss": 1.1345, + "step": 943 + }, + { + "epoch": 0.14840142270431725, + "grad_norm": 0.22598884999752045, + "learning_rate": 4.933836224875109e-05, + "loss": 1.2965, + "step": 944 + }, + { + "epoch": 0.14855862760124977, + "grad_norm": 0.31008240580558777, + "learning_rate": 4.9336949493180006e-05, + "loss": 1.1144, + "step": 945 + }, + { + "epoch": 0.14871583249818232, + "grad_norm": 0.28397658467292786, + "learning_rate": 4.9335535251200636e-05, + "loss": 1.266, + "step": 946 + }, + { + "epoch": 0.14887303739511487, + "grad_norm": 0.2284776121377945, + "learning_rate": 4.933411952289937e-05, + "loss": 1.2164, + "step": 947 + }, + { + "epoch": 0.1490302422920474, + "grad_norm": 0.2107551246881485, + "learning_rate": 4.9332702308362665e-05, + "loss": 1.2719, + "step": 948 + }, + { + "epoch": 0.14918744718897994, + "grad_norm": 0.26652616262435913, + "learning_rate": 4.933128360767709e-05, + "loss": 1.2304, + "step": 949 + }, + { + "epoch": 0.14934465208591247, + "grad_norm": 0.22624680399894714, + "learning_rate": 4.932986342092928e-05, + "loss": 1.2999, + "step": 950 + }, + { + "epoch": 0.14950185698284502, + "grad_norm": 0.20410288870334625, + "learning_rate": 4.932844174820598e-05, + "loss": 1.2269, + "step": 951 + }, + { + "epoch": 0.14965906187977757, + "grad_norm": 0.24987919628620148, + "learning_rate": 4.932701858959403e-05, + "loss": 1.3042, + "step": 952 + }, + { + "epoch": 0.1498162667767101, + "grad_norm": 0.191947340965271, + "learning_rate": 4.932559394518033e-05, + "loss": 1.2803, + "step": 953 + }, + { + "epoch": 0.14997347167364264, + "grad_norm": 0.3396085798740387, + "learning_rate": 4.932416781505191e-05, + "loss": 1.2014, + "step": 954 + }, + { + "epoch": 0.15013067657057516, + "grad_norm": 
0.22375932335853577, + "learning_rate": 4.932274019929587e-05, + "loss": 1.242, + "step": 955 + }, + { + "epoch": 0.1502878814675077, + "grad_norm": 0.281097412109375, + "learning_rate": 4.93213110979994e-05, + "loss": 1.2742, + "step": 956 + }, + { + "epoch": 0.15044508636444026, + "grad_norm": 0.24049919843673706, + "learning_rate": 4.931988051124979e-05, + "loss": 1.3166, + "step": 957 + }, + { + "epoch": 0.15060229126137278, + "grad_norm": 0.24433936178684235, + "learning_rate": 4.93184484391344e-05, + "loss": 1.2933, + "step": 958 + }, + { + "epoch": 0.15075949615830533, + "grad_norm": 0.3671477138996124, + "learning_rate": 4.9317014881740706e-05, + "loss": 1.1731, + "step": 959 + }, + { + "epoch": 0.15091670105523788, + "grad_norm": 0.22575189173221588, + "learning_rate": 4.931557983915627e-05, + "loss": 1.2509, + "step": 960 + }, + { + "epoch": 0.15091670105523788, + "eval_loss": 1.2455451488494873, + "eval_runtime": 2308.563, + "eval_samples_per_second": 4.01, + "eval_steps_per_second": 2.005, + "step": 960 + }, + { + "epoch": 0.1510739059521704, + "grad_norm": 0.25838157534599304, + "learning_rate": 4.931414331146873e-05, + "loss": 1.3554, + "step": 961 + }, + { + "epoch": 0.15123111084910296, + "grad_norm": 0.3163435459136963, + "learning_rate": 4.931270529876583e-05, + "loss": 1.3133, + "step": 962 + }, + { + "epoch": 0.15138831574603548, + "grad_norm": 0.30024880170822144, + "learning_rate": 4.9311265801135384e-05, + "loss": 1.2303, + "step": 963 + }, + { + "epoch": 0.15154552064296803, + "grad_norm": 0.3424816429615021, + "learning_rate": 4.9309824818665325e-05, + "loss": 1.1929, + "step": 964 + }, + { + "epoch": 0.15170272553990058, + "grad_norm": 0.27401861548423767, + "learning_rate": 4.930838235144366e-05, + "loss": 1.2198, + "step": 965 + }, + { + "epoch": 0.1518599304368331, + "grad_norm": 0.24110247194766998, + "learning_rate": 4.930693839955848e-05, + "loss": 1.2381, + "step": 966 + }, + { + "epoch": 0.15201713533376565, + "grad_norm": 0.159100741147995, + "learning_rate": 4.9305492963098e-05, + "loss": 1.2666, + "step": 967 + }, + { + "epoch": 0.15217434023069817, + "grad_norm": 0.23810729384422302, + "learning_rate": 4.9304046042150474e-05, + "loss": 1.2592, + "step": 968 + }, + { + "epoch": 0.15233154512763072, + "grad_norm": 0.19887159764766693, + "learning_rate": 4.930259763680429e-05, + "loss": 1.3179, + "step": 969 + }, + { + "epoch": 0.15248875002456327, + "grad_norm": 0.22060149908065796, + "learning_rate": 4.930114774714791e-05, + "loss": 1.2712, + "step": 970 + }, + { + "epoch": 0.1526459549214958, + "grad_norm": 0.2443406730890274, + "learning_rate": 4.929969637326989e-05, + "loss": 1.2021, + "step": 971 + }, + { + "epoch": 0.15280315981842835, + "grad_norm": 0.2185499370098114, + "learning_rate": 4.9298243515258855e-05, + "loss": 1.3017, + "step": 972 + }, + { + "epoch": 0.1529603647153609, + "grad_norm": 0.23159849643707275, + "learning_rate": 4.929678917320357e-05, + "loss": 1.2122, + "step": 973 + }, + { + "epoch": 0.15311756961229342, + "grad_norm": 0.3281627595424652, + "learning_rate": 4.929533334719284e-05, + "loss": 1.1151, + "step": 974 + }, + { + "epoch": 0.15327477450922597, + "grad_norm": 0.24456332623958588, + "learning_rate": 4.929387603731558e-05, + "loss": 1.2107, + "step": 975 + }, + { + "epoch": 0.1534319794061585, + "grad_norm": 0.3623602092266083, + "learning_rate": 4.9292417243660814e-05, + "loss": 1.3041, + "step": 976 + }, + { + "epoch": 0.15358918430309104, + "grad_norm": 0.2319325953722, + "learning_rate": 4.929095696631763e-05, + 
"loss": 1.2331, + "step": 977 + }, + { + "epoch": 0.1537463892000236, + "grad_norm": 0.324660986661911, + "learning_rate": 4.92894952053752e-05, + "loss": 1.2511, + "step": 978 + }, + { + "epoch": 0.15390359409695611, + "grad_norm": 0.23866824805736542, + "learning_rate": 4.9288031960922834e-05, + "loss": 1.2709, + "step": 979 + }, + { + "epoch": 0.15406079899388866, + "grad_norm": 0.21922123432159424, + "learning_rate": 4.928656723304989e-05, + "loss": 1.3108, + "step": 980 + }, + { + "epoch": 0.1542180038908212, + "grad_norm": 0.2919687032699585, + "learning_rate": 4.92851010218458e-05, + "loss": 1.0465, + "step": 981 + }, + { + "epoch": 0.15437520878775374, + "grad_norm": 0.26380711793899536, + "learning_rate": 4.9283633327400156e-05, + "loss": 1.2066, + "step": 982 + }, + { + "epoch": 0.1545324136846863, + "grad_norm": 0.2607104182243347, + "learning_rate": 4.9282164149802576e-05, + "loss": 1.1227, + "step": 983 + }, + { + "epoch": 0.1546896185816188, + "grad_norm": 0.32302606105804443, + "learning_rate": 4.92806934891428e-05, + "loss": 1.2066, + "step": 984 + }, + { + "epoch": 0.15484682347855136, + "grad_norm": 0.28476232290267944, + "learning_rate": 4.927922134551065e-05, + "loss": 1.1447, + "step": 985 + }, + { + "epoch": 0.1550040283754839, + "grad_norm": 0.253738671541214, + "learning_rate": 4.9277747718996036e-05, + "loss": 1.2438, + "step": 986 + }, + { + "epoch": 0.15516123327241643, + "grad_norm": 0.29659610986709595, + "learning_rate": 4.927627260968896e-05, + "loss": 1.1946, + "step": 987 + }, + { + "epoch": 0.15531843816934898, + "grad_norm": 0.27436473965644836, + "learning_rate": 4.927479601767952e-05, + "loss": 1.2783, + "step": 988 + }, + { + "epoch": 0.1554756430662815, + "grad_norm": 0.21257497370243073, + "learning_rate": 4.9273317943057896e-05, + "loss": 1.204, + "step": 989 + }, + { + "epoch": 0.15563284796321405, + "grad_norm": 0.2546120584011078, + "learning_rate": 4.927183838591437e-05, + "loss": 1.2224, + "step": 990 + }, + { + "epoch": 0.1557900528601466, + "grad_norm": 0.2336018979549408, + "learning_rate": 4.92703573463393e-05, + "loss": 1.2903, + "step": 991 + }, + { + "epoch": 0.15594725775707913, + "grad_norm": 0.2048567533493042, + "learning_rate": 4.926887482442315e-05, + "loss": 1.2529, + "step": 992 + }, + { + "epoch": 0.15610446265401168, + "grad_norm": 0.20950450003147125, + "learning_rate": 4.926739082025646e-05, + "loss": 1.4247, + "step": 993 + }, + { + "epoch": 0.1562616675509442, + "grad_norm": 0.22965438663959503, + "learning_rate": 4.926590533392987e-05, + "loss": 1.2573, + "step": 994 + }, + { + "epoch": 0.15641887244787675, + "grad_norm": 0.25654879212379456, + "learning_rate": 4.9264418365534105e-05, + "loss": 1.2115, + "step": 995 + }, + { + "epoch": 0.1565760773448093, + "grad_norm": 0.26419493556022644, + "learning_rate": 4.9262929915159995e-05, + "loss": 1.2994, + "step": 996 + }, + { + "epoch": 0.15673328224174182, + "grad_norm": 0.3215327560901642, + "learning_rate": 4.926143998289843e-05, + "loss": 1.2268, + "step": 997 + }, + { + "epoch": 0.15689048713867437, + "grad_norm": 0.3190024495124817, + "learning_rate": 4.925994856884042e-05, + "loss": 1.2747, + "step": 998 + }, + { + "epoch": 0.15704769203560692, + "grad_norm": 0.26742681860923767, + "learning_rate": 4.9258455673077065e-05, + "loss": 1.2791, + "step": 999 + }, + { + "epoch": 0.15720489693253945, + "grad_norm": 0.21026930212974548, + "learning_rate": 4.925696129569953e-05, + "loss": 1.3694, + "step": 1000 + }, + { + "epoch": 0.157362101829472, + "grad_norm": 
0.2838928699493408, + "learning_rate": 4.925546543679909e-05, + "loss": 1.2589, + "step": 1001 + }, + { + "epoch": 0.15751930672640452, + "grad_norm": 0.3007844090461731, + "learning_rate": 4.9253968096467104e-05, + "loss": 1.2488, + "step": 1002 + }, + { + "epoch": 0.15767651162333707, + "grad_norm": 0.2473154515028, + "learning_rate": 4.925246927479503e-05, + "loss": 1.2288, + "step": 1003 + }, + { + "epoch": 0.15783371652026962, + "grad_norm": 0.2360457479953766, + "learning_rate": 4.925096897187441e-05, + "loss": 1.3148, + "step": 1004 + }, + { + "epoch": 0.15799092141720214, + "grad_norm": 0.3671962320804596, + "learning_rate": 4.924946718779687e-05, + "loss": 1.1999, + "step": 1005 + }, + { + "epoch": 0.1581481263141347, + "grad_norm": 0.28391456604003906, + "learning_rate": 4.924796392265414e-05, + "loss": 1.2296, + "step": 1006 + }, + { + "epoch": 0.1583053312110672, + "grad_norm": 0.2550790011882782, + "learning_rate": 4.924645917653802e-05, + "loss": 1.1923, + "step": 1007 + }, + { + "epoch": 0.15846253610799976, + "grad_norm": 0.301740825176239, + "learning_rate": 4.924495294954044e-05, + "loss": 1.2363, + "step": 1008 + }, + { + "epoch": 0.1586197410049323, + "grad_norm": 0.2608097195625305, + "learning_rate": 4.9243445241753374e-05, + "loss": 1.298, + "step": 1009 + }, + { + "epoch": 0.15877694590186484, + "grad_norm": 0.31052571535110474, + "learning_rate": 4.924193605326891e-05, + "loss": 1.3037, + "step": 1010 + }, + { + "epoch": 0.15893415079879739, + "grad_norm": 0.2420274317264557, + "learning_rate": 4.924042538417923e-05, + "loss": 1.2541, + "step": 1011 + }, + { + "epoch": 0.15909135569572994, + "grad_norm": 0.21056459844112396, + "learning_rate": 4.92389132345766e-05, + "loss": 1.3229, + "step": 1012 + }, + { + "epoch": 0.15924856059266246, + "grad_norm": 0.3283076286315918, + "learning_rate": 4.923739960455337e-05, + "loss": 1.1036, + "step": 1013 + }, + { + "epoch": 0.159405765489595, + "grad_norm": 0.2461448460817337, + "learning_rate": 4.9235884494201987e-05, + "loss": 1.2503, + "step": 1014 + }, + { + "epoch": 0.15956297038652753, + "grad_norm": 0.2919829189777374, + "learning_rate": 4.923436790361499e-05, + "loss": 1.2108, + "step": 1015 + }, + { + "epoch": 0.15972017528346008, + "grad_norm": 0.22029368579387665, + "learning_rate": 4.923284983288501e-05, + "loss": 1.2369, + "step": 1016 + }, + { + "epoch": 0.15987738018039263, + "grad_norm": 0.27863630652427673, + "learning_rate": 4.9231330282104756e-05, + "loss": 1.2555, + "step": 1017 + }, + { + "epoch": 0.16003458507732515, + "grad_norm": 0.28123825788497925, + "learning_rate": 4.9229809251367055e-05, + "loss": 1.1519, + "step": 1018 + }, + { + "epoch": 0.1601917899742577, + "grad_norm": 0.231231227517128, + "learning_rate": 4.922828674076478e-05, + "loss": 1.3488, + "step": 1019 + }, + { + "epoch": 0.16034899487119023, + "grad_norm": 0.2704828977584839, + "learning_rate": 4.9226762750390944e-05, + "loss": 1.1102, + "step": 1020 + }, + { + "epoch": 0.16050619976812278, + "grad_norm": 0.17970627546310425, + "learning_rate": 4.922523728033861e-05, + "loss": 1.2237, + "step": 1021 + }, + { + "epoch": 0.16066340466505533, + "grad_norm": 0.22386445105075836, + "learning_rate": 4.9223710330700956e-05, + "loss": 1.2564, + "step": 1022 + }, + { + "epoch": 0.16082060956198785, + "grad_norm": 0.21347114443778992, + "learning_rate": 4.922218190157124e-05, + "loss": 1.2433, + "step": 1023 + }, + { + "epoch": 0.1609778144589204, + "grad_norm": 0.23873676359653473, + "learning_rate": 4.9220651993042813e-05, + "loss": 
1.2018, + "step": 1024 + }, + { + "epoch": 0.16113501935585295, + "grad_norm": 0.26954975724220276, + "learning_rate": 4.921912060520912e-05, + "loss": 1.2118, + "step": 1025 + }, + { + "epoch": 0.16129222425278547, + "grad_norm": 0.3023718595504761, + "learning_rate": 4.9217587738163686e-05, + "loss": 1.2717, + "step": 1026 + }, + { + "epoch": 0.16144942914971802, + "grad_norm": 0.31107062101364136, + "learning_rate": 4.921605339200013e-05, + "loss": 1.2017, + "step": 1027 + }, + { + "epoch": 0.16160663404665054, + "grad_norm": 0.2795855402946472, + "learning_rate": 4.921451756681217e-05, + "loss": 1.3492, + "step": 1028 + }, + { + "epoch": 0.1617638389435831, + "grad_norm": 0.24515774846076965, + "learning_rate": 4.921298026269361e-05, + "loss": 1.2763, + "step": 1029 + }, + { + "epoch": 0.16192104384051564, + "grad_norm": 0.2603877782821655, + "learning_rate": 4.921144147973834e-05, + "loss": 1.3196, + "step": 1030 + }, + { + "epoch": 0.16207824873744817, + "grad_norm": 0.265697181224823, + "learning_rate": 4.9209901218040335e-05, + "loss": 1.3067, + "step": 1031 + }, + { + "epoch": 0.16223545363438072, + "grad_norm": 0.2554314136505127, + "learning_rate": 4.9208359477693686e-05, + "loss": 1.2347, + "step": 1032 + }, + { + "epoch": 0.16239265853131324, + "grad_norm": 0.3272973597049713, + "learning_rate": 4.920681625879254e-05, + "loss": 1.2104, + "step": 1033 + }, + { + "epoch": 0.1625498634282458, + "grad_norm": 0.21259547770023346, + "learning_rate": 4.9205271561431166e-05, + "loss": 1.2857, + "step": 1034 + }, + { + "epoch": 0.16270706832517834, + "grad_norm": 0.2505529820919037, + "learning_rate": 4.92037253857039e-05, + "loss": 1.1988, + "step": 1035 + }, + { + "epoch": 0.16286427322211086, + "grad_norm": 0.2369750738143921, + "learning_rate": 4.920217773170517e-05, + "loss": 1.2384, + "step": 1036 + }, + { + "epoch": 0.1630214781190434, + "grad_norm": 0.26577889919281006, + "learning_rate": 4.920062859952951e-05, + "loss": 1.1521, + "step": 1037 + }, + { + "epoch": 0.16317868301597593, + "grad_norm": 0.2224215418100357, + "learning_rate": 4.919907798927153e-05, + "loss": 1.3065, + "step": 1038 + }, + { + "epoch": 0.16333588791290848, + "grad_norm": 0.1882622092962265, + "learning_rate": 4.9197525901025944e-05, + "loss": 1.2472, + "step": 1039 + }, + { + "epoch": 0.16349309280984103, + "grad_norm": 0.25392916798591614, + "learning_rate": 4.919597233488754e-05, + "loss": 1.3387, + "step": 1040 + }, + { + "epoch": 0.16365029770677356, + "grad_norm": 0.3185995817184448, + "learning_rate": 4.91944172909512e-05, + "loss": 1.161, + "step": 1041 + }, + { + "epoch": 0.1638075026037061, + "grad_norm": 0.26924118399620056, + "learning_rate": 4.919286076931191e-05, + "loss": 1.0659, + "step": 1042 + }, + { + "epoch": 0.16396470750063866, + "grad_norm": 0.2224770486354828, + "learning_rate": 4.919130277006473e-05, + "loss": 1.2303, + "step": 1043 + }, + { + "epoch": 0.16412191239757118, + "grad_norm": 0.24008037149906158, + "learning_rate": 4.918974329330482e-05, + "loss": 1.2762, + "step": 1044 + }, + { + "epoch": 0.16427911729450373, + "grad_norm": 0.2728358209133148, + "learning_rate": 4.918818233912742e-05, + "loss": 1.1582, + "step": 1045 + }, + { + "epoch": 0.16443632219143625, + "grad_norm": 0.24911800026893616, + "learning_rate": 4.918661990762788e-05, + "loss": 1.2155, + "step": 1046 + }, + { + "epoch": 0.1645935270883688, + "grad_norm": 0.2444472759962082, + "learning_rate": 4.918505599890162e-05, + "loss": 1.2838, + "step": 1047 + }, + { + "epoch": 0.16475073198530135, + 
"grad_norm": 0.2379113882780075, + "learning_rate": 4.918349061304416e-05, + "loss": 1.3043, + "step": 1048 + }, + { + "epoch": 0.16490793688223387, + "grad_norm": 0.3085183799266815, + "learning_rate": 4.9181923750151095e-05, + "loss": 1.2568, + "step": 1049 + }, + { + "epoch": 0.16506514177916642, + "grad_norm": 0.2629674971103668, + "learning_rate": 4.918035541031814e-05, + "loss": 1.2171, + "step": 1050 + }, + { + "epoch": 0.16522234667609895, + "grad_norm": 0.2707282602787018, + "learning_rate": 4.917878559364107e-05, + "loss": 1.1597, + "step": 1051 + }, + { + "epoch": 0.1653795515730315, + "grad_norm": 0.27305370569229126, + "learning_rate": 4.9177214300215784e-05, + "loss": 1.36, + "step": 1052 + }, + { + "epoch": 0.16553675646996405, + "grad_norm": 0.20558474957942963, + "learning_rate": 4.9175641530138226e-05, + "loss": 1.2225, + "step": 1053 + }, + { + "epoch": 0.16569396136689657, + "grad_norm": 0.23680521547794342, + "learning_rate": 4.917406728350448e-05, + "loss": 1.2149, + "step": 1054 + }, + { + "epoch": 0.16585116626382912, + "grad_norm": 0.2101297229528427, + "learning_rate": 4.917249156041066e-05, + "loss": 1.2313, + "step": 1055 + }, + { + "epoch": 0.16600837116076167, + "grad_norm": 0.2601447105407715, + "learning_rate": 4.917091436095304e-05, + "loss": 1.1907, + "step": 1056 + }, + { + "epoch": 0.1661655760576942, + "grad_norm": 0.2113189995288849, + "learning_rate": 4.916933568522793e-05, + "loss": 1.2852, + "step": 1057 + }, + { + "epoch": 0.16632278095462674, + "grad_norm": 0.21135227382183075, + "learning_rate": 4.916775553333176e-05, + "loss": 1.2852, + "step": 1058 + }, + { + "epoch": 0.16647998585155926, + "grad_norm": 0.2743116617202759, + "learning_rate": 4.916617390536102e-05, + "loss": 1.2032, + "step": 1059 + }, + { + "epoch": 0.16663719074849181, + "grad_norm": 0.2520056664943695, + "learning_rate": 4.916459080141234e-05, + "loss": 1.3038, + "step": 1060 + }, + { + "epoch": 0.16679439564542436, + "grad_norm": 0.21614307165145874, + "learning_rate": 4.916300622158239e-05, + "loss": 1.2216, + "step": 1061 + }, + { + "epoch": 0.1669516005423569, + "grad_norm": 0.28615444898605347, + "learning_rate": 4.9161420165967956e-05, + "loss": 1.2162, + "step": 1062 + }, + { + "epoch": 0.16710880543928944, + "grad_norm": 0.33522137999534607, + "learning_rate": 4.91598326346659e-05, + "loss": 1.1458, + "step": 1063 + }, + { + "epoch": 0.16726601033622196, + "grad_norm": 0.30597957968711853, + "learning_rate": 4.9158243627773194e-05, + "loss": 1.2623, + "step": 1064 + }, + { + "epoch": 0.1674232152331545, + "grad_norm": 0.2643260061740875, + "learning_rate": 4.915665314538688e-05, + "loss": 1.2092, + "step": 1065 + }, + { + "epoch": 0.16758042013008706, + "grad_norm": 0.3190208673477173, + "learning_rate": 4.91550611876041e-05, + "loss": 1.0543, + "step": 1066 + }, + { + "epoch": 0.16773762502701958, + "grad_norm": 0.3194860816001892, + "learning_rate": 4.9153467754522095e-05, + "loss": 1.1393, + "step": 1067 + }, + { + "epoch": 0.16789482992395213, + "grad_norm": 0.2031661570072174, + "learning_rate": 4.915187284623817e-05, + "loss": 1.2136, + "step": 1068 + }, + { + "epoch": 0.16805203482088468, + "grad_norm": 0.2047189325094223, + "learning_rate": 4.915027646284974e-05, + "loss": 1.2962, + "step": 1069 + }, + { + "epoch": 0.1682092397178172, + "grad_norm": 0.31565096974372864, + "learning_rate": 4.9148678604454325e-05, + "loss": 1.1979, + "step": 1070 + }, + { + "epoch": 0.16836644461474976, + "grad_norm": 0.23875312507152557, + "learning_rate": 
4.914707927114949e-05, + "loss": 1.3002, + "step": 1071 + }, + { + "epoch": 0.16852364951168228, + "grad_norm": 0.24424909055233002, + "learning_rate": 4.9145478463032924e-05, + "loss": 1.1491, + "step": 1072 + }, + { + "epoch": 0.16868085440861483, + "grad_norm": 0.28339752554893494, + "learning_rate": 4.91438761802024e-05, + "loss": 1.2398, + "step": 1073 + }, + { + "epoch": 0.16883805930554738, + "grad_norm": 0.2435888648033142, + "learning_rate": 4.9142272422755786e-05, + "loss": 1.3292, + "step": 1074 + }, + { + "epoch": 0.1689952642024799, + "grad_norm": 0.21540984511375427, + "learning_rate": 4.9140667190791026e-05, + "loss": 1.3665, + "step": 1075 + }, + { + "epoch": 0.16915246909941245, + "grad_norm": 0.2556820809841156, + "learning_rate": 4.913906048440617e-05, + "loss": 1.2557, + "step": 1076 + }, + { + "epoch": 0.16930967399634497, + "grad_norm": 0.23769475519657135, + "learning_rate": 4.913745230369934e-05, + "loss": 1.2163, + "step": 1077 + }, + { + "epoch": 0.16946687889327752, + "grad_norm": 0.31578120589256287, + "learning_rate": 4.913584264876875e-05, + "loss": 1.3176, + "step": 1078 + }, + { + "epoch": 0.16962408379021007, + "grad_norm": 0.22278232872486115, + "learning_rate": 4.913423151971273e-05, + "loss": 1.2206, + "step": 1079 + }, + { + "epoch": 0.1697812886871426, + "grad_norm": 0.31810736656188965, + "learning_rate": 4.913261891662967e-05, + "loss": 1.2254, + "step": 1080 + }, + { + "epoch": 0.16993849358407515, + "grad_norm": 0.22623823583126068, + "learning_rate": 4.913100483961807e-05, + "loss": 1.208, + "step": 1081 + }, + { + "epoch": 0.1700956984810077, + "grad_norm": 0.27108776569366455, + "learning_rate": 4.9129389288776504e-05, + "loss": 1.2989, + "step": 1082 + }, + { + "epoch": 0.17025290337794022, + "grad_norm": 0.194550558924675, + "learning_rate": 4.912777226420365e-05, + "loss": 1.3849, + "step": 1083 + }, + { + "epoch": 0.17041010827487277, + "grad_norm": 0.20856213569641113, + "learning_rate": 4.912615376599826e-05, + "loss": 1.2736, + "step": 1084 + }, + { + "epoch": 0.1705673131718053, + "grad_norm": 0.19355203211307526, + "learning_rate": 4.91245337942592e-05, + "loss": 1.256, + "step": 1085 + }, + { + "epoch": 0.17072451806873784, + "grad_norm": 0.21832303702831268, + "learning_rate": 4.9122912349085395e-05, + "loss": 1.1987, + "step": 1086 + }, + { + "epoch": 0.1708817229656704, + "grad_norm": 0.22642913460731506, + "learning_rate": 4.912128943057589e-05, + "loss": 1.3043, + "step": 1087 + }, + { + "epoch": 0.1710389278626029, + "grad_norm": 0.22713351249694824, + "learning_rate": 4.911966503882981e-05, + "loss": 1.1951, + "step": 1088 + }, + { + "epoch": 0.17119613275953546, + "grad_norm": 0.29707837104797363, + "learning_rate": 4.911803917394634e-05, + "loss": 1.2674, + "step": 1089 + }, + { + "epoch": 0.17135333765646799, + "grad_norm": 0.27017104625701904, + "learning_rate": 4.911641183602481e-05, + "loss": 1.1727, + "step": 1090 + }, + { + "epoch": 0.17151054255340054, + "grad_norm": 0.23867738246917725, + "learning_rate": 4.911478302516461e-05, + "loss": 1.2061, + "step": 1091 + }, + { + "epoch": 0.17166774745033309, + "grad_norm": 0.26270055770874023, + "learning_rate": 4.911315274146521e-05, + "loss": 1.2735, + "step": 1092 + }, + { + "epoch": 0.1718249523472656, + "grad_norm": 0.25167661905288696, + "learning_rate": 4.911152098502617e-05, + "loss": 1.2643, + "step": 1093 + }, + { + "epoch": 0.17198215724419816, + "grad_norm": 0.46500882506370544, + "learning_rate": 4.9109887755947185e-05, + "loss": 1.1743, + "step": 1094 + }, + { + 
"epoch": 0.1721393621411307, + "grad_norm": 0.2189512848854065, + "learning_rate": 4.910825305432798e-05, + "loss": 1.1232, + "step": 1095 + }, + { + "epoch": 0.17229656703806323, + "grad_norm": 0.21188926696777344, + "learning_rate": 4.9106616880268405e-05, + "loss": 1.2031, + "step": 1096 + }, + { + "epoch": 0.17245377193499578, + "grad_norm": 0.2135314792394638, + "learning_rate": 4.910497923386839e-05, + "loss": 1.2547, + "step": 1097 + }, + { + "epoch": 0.1726109768319283, + "grad_norm": 0.27527931332588196, + "learning_rate": 4.910334011522796e-05, + "loss": 1.119, + "step": 1098 + }, + { + "epoch": 0.17276818172886085, + "grad_norm": 0.23516559600830078, + "learning_rate": 4.910169952444722e-05, + "loss": 1.3006, + "step": 1099 + }, + { + "epoch": 0.1729253866257934, + "grad_norm": 0.23166057467460632, + "learning_rate": 4.910005746162637e-05, + "loss": 1.281, + "step": 1100 + }, + { + "epoch": 0.17308259152272593, + "grad_norm": 0.22281822562217712, + "learning_rate": 4.9098413926865714e-05, + "loss": 1.1526, + "step": 1101 + }, + { + "epoch": 0.17323979641965848, + "grad_norm": 0.32062655687332153, + "learning_rate": 4.909676892026563e-05, + "loss": 1.2388, + "step": 1102 + }, + { + "epoch": 0.173397001316591, + "grad_norm": 0.29868969321250916, + "learning_rate": 4.909512244192657e-05, + "loss": 1.2303, + "step": 1103 + }, + { + "epoch": 0.17355420621352355, + "grad_norm": 0.23143270611763, + "learning_rate": 4.90934744919491e-05, + "loss": 1.2137, + "step": 1104 + }, + { + "epoch": 0.1737114111104561, + "grad_norm": 0.2830474376678467, + "learning_rate": 4.909182507043389e-05, + "loss": 1.2178, + "step": 1105 + }, + { + "epoch": 0.17386861600738862, + "grad_norm": 0.22427986562252045, + "learning_rate": 4.909017417748166e-05, + "loss": 1.3153, + "step": 1106 + }, + { + "epoch": 0.17402582090432117, + "grad_norm": 0.2587423622608185, + "learning_rate": 4.908852181319326e-05, + "loss": 1.2669, + "step": 1107 + }, + { + "epoch": 0.17418302580125372, + "grad_norm": 0.24905993044376373, + "learning_rate": 4.9086867977669594e-05, + "loss": 1.2549, + "step": 1108 + }, + { + "epoch": 0.17434023069818624, + "grad_norm": 0.26877379417419434, + "learning_rate": 4.908521267101167e-05, + "loss": 1.2694, + "step": 1109 + }, + { + "epoch": 0.1744974355951188, + "grad_norm": 0.2501152753829956, + "learning_rate": 4.9083555893320596e-05, + "loss": 1.2241, + "step": 1110 + }, + { + "epoch": 0.17465464049205132, + "grad_norm": 0.27815014123916626, + "learning_rate": 4.908189764469757e-05, + "loss": 1.2152, + "step": 1111 + }, + { + "epoch": 0.17481184538898387, + "grad_norm": 0.32891881465911865, + "learning_rate": 4.9080237925243856e-05, + "loss": 1.2638, + "step": 1112 + }, + { + "epoch": 0.17496905028591642, + "grad_norm": 0.2137015461921692, + "learning_rate": 4.9078576735060825e-05, + "loss": 1.2041, + "step": 1113 + }, + { + "epoch": 0.17512625518284894, + "grad_norm": 0.17862486839294434, + "learning_rate": 4.907691407424995e-05, + "loss": 1.3349, + "step": 1114 + }, + { + "epoch": 0.1752834600797815, + "grad_norm": 0.25791284441947937, + "learning_rate": 4.907524994291276e-05, + "loss": 1.2337, + "step": 1115 + }, + { + "epoch": 0.175440664976714, + "grad_norm": 0.24266491830348969, + "learning_rate": 4.90735843411509e-05, + "loss": 1.0939, + "step": 1116 + }, + { + "epoch": 0.17559786987364656, + "grad_norm": 0.2618250250816345, + "learning_rate": 4.9071917269066114e-05, + "loss": 1.2855, + "step": 1117 + }, + { + "epoch": 0.1757550747705791, + "grad_norm": 0.2477390021085739, + 
"learning_rate": 4.9070248726760206e-05, + "loss": 1.1675, + "step": 1118 + }, + { + "epoch": 0.17591227966751163, + "grad_norm": 0.29105502367019653, + "learning_rate": 4.906857871433508e-05, + "loss": 1.183, + "step": 1119 + }, + { + "epoch": 0.17606948456444418, + "grad_norm": 0.2923283874988556, + "learning_rate": 4.906690723189275e-05, + "loss": 1.1386, + "step": 1120 + }, + { + "epoch": 0.17606948456444418, + "eval_loss": 1.219694972038269, + "eval_runtime": 2300.2931, + "eval_samples_per_second": 4.025, + "eval_steps_per_second": 2.012, + "step": 1120 + }, + { + "epoch": 0.17622668946137673, + "grad_norm": 0.3278633952140808, + "learning_rate": 4.906523427953529e-05, + "loss": 1.1738, + "step": 1121 + }, + { + "epoch": 0.17638389435830926, + "grad_norm": 0.31546783447265625, + "learning_rate": 4.906355985736488e-05, + "loss": 1.0894, + "step": 1122 + }, + { + "epoch": 0.1765410992552418, + "grad_norm": 0.28350481390953064, + "learning_rate": 4.906188396548379e-05, + "loss": 1.2774, + "step": 1123 + }, + { + "epoch": 0.17669830415217433, + "grad_norm": 0.21374982595443726, + "learning_rate": 4.9060206603994385e-05, + "loss": 1.37, + "step": 1124 + }, + { + "epoch": 0.17685550904910688, + "grad_norm": 0.2343566119670868, + "learning_rate": 4.9058527772999095e-05, + "loss": 1.2065, + "step": 1125 + }, + { + "epoch": 0.17701271394603943, + "grad_norm": 0.29571887850761414, + "learning_rate": 4.905684747260047e-05, + "loss": 1.1967, + "step": 1126 + }, + { + "epoch": 0.17716991884297195, + "grad_norm": 0.2689303457736969, + "learning_rate": 4.905516570290113e-05, + "loss": 1.2337, + "step": 1127 + }, + { + "epoch": 0.1773271237399045, + "grad_norm": 0.22743673622608185, + "learning_rate": 4.90534824640038e-05, + "loss": 1.1673, + "step": 1128 + }, + { + "epoch": 0.17748432863683702, + "grad_norm": 0.36731019616127014, + "learning_rate": 4.905179775601126e-05, + "loss": 1.1397, + "step": 1129 + }, + { + "epoch": 0.17764153353376957, + "grad_norm": 0.2571149468421936, + "learning_rate": 4.905011157902645e-05, + "loss": 1.1166, + "step": 1130 + }, + { + "epoch": 0.17779873843070212, + "grad_norm": 0.2615256905555725, + "learning_rate": 4.904842393315231e-05, + "loss": 1.2095, + "step": 1131 + }, + { + "epoch": 0.17795594332763465, + "grad_norm": 0.28919360041618347, + "learning_rate": 4.904673481849194e-05, + "loss": 1.0976, + "step": 1132 + }, + { + "epoch": 0.1781131482245672, + "grad_norm": 0.3858489990234375, + "learning_rate": 4.90450442351485e-05, + "loss": 1.1934, + "step": 1133 + }, + { + "epoch": 0.17827035312149975, + "grad_norm": 0.2448245733976364, + "learning_rate": 4.904335218322524e-05, + "loss": 1.1604, + "step": 1134 + }, + { + "epoch": 0.17842755801843227, + "grad_norm": 0.2626294195652008, + "learning_rate": 4.9041658662825514e-05, + "loss": 1.1301, + "step": 1135 + }, + { + "epoch": 0.17858476291536482, + "grad_norm": 0.3016091287136078, + "learning_rate": 4.903996367405275e-05, + "loss": 1.2579, + "step": 1136 + }, + { + "epoch": 0.17874196781229734, + "grad_norm": 0.28168612718582153, + "learning_rate": 4.9038267217010455e-05, + "loss": 1.1471, + "step": 1137 + }, + { + "epoch": 0.1788991727092299, + "grad_norm": 0.29256439208984375, + "learning_rate": 4.903656929180228e-05, + "loss": 1.1598, + "step": 1138 + }, + { + "epoch": 0.17905637760616244, + "grad_norm": 0.19786624610424042, + "learning_rate": 4.9034869898531895e-05, + "loss": 1.2115, + "step": 1139 + }, + { + "epoch": 0.17921358250309496, + "grad_norm": 0.17216260731220245, + "learning_rate": 
4.9033169037303106e-05, + "loss": 1.2471, + "step": 1140 + }, + { + "epoch": 0.17937078740002751, + "grad_norm": 0.22571730613708496, + "learning_rate": 4.9031466708219785e-05, + "loss": 1.2226, + "step": 1141 + }, + { + "epoch": 0.17952799229696004, + "grad_norm": 0.25510528683662415, + "learning_rate": 4.9029762911385915e-05, + "loss": 1.1428, + "step": 1142 + }, + { + "epoch": 0.1796851971938926, + "grad_norm": 0.19014020264148712, + "learning_rate": 4.902805764690556e-05, + "loss": 1.2268, + "step": 1143 + }, + { + "epoch": 0.17984240209082514, + "grad_norm": 0.25155729055404663, + "learning_rate": 4.902635091488286e-05, + "loss": 1.1943, + "step": 1144 + }, + { + "epoch": 0.17999960698775766, + "grad_norm": 0.3109387159347534, + "learning_rate": 4.902464271542206e-05, + "loss": 1.176, + "step": 1145 + }, + { + "epoch": 0.1801568118846902, + "grad_norm": 0.2269504815340042, + "learning_rate": 4.9022933048627496e-05, + "loss": 1.2166, + "step": 1146 + }, + { + "epoch": 0.18031401678162276, + "grad_norm": 0.20270425081253052, + "learning_rate": 4.902122191460358e-05, + "loss": 1.235, + "step": 1147 + }, + { + "epoch": 0.18047122167855528, + "grad_norm": 0.2519841194152832, + "learning_rate": 4.901950931345481e-05, + "loss": 1.2418, + "step": 1148 + }, + { + "epoch": 0.18062842657548783, + "grad_norm": 0.1967516988515854, + "learning_rate": 4.901779524528582e-05, + "loss": 1.2979, + "step": 1149 + }, + { + "epoch": 0.18078563147242036, + "grad_norm": 0.21120384335517883, + "learning_rate": 4.901607971020127e-05, + "loss": 1.1557, + "step": 1150 + }, + { + "epoch": 0.1809428363693529, + "grad_norm": 0.31649792194366455, + "learning_rate": 4.9014362708305944e-05, + "loss": 1.3237, + "step": 1151 + }, + { + "epoch": 0.18110004126628546, + "grad_norm": 0.24945318698883057, + "learning_rate": 4.901264423970471e-05, + "loss": 1.2099, + "step": 1152 + }, + { + "epoch": 0.18125724616321798, + "grad_norm": 0.30652904510498047, + "learning_rate": 4.901092430450254e-05, + "loss": 1.1918, + "step": 1153 + }, + { + "epoch": 0.18141445106015053, + "grad_norm": 0.2480253279209137, + "learning_rate": 4.900920290280446e-05, + "loss": 1.2675, + "step": 1154 + }, + { + "epoch": 0.18157165595708305, + "grad_norm": 0.3034304976463318, + "learning_rate": 4.900748003471561e-05, + "loss": 1.2012, + "step": 1155 + }, + { + "epoch": 0.1817288608540156, + "grad_norm": 0.2113679200410843, + "learning_rate": 4.900575570034124e-05, + "loss": 1.2824, + "step": 1156 + }, + { + "epoch": 0.18188606575094815, + "grad_norm": 0.34726831316947937, + "learning_rate": 4.9004029899786627e-05, + "loss": 1.1426, + "step": 1157 + }, + { + "epoch": 0.18204327064788067, + "grad_norm": 0.20344194769859314, + "learning_rate": 4.900230263315722e-05, + "loss": 1.2096, + "step": 1158 + }, + { + "epoch": 0.18220047554481322, + "grad_norm": 0.28635072708129883, + "learning_rate": 4.900057390055847e-05, + "loss": 1.166, + "step": 1159 + }, + { + "epoch": 0.18235768044174577, + "grad_norm": 0.21670344471931458, + "learning_rate": 4.8998843702095995e-05, + "loss": 1.2103, + "step": 1160 + }, + { + "epoch": 0.1825148853386783, + "grad_norm": 0.31661516427993774, + "learning_rate": 4.899711203787545e-05, + "loss": 1.2345, + "step": 1161 + }, + { + "epoch": 0.18267209023561085, + "grad_norm": 0.30255556106567383, + "learning_rate": 4.899537890800261e-05, + "loss": 1.2342, + "step": 1162 + }, + { + "epoch": 0.18282929513254337, + "grad_norm": 0.23636944591999054, + "learning_rate": 4.899364431258332e-05, + "loss": 1.1685, + "step": 1163 + }, + { + 
"epoch": 0.18298650002947592, + "grad_norm": 0.27452319860458374, + "learning_rate": 4.8991908251723524e-05, + "loss": 1.1263, + "step": 1164 + }, + { + "epoch": 0.18314370492640847, + "grad_norm": 0.28636041283607483, + "learning_rate": 4.899017072552926e-05, + "loss": 1.1961, + "step": 1165 + }, + { + "epoch": 0.183300909823341, + "grad_norm": 0.29220953583717346, + "learning_rate": 4.8988431734106635e-05, + "loss": 1.2414, + "step": 1166 + }, + { + "epoch": 0.18345811472027354, + "grad_norm": 0.20738068222999573, + "learning_rate": 4.898669127756188e-05, + "loss": 1.1499, + "step": 1167 + }, + { + "epoch": 0.18361531961720606, + "grad_norm": 0.19913551211357117, + "learning_rate": 4.898494935600127e-05, + "loss": 1.3538, + "step": 1168 + }, + { + "epoch": 0.1837725245141386, + "grad_norm": 0.256979763507843, + "learning_rate": 4.8983205969531234e-05, + "loss": 1.1979, + "step": 1169 + }, + { + "epoch": 0.18392972941107116, + "grad_norm": 0.26307129859924316, + "learning_rate": 4.898146111825821e-05, + "loss": 1.2054, + "step": 1170 + }, + { + "epoch": 0.18408693430800369, + "grad_norm": 0.2451772540807724, + "learning_rate": 4.897971480228879e-05, + "loss": 1.1901, + "step": 1171 + }, + { + "epoch": 0.18424413920493624, + "grad_norm": 0.3223975896835327, + "learning_rate": 4.897796702172962e-05, + "loss": 1.1825, + "step": 1172 + }, + { + "epoch": 0.18440134410186879, + "grad_norm": 0.34991317987442017, + "learning_rate": 4.897621777668746e-05, + "loss": 1.1371, + "step": 1173 + }, + { + "epoch": 0.1845585489988013, + "grad_norm": 0.2680002748966217, + "learning_rate": 4.897446706726915e-05, + "loss": 1.2179, + "step": 1174 + }, + { + "epoch": 0.18471575389573386, + "grad_norm": 0.21509090065956116, + "learning_rate": 4.897271489358159e-05, + "loss": 1.1284, + "step": 1175 + }, + { + "epoch": 0.18487295879266638, + "grad_norm": 0.20545831322669983, + "learning_rate": 4.8970961255731826e-05, + "loss": 1.2188, + "step": 1176 + }, + { + "epoch": 0.18503016368959893, + "grad_norm": 0.23479585349559784, + "learning_rate": 4.896920615382695e-05, + "loss": 1.2947, + "step": 1177 + }, + { + "epoch": 0.18518736858653148, + "grad_norm": 0.2880757749080658, + "learning_rate": 4.896744958797417e-05, + "loss": 1.1443, + "step": 1178 + }, + { + "epoch": 0.185344573483464, + "grad_norm": 0.2431318610906601, + "learning_rate": 4.8965691558280744e-05, + "loss": 1.1123, + "step": 1179 + }, + { + "epoch": 0.18550177838039655, + "grad_norm": 0.21252453327178955, + "learning_rate": 4.896393206485407e-05, + "loss": 1.326, + "step": 1180 + }, + { + "epoch": 0.18565898327732908, + "grad_norm": 0.28821709752082825, + "learning_rate": 4.8962171107801596e-05, + "loss": 1.1508, + "step": 1181 + }, + { + "epoch": 0.18581618817426163, + "grad_norm": 0.2636358141899109, + "learning_rate": 4.8960408687230886e-05, + "loss": 1.1061, + "step": 1182 + }, + { + "epoch": 0.18597339307119418, + "grad_norm": 0.23121225833892822, + "learning_rate": 4.895864480324957e-05, + "loss": 1.2486, + "step": 1183 + }, + { + "epoch": 0.1861305979681267, + "grad_norm": 0.29034245014190674, + "learning_rate": 4.895687945596539e-05, + "loss": 1.186, + "step": 1184 + }, + { + "epoch": 0.18628780286505925, + "grad_norm": 0.3220363259315491, + "learning_rate": 4.895511264548617e-05, + "loss": 1.1727, + "step": 1185 + }, + { + "epoch": 0.1864450077619918, + "grad_norm": 0.2863159477710724, + "learning_rate": 4.89533443719198e-05, + "loss": 1.1946, + "step": 1186 + }, + { + "epoch": 0.18660221265892432, + "grad_norm": 0.27671483159065247, + 
"learning_rate": 4.89515746353743e-05, + "loss": 1.2271, + "step": 1187 + }, + { + "epoch": 0.18675941755585687, + "grad_norm": 0.2535041570663452, + "learning_rate": 4.894980343595775e-05, + "loss": 1.2437, + "step": 1188 + }, + { + "epoch": 0.1869166224527894, + "grad_norm": 0.34405645728111267, + "learning_rate": 4.894803077377833e-05, + "loss": 1.1397, + "step": 1189 + }, + { + "epoch": 0.18707382734972194, + "grad_norm": 0.28299692273139954, + "learning_rate": 4.8946256648944307e-05, + "loss": 1.1215, + "step": 1190 + }, + { + "epoch": 0.1872310322466545, + "grad_norm": 0.1962118297815323, + "learning_rate": 4.8944481061564035e-05, + "loss": 1.1908, + "step": 1191 + }, + { + "epoch": 0.18738823714358702, + "grad_norm": 0.24563154578208923, + "learning_rate": 4.894270401174597e-05, + "loss": 1.2265, + "step": 1192 + }, + { + "epoch": 0.18754544204051957, + "grad_norm": 0.22452424466609955, + "learning_rate": 4.894092549959862e-05, + "loss": 1.1673, + "step": 1193 + }, + { + "epoch": 0.1877026469374521, + "grad_norm": 0.1847248673439026, + "learning_rate": 4.8939145525230646e-05, + "loss": 1.2706, + "step": 1194 + }, + { + "epoch": 0.18785985183438464, + "grad_norm": 0.2578265964984894, + "learning_rate": 4.893736408875075e-05, + "loss": 1.2011, + "step": 1195 + }, + { + "epoch": 0.1880170567313172, + "grad_norm": 0.2686786353588104, + "learning_rate": 4.893558119026772e-05, + "loss": 1.3191, + "step": 1196 + }, + { + "epoch": 0.1881742616282497, + "grad_norm": 0.27492383122444153, + "learning_rate": 4.893379682989047e-05, + "loss": 1.1755, + "step": 1197 + }, + { + "epoch": 0.18833146652518226, + "grad_norm": 0.2544412612915039, + "learning_rate": 4.8932011007727965e-05, + "loss": 1.1842, + "step": 1198 + }, + { + "epoch": 0.1884886714221148, + "grad_norm": 0.24790935218334198, + "learning_rate": 4.893022372388928e-05, + "loss": 1.2408, + "step": 1199 + }, + { + "epoch": 0.18864587631904733, + "grad_norm": 0.2788006067276001, + "learning_rate": 4.892843497848358e-05, + "loss": 1.2671, + "step": 1200 + }, + { + "epoch": 0.18880308121597988, + "grad_norm": 0.2571476101875305, + "learning_rate": 4.892664477162012e-05, + "loss": 1.1894, + "step": 1201 + }, + { + "epoch": 0.1889602861129124, + "grad_norm": 0.22788426280021667, + "learning_rate": 4.892485310340822e-05, + "loss": 1.2261, + "step": 1202 + }, + { + "epoch": 0.18911749100984496, + "grad_norm": 0.2010507732629776, + "learning_rate": 4.892305997395733e-05, + "loss": 1.2399, + "step": 1203 + }, + { + "epoch": 0.1892746959067775, + "grad_norm": 0.23946425318717957, + "learning_rate": 4.892126538337696e-05, + "loss": 1.2727, + "step": 1204 + }, + { + "epoch": 0.18943190080371003, + "grad_norm": 0.2885929346084595, + "learning_rate": 4.8919469331776714e-05, + "loss": 1.2376, + "step": 1205 + }, + { + "epoch": 0.18958910570064258, + "grad_norm": 0.31879860162734985, + "learning_rate": 4.891767181926629e-05, + "loss": 1.22, + "step": 1206 + }, + { + "epoch": 0.1897463105975751, + "grad_norm": 0.2895459532737732, + "learning_rate": 4.891587284595546e-05, + "loss": 1.2387, + "step": 1207 + }, + { + "epoch": 0.18990351549450765, + "grad_norm": 0.27507272362709045, + "learning_rate": 4.891407241195412e-05, + "loss": 1.1723, + "step": 1208 + }, + { + "epoch": 0.1900607203914402, + "grad_norm": 0.26780039072036743, + "learning_rate": 4.8912270517372224e-05, + "loss": 1.1549, + "step": 1209 + }, + { + "epoch": 0.19021792528837272, + "grad_norm": 0.1915176510810852, + "learning_rate": 4.8910467162319826e-05, + "loss": 1.109, + "step": 1210 + }, 
+ { + "epoch": 0.19037513018530527, + "grad_norm": 0.25054261088371277, + "learning_rate": 4.8908662346907064e-05, + "loss": 1.1197, + "step": 1211 + }, + { + "epoch": 0.19053233508223782, + "grad_norm": 0.24239963293075562, + "learning_rate": 4.8906856071244176e-05, + "loss": 1.2614, + "step": 1212 + }, + { + "epoch": 0.19068953997917035, + "grad_norm": 0.21543578803539276, + "learning_rate": 4.890504833544147e-05, + "loss": 1.3804, + "step": 1213 + }, + { + "epoch": 0.1908467448761029, + "grad_norm": 0.2045610100030899, + "learning_rate": 4.8903239139609376e-05, + "loss": 1.2108, + "step": 1214 + }, + { + "epoch": 0.19100394977303542, + "grad_norm": 0.2209930568933487, + "learning_rate": 4.890142848385838e-05, + "loss": 1.2329, + "step": 1215 + }, + { + "epoch": 0.19116115466996797, + "grad_norm": 0.24921675026416779, + "learning_rate": 4.889961636829906e-05, + "loss": 1.2009, + "step": 1216 + }, + { + "epoch": 0.19131835956690052, + "grad_norm": 0.2356979250907898, + "learning_rate": 4.8897802793042115e-05, + "loss": 1.211, + "step": 1217 + }, + { + "epoch": 0.19147556446383304, + "grad_norm": 0.20199252665042877, + "learning_rate": 4.88959877581983e-05, + "loss": 1.18, + "step": 1218 + }, + { + "epoch": 0.1916327693607656, + "grad_norm": 0.24907195568084717, + "learning_rate": 4.889417126387846e-05, + "loss": 1.2438, + "step": 1219 + }, + { + "epoch": 0.19178997425769811, + "grad_norm": 0.2976427674293518, + "learning_rate": 4.889235331019356e-05, + "loss": 1.1526, + "step": 1220 + }, + { + "epoch": 0.19194717915463066, + "grad_norm": 0.25074872374534607, + "learning_rate": 4.889053389725463e-05, + "loss": 1.1805, + "step": 1221 + }, + { + "epoch": 0.19210438405156322, + "grad_norm": 0.2157672792673111, + "learning_rate": 4.8888713025172776e-05, + "loss": 1.2103, + "step": 1222 + }, + { + "epoch": 0.19226158894849574, + "grad_norm": 0.24573171138763428, + "learning_rate": 4.888689069405923e-05, + "loss": 1.1981, + "step": 1223 + }, + { + "epoch": 0.1924187938454283, + "grad_norm": 0.294160932302475, + "learning_rate": 4.888506690402528e-05, + "loss": 1.2667, + "step": 1224 + }, + { + "epoch": 0.19257599874236084, + "grad_norm": 0.8444136381149292, + "learning_rate": 4.8883241655182314e-05, + "loss": 1.1977, + "step": 1225 + }, + { + "epoch": 0.19273320363929336, + "grad_norm": 0.4191160798072815, + "learning_rate": 4.888141494764182e-05, + "loss": 1.1981, + "step": 1226 + }, + { + "epoch": 0.1928904085362259, + "grad_norm": 0.31621554493904114, + "learning_rate": 4.8879586781515376e-05, + "loss": 1.2224, + "step": 1227 + }, + { + "epoch": 0.19304761343315843, + "grad_norm": 0.2715776860713959, + "learning_rate": 4.887775715691462e-05, + "loss": 1.1029, + "step": 1228 + }, + { + "epoch": 0.19320481833009098, + "grad_norm": 0.2641848623752594, + "learning_rate": 4.88759260739513e-05, + "loss": 1.1738, + "step": 1229 + }, + { + "epoch": 0.19336202322702353, + "grad_norm": 0.2537270188331604, + "learning_rate": 4.887409353273727e-05, + "loss": 1.2847, + "step": 1230 + }, + { + "epoch": 0.19351922812395606, + "grad_norm": 0.2998782694339752, + "learning_rate": 4.8872259533384423e-05, + "loss": 1.1814, + "step": 1231 + }, + { + "epoch": 0.1936764330208886, + "grad_norm": 0.2254815697669983, + "learning_rate": 4.8870424076004806e-05, + "loss": 1.2004, + "step": 1232 + }, + { + "epoch": 0.19383363791782113, + "grad_norm": 0.3711993396282196, + "learning_rate": 4.88685871607105e-05, + "loss": 1.1502, + "step": 1233 + }, + { + "epoch": 0.19399084281475368, + "grad_norm": 0.24783778190612793, + 
"learning_rate": 4.886674878761371e-05, + "loss": 1.1185, + "step": 1234 + }, + { + "epoch": 0.19414804771168623, + "grad_norm": 0.1896362453699112, + "learning_rate": 4.88649089568267e-05, + "loss": 1.1856, + "step": 1235 + }, + { + "epoch": 0.19430525260861875, + "grad_norm": 0.28106558322906494, + "learning_rate": 4.886306766846187e-05, + "loss": 1.2196, + "step": 1236 + }, + { + "epoch": 0.1944624575055513, + "grad_norm": 0.3023208975791931, + "learning_rate": 4.8861224922631645e-05, + "loss": 1.1836, + "step": 1237 + }, + { + "epoch": 0.19461966240248385, + "grad_norm": 0.36752450466156006, + "learning_rate": 4.8859380719448596e-05, + "loss": 1.1831, + "step": 1238 + }, + { + "epoch": 0.19477686729941637, + "grad_norm": 0.2593975365161896, + "learning_rate": 4.885753505902535e-05, + "loss": 1.1955, + "step": 1239 + }, + { + "epoch": 0.19493407219634892, + "grad_norm": 0.2952882647514343, + "learning_rate": 4.885568794147463e-05, + "loss": 1.108, + "step": 1240 + }, + { + "epoch": 0.19509127709328145, + "grad_norm": 0.2335767149925232, + "learning_rate": 4.885383936690926e-05, + "loss": 1.2389, + "step": 1241 + }, + { + "epoch": 0.195248481990214, + "grad_norm": 0.3618619441986084, + "learning_rate": 4.885198933544214e-05, + "loss": 1.0247, + "step": 1242 + }, + { + "epoch": 0.19540568688714655, + "grad_norm": 0.26691627502441406, + "learning_rate": 4.885013784718626e-05, + "loss": 1.1516, + "step": 1243 + }, + { + "epoch": 0.19556289178407907, + "grad_norm": 0.2977723777294159, + "learning_rate": 4.8848284902254705e-05, + "loss": 1.1617, + "step": 1244 + }, + { + "epoch": 0.19572009668101162, + "grad_norm": 0.33515632152557373, + "learning_rate": 4.884643050076064e-05, + "loss": 1.1789, + "step": 1245 + }, + { + "epoch": 0.19587730157794414, + "grad_norm": 0.275840163230896, + "learning_rate": 4.8844574642817334e-05, + "loss": 1.1103, + "step": 1246 + }, + { + "epoch": 0.1960345064748767, + "grad_norm": 0.26756566762924194, + "learning_rate": 4.884271732853813e-05, + "loss": 1.2101, + "step": 1247 + }, + { + "epoch": 0.19619171137180924, + "grad_norm": 0.20770548284053802, + "learning_rate": 4.884085855803647e-05, + "loss": 1.2506, + "step": 1248 + }, + { + "epoch": 0.19634891626874176, + "grad_norm": 0.2700664699077606, + "learning_rate": 4.883899833142588e-05, + "loss": 1.2034, + "step": 1249 + }, + { + "epoch": 0.1965061211656743, + "grad_norm": 0.2403496950864792, + "learning_rate": 4.883713664881997e-05, + "loss": 1.1622, + "step": 1250 + }, + { + "epoch": 0.19666332606260686, + "grad_norm": 0.2710270881652832, + "learning_rate": 4.883527351033245e-05, + "loss": 1.0679, + "step": 1251 + }, + { + "epoch": 0.19682053095953939, + "grad_norm": 0.2600773870944977, + "learning_rate": 4.8833408916077104e-05, + "loss": 1.3343, + "step": 1252 + }, + { + "epoch": 0.19697773585647194, + "grad_norm": 0.25740665197372437, + "learning_rate": 4.883154286616783e-05, + "loss": 1.2206, + "step": 1253 + }, + { + "epoch": 0.19713494075340446, + "grad_norm": 0.3393601179122925, + "learning_rate": 4.8829675360718585e-05, + "loss": 1.1518, + "step": 1254 + }, + { + "epoch": 0.197292145650337, + "grad_norm": 0.2968616783618927, + "learning_rate": 4.8827806399843444e-05, + "loss": 1.2547, + "step": 1255 + }, + { + "epoch": 0.19744935054726956, + "grad_norm": 0.24990178644657135, + "learning_rate": 4.8825935983656535e-05, + "loss": 1.2733, + "step": 1256 + }, + { + "epoch": 0.19760655544420208, + "grad_norm": 0.31955957412719727, + "learning_rate": 4.882406411227212e-05, + "loss": 1.2138, + "step": 1257 + 
}, + { + "epoch": 0.19776376034113463, + "grad_norm": 0.22445374727249146, + "learning_rate": 4.88221907858045e-05, + "loss": 1.1845, + "step": 1258 + }, + { + "epoch": 0.19792096523806715, + "grad_norm": 0.32888510823249817, + "learning_rate": 4.8820316004368116e-05, + "loss": 1.2339, + "step": 1259 + }, + { + "epoch": 0.1980781701349997, + "grad_norm": 0.29760921001434326, + "learning_rate": 4.8818439768077456e-05, + "loss": 1.2216, + "step": 1260 + }, + { + "epoch": 0.19823537503193225, + "grad_norm": 0.19965974986553192, + "learning_rate": 4.881656207704712e-05, + "loss": 1.2608, + "step": 1261 + }, + { + "epoch": 0.19839257992886478, + "grad_norm": 0.2538587749004364, + "learning_rate": 4.881468293139179e-05, + "loss": 1.1989, + "step": 1262 + }, + { + "epoch": 0.19854978482579733, + "grad_norm": 0.35299167037010193, + "learning_rate": 4.8812802331226224e-05, + "loss": 1.1426, + "step": 1263 + }, + { + "epoch": 0.19870698972272988, + "grad_norm": 0.3230816423892975, + "learning_rate": 4.8810920276665306e-05, + "loss": 1.2546, + "step": 1264 + }, + { + "epoch": 0.1988641946196624, + "grad_norm": 0.3077559769153595, + "learning_rate": 4.880903676782397e-05, + "loss": 1.1661, + "step": 1265 + }, + { + "epoch": 0.19902139951659495, + "grad_norm": 0.32157936692237854, + "learning_rate": 4.8807151804817254e-05, + "loss": 1.2141, + "step": 1266 + }, + { + "epoch": 0.19917860441352747, + "grad_norm": 0.32653504610061646, + "learning_rate": 4.880526538776029e-05, + "loss": 1.0623, + "step": 1267 + }, + { + "epoch": 0.19933580931046002, + "grad_norm": 0.2675210237503052, + "learning_rate": 4.880337751676828e-05, + "loss": 1.1408, + "step": 1268 + }, + { + "epoch": 0.19949301420739257, + "grad_norm": 0.28380653262138367, + "learning_rate": 4.880148819195654e-05, + "loss": 1.223, + "step": 1269 + }, + { + "epoch": 0.1996502191043251, + "grad_norm": 0.2532847821712494, + "learning_rate": 4.8799597413440466e-05, + "loss": 1.2133, + "step": 1270 + }, + { + "epoch": 0.19980742400125764, + "grad_norm": 0.2972438633441925, + "learning_rate": 4.8797705181335526e-05, + "loss": 1.2806, + "step": 1271 + }, + { + "epoch": 0.19996462889819017, + "grad_norm": 0.2725450098514557, + "learning_rate": 4.8795811495757306e-05, + "loss": 1.1627, + "step": 1272 + }, + { + "epoch": 0.20012183379512272, + "grad_norm": 0.2451506108045578, + "learning_rate": 4.879391635682145e-05, + "loss": 1.3242, + "step": 1273 + }, + { + "epoch": 0.20027903869205527, + "grad_norm": 0.22880415618419647, + "learning_rate": 4.8792019764643714e-05, + "loss": 1.1535, + "step": 1274 + }, + { + "epoch": 0.2004362435889878, + "grad_norm": 0.22470681369304657, + "learning_rate": 4.8790121719339935e-05, + "loss": 1.268, + "step": 1275 + }, + { + "epoch": 0.20059344848592034, + "grad_norm": 0.2413133829832077, + "learning_rate": 4.878822222102604e-05, + "loss": 1.2291, + "step": 1276 + }, + { + "epoch": 0.2007506533828529, + "grad_norm": 0.23373375833034515, + "learning_rate": 4.878632126981804e-05, + "loss": 1.1007, + "step": 1277 + }, + { + "epoch": 0.2009078582797854, + "grad_norm": 0.3018023371696472, + "learning_rate": 4.878441886583203e-05, + "loss": 1.2393, + "step": 1278 + }, + { + "epoch": 0.20106506317671796, + "grad_norm": 0.2107972353696823, + "learning_rate": 4.878251500918421e-05, + "loss": 1.3164, + "step": 1279 + }, + { + "epoch": 0.20122226807365048, + "grad_norm": 0.24787524342536926, + "learning_rate": 4.878060969999087e-05, + "loss": 1.217, + "step": 1280 + }, + { + "epoch": 0.20122226807365048, + "eval_loss": 
1.2021143436431885, + "eval_runtime": 2276.1827, + "eval_samples_per_second": 4.067, + "eval_steps_per_second": 2.034, + "step": 1280 + } + ], + "logging_steps": 1, + "max_steps": 12722, + "num_input_tokens_seen": 0, + "num_train_epochs": 2, + "save_steps": 160, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 6.55664759605035e+18, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +}
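
The `log_history` array recorded above mixes two kinds of entries: training records (with `loss`, one per `logging_steps`) and evaluation records (with `eval_loss` and `eval_runtime`, written at the checkpointed step). A minimal sketch of reading that structure back out of a `trainer_state.json` file is shown below; the file path, the `loss_curve.png` output name, and the use of matplotlib are assumptions for illustration, not part of the checkpoint itself.

```python
# Sketch: inspect the log_history of a saved trainer_state.json.
# Assumptions: the file sits at STATE_PATH; matplotlib is optional.
import json

STATE_PATH = "trainer_state.json"  # hypothetical path to the state file

with open(STATE_PATH) as f:
    state = json.load(f)

# Training entries carry "loss"; evaluation entries carry "eval_loss".
train_points = [(e["step"], e["loss"])
                for e in state["log_history"] if "loss" in e]
eval_points = [(e["step"], e["eval_loss"])
               for e in state["log_history"] if "eval_loss" in e]

print(f"logged {len(train_points)} training steps "
      f"out of max_steps={state['max_steps']}")
print(f"last train loss: {train_points[-1][1]:.4f}, "
      f"last eval loss: {eval_points[-1][1]:.4f}")

# Optional quick plot of both curves against the global step.
try:
    import matplotlib.pyplot as plt
    plt.plot(*zip(*train_points), label="train loss")
    plt.plot(*zip(*eval_points), marker="o", label="eval loss")
    plt.xlabel("step")
    plt.ylabel("loss")
    plt.legend()
    plt.savefig("loss_curve.png")
except ImportError:
    pass  # plotting is optional; the printed summary still runs
```

For this checkpoint such a script would show the per-step training loss noisily hovering around 1.1–1.3 and the evaluation loss at step 1280 (1.2021), consistent with the values logged in the state above.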