|
{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.10061113403682524,
  "eval_steps": 160,
  "global_step": 640,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00015720489693253945,
      "grad_norm": 1.3751904964447021,
      "learning_rate": 0.0,
      "loss": 3.5741,
      "step": 1
    },
    {
      "epoch": 0.00015720489693253945,
      "eval_loss": 3.4173049926757812,
      "eval_runtime": 2315.7248,
      "eval_samples_per_second": 3.998,
      "eval_steps_per_second": 1.999,
      "step": 1
    },
    {
      "epoch": 0.0003144097938650789,
      "grad_norm": 1.231239676475525,
      "learning_rate": 5e-06,
      "loss": 3.3021,
      "step": 2
    },
    {
      "epoch": 0.00047161469079761836,
      "grad_norm": 1.3657807111740112,
      "learning_rate": 1e-05,
      "loss": 3.6333,
      "step": 3
    },
    {
      "epoch": 0.0006288195877301578,
      "grad_norm": 1.3117496967315674,
      "learning_rate": 1.5e-05,
      "loss": 3.3731,
      "step": 4
    },
    {
      "epoch": 0.0007860244846626972,
      "grad_norm": 1.4118576049804688,
      "learning_rate": 2e-05,
      "loss": 3.612,
      "step": 5
    },
    {
      "epoch": 0.0009432293815952367,
      "grad_norm": 1.3155895471572876,
      "learning_rate": 2.5e-05,
      "loss": 3.3296,
      "step": 6
    },
    {
      "epoch": 0.001100434278527776,
      "grad_norm": 1.2847192287445068,
      "learning_rate": 3e-05,
      "loss": 3.2168,
      "step": 7
    },
    {
      "epoch": 0.0012576391754603156,
      "grad_norm": 1.1421078443527222,
      "learning_rate": 3.5e-05,
      "loss": 3.085,
      "step": 8
    },
    {
      "epoch": 0.0014148440723928551,
      "grad_norm": 0.9923035502433777,
      "learning_rate": 4e-05,
      "loss": 3.0472,
      "step": 9
    },
    {
      "epoch": 0.0015720489693253944,
      "grad_norm": 0.795043408870697,
      "learning_rate": 4.5e-05,
      "loss": 2.6666,
      "step": 10
    },
    {
      "epoch": 0.001729253866257934,
      "grad_norm": 0.5987974405288696,
      "learning_rate": 5e-05,
      "loss": 2.473,
      "step": 11
    },
    {
      "epoch": 0.0018864587631904734,
      "grad_norm": 0.4488905668258667,
      "learning_rate": 4.9999999236547564e-05,
      "loss": 2.3731,
      "step": 12
    },
    {
      "epoch": 0.002043663660123013,
      "grad_norm": 0.3517301380634308,
      "learning_rate": 4.999999694619029e-05,
      "loss": 2.2158,
      "step": 13
    },
    {
      "epoch": 0.002200868557055552,
      "grad_norm": 0.3045121431350708,
      "learning_rate": 4.999999312892831e-05,
      "loss": 2.3351,
      "step": 14
    },
    {
      "epoch": 0.002358073453988092,
      "grad_norm": 0.24488244950771332,
      "learning_rate": 4.9999987784761884e-05,
      "loss": 2.2693,
      "step": 15
    },
    {
      "epoch": 0.0025152783509206312,
      "grad_norm": 0.22892728447914124,
      "learning_rate": 4.999998091369132e-05,
      "loss": 2.1006,
      "step": 16
    },
    {
      "epoch": 0.0026724832478531705,
      "grad_norm": 0.23219206929206848,
      "learning_rate": 4.999997251571704e-05,
      "loss": 2.215,
      "step": 17
    },
    {
      "epoch": 0.0028296881447857102,
      "grad_norm": 0.24427154660224915,
      "learning_rate": 4.999996259083956e-05,
      "loss": 2.1708,
      "step": 18
    },
    {
      "epoch": 0.0029868930417182495,
      "grad_norm": 0.2640205919742584,
      "learning_rate": 4.999995113905947e-05,
      "loss": 2.1709,
      "step": 19
    },
    {
      "epoch": 0.003144097938650789,
      "grad_norm": 0.26644033193588257,
      "learning_rate": 4.999993816037749e-05,
      "loss": 2.1733,
      "step": 20
    },
    {
      "epoch": 0.0033013028355833285,
      "grad_norm": 0.2621535062789917,
      "learning_rate": 4.9999923654794414e-05,
      "loss": 2.0059,
      "step": 21
    },
    {
      "epoch": 0.003458507732515868,
      "grad_norm": 0.2586187422275543,
      "learning_rate": 4.999990762231111e-05,
      "loss": 2.0336,
      "step": 22
    },
    {
      "epoch": 0.003615712629448407,
      "grad_norm": 0.26732271909713745,
      "learning_rate": 4.9999890062928566e-05,
      "loss": 2.0566,
      "step": 23
    },
    {
      "epoch": 0.003772917526380947,
      "grad_norm": 0.2357867807149887,
      "learning_rate": 4.999987097664787e-05,
      "loss": 1.9529,
      "step": 24
    },
    {
      "epoch": 0.003930122423313486,
      "grad_norm": 0.2297009825706482,
      "learning_rate": 4.999985036347016e-05,
      "loss": 2.0369,
      "step": 25
    },
    {
      "epoch": 0.004087327320246026,
      "grad_norm": 0.20529747009277344,
      "learning_rate": 4.9999828223396705e-05,
      "loss": 1.9781,
      "step": 26
    },
    {
      "epoch": 0.004244532217178565,
      "grad_norm": 0.18342873454093933,
      "learning_rate": 4.999980455642887e-05,
      "loss": 1.9986,
      "step": 27
    },
    {
      "epoch": 0.004401737114111104,
      "grad_norm": 0.16487397253513336,
      "learning_rate": 4.999977936256809e-05,
      "loss": 1.9063,
      "step": 28
    },
    {
      "epoch": 0.004558942011043644,
      "grad_norm": 0.1762266606092453,
      "learning_rate": 4.99997526418159e-05,
      "loss": 1.9517,
      "step": 29
    },
    {
      "epoch": 0.004716146907976184,
      "grad_norm": 0.16371938586235046,
      "learning_rate": 4.999972439417394e-05,
      "loss": 1.7734,
      "step": 30
    },
    {
      "epoch": 0.004873351804908723,
      "grad_norm": 0.17309769988059998,
      "learning_rate": 4.999969461964392e-05,
      "loss": 1.8732,
      "step": 31
    },
    {
      "epoch": 0.0050305567018412625,
      "grad_norm": 0.15772338211536407,
      "learning_rate": 4.9999663318227683e-05,
      "loss": 1.7537,
      "step": 32
    },
    {
      "epoch": 0.005187761598773802,
      "grad_norm": 0.17521986365318298,
      "learning_rate": 4.9999630489927126e-05,
      "loss": 2.0077,
      "step": 33
    },
    {
      "epoch": 0.005344966495706341,
      "grad_norm": 0.15462292730808258,
      "learning_rate": 4.999959613474425e-05,
      "loss": 1.8576,
      "step": 34
    },
    {
      "epoch": 0.005502171392638881,
      "grad_norm": 0.15280336141586304,
      "learning_rate": 4.999956025268117e-05,
      "loss": 1.862,
      "step": 35
    },
    {
      "epoch": 0.0056593762895714205,
      "grad_norm": 0.14518432319164276,
      "learning_rate": 4.999952284374006e-05,
      "loss": 1.8893,
      "step": 36
    },
    {
      "epoch": 0.005816581186503959,
      "grad_norm": 0.16087624430656433,
      "learning_rate": 4.999948390792321e-05,
      "loss": 1.8658,
      "step": 37
    },
    {
      "epoch": 0.005973786083436499,
      "grad_norm": 0.17504698038101196,
      "learning_rate": 4.999944344523301e-05,
      "loss": 1.7647,
      "step": 38
    },
    {
      "epoch": 0.006130990980369039,
      "grad_norm": 0.17786233127117157,
      "learning_rate": 4.999940145567191e-05,
      "loss": 1.8133,
      "step": 39
    },
    {
      "epoch": 0.006288195877301578,
      "grad_norm": 0.1628972887992859,
      "learning_rate": 4.999935793924249e-05,
      "loss": 1.7731,
      "step": 40
    },
    {
      "epoch": 0.006445400774234117,
      "grad_norm": 0.13461466133594513,
      "learning_rate": 4.9999312895947406e-05,
      "loss": 1.7558,
      "step": 41
    },
    {
      "epoch": 0.006602605671166657,
      "grad_norm": 0.12960125505924225,
      "learning_rate": 4.99992663257894e-05,
      "loss": 1.7639,
      "step": 42
    },
    {
      "epoch": 0.006759810568099196,
      "grad_norm": 0.10991287231445312,
      "learning_rate": 4.9999218228771324e-05,
      "loss": 1.7538,
      "step": 43
    },
    {
      "epoch": 0.006917015465031736,
      "grad_norm": 0.11583230644464493,
      "learning_rate": 4.999916860489612e-05,
      "loss": 1.715,
      "step": 44
    },
    {
      "epoch": 0.007074220361964275,
      "grad_norm": 0.10344280302524567,
      "learning_rate": 4.999911745416681e-05,
      "loss": 1.6907,
      "step": 45
    },
    {
      "epoch": 0.007231425258896814,
      "grad_norm": 0.10546118766069412,
      "learning_rate": 4.999906477658651e-05,
      "loss": 1.7294,
      "step": 46
    },
    {
      "epoch": 0.007388630155829354,
      "grad_norm": 0.11775675415992737,
      "learning_rate": 4.9999010572158465e-05,
      "loss": 1.7146,
      "step": 47
    },
    {
      "epoch": 0.007545835052761894,
      "grad_norm": 0.11109112203121185,
      "learning_rate": 4.999895484088596e-05,
      "loss": 1.6939,
      "step": 48
    },
    {
      "epoch": 0.007703039949694433,
      "grad_norm": 0.1116517186164856,
      "learning_rate": 4.999889758277242e-05,
      "loss": 1.7271,
      "step": 49
    },
    {
      "epoch": 0.007860244846626972,
      "grad_norm": 0.11245547980070114,
      "learning_rate": 4.999883879782132e-05,
      "loss": 1.7333,
      "step": 50
    },
    {
      "epoch": 0.008017449743559512,
      "grad_norm": 0.1150551363825798,
      "learning_rate": 4.999877848603626e-05,
      "loss": 1.7036,
      "step": 51
    },
    {
      "epoch": 0.008174654640492052,
      "grad_norm": 0.10856381803750992,
      "learning_rate": 4.999871664742093e-05,
      "loss": 1.7493,
      "step": 52
    },
    {
      "epoch": 0.008331859537424591,
      "grad_norm": 0.10760089010000229,
      "learning_rate": 4.9998653281979095e-05,
      "loss": 1.6292,
      "step": 53
    },
    {
      "epoch": 0.00848906443435713,
      "grad_norm": 0.0932115837931633,
      "learning_rate": 4.9998588389714634e-05,
      "loss": 1.6608,
      "step": 54
    },
    {
      "epoch": 0.00864626933128967,
      "grad_norm": 0.09837482124567032,
      "learning_rate": 4.9998521970631504e-05,
      "loss": 1.7834,
      "step": 55
    },
    {
      "epoch": 0.008803474228222209,
      "grad_norm": 0.08872833847999573,
      "learning_rate": 4.9998454024733775e-05,
      "loss": 1.6484,
      "step": 56
    },
    {
      "epoch": 0.008960679125154749,
      "grad_norm": 0.08829163759946823,
      "learning_rate": 4.9998384552025577e-05,
      "loss": 1.5913,
      "step": 57
    },
    {
      "epoch": 0.009117884022087288,
      "grad_norm": 0.09087682515382767,
      "learning_rate": 4.999831355251117e-05,
      "loss": 1.6809,
      "step": 58
    },
    {
      "epoch": 0.009275088919019828,
      "grad_norm": 0.08675853163003922,
      "learning_rate": 4.9998241026194884e-05,
      "loss": 1.6519,
      "step": 59
    },
    {
      "epoch": 0.009432293815952368,
      "grad_norm": 0.08463481813669205,
      "learning_rate": 4.999816697308114e-05,
      "loss": 1.6234,
      "step": 60
    },
    {
      "epoch": 0.009589498712884906,
      "grad_norm": 0.08403950184583664,
      "learning_rate": 4.999809139317448e-05,
      "loss": 1.6533,
      "step": 61
    },
    {
      "epoch": 0.009746703609817445,
      "grad_norm": 0.08155622333288193,
      "learning_rate": 4.99980142864795e-05,
      "loss": 1.6726,
      "step": 62
    },
    {
      "epoch": 0.009903908506749985,
      "grad_norm": 0.08056480437517166,
      "learning_rate": 4.999793565300093e-05,
      "loss": 1.5881,
      "step": 63
    },
    {
      "epoch": 0.010061113403682525,
      "grad_norm": 0.07879023998975754,
      "learning_rate": 4.999785549274355e-05,
      "loss": 1.5568,
      "step": 64
    },
    {
      "epoch": 0.010218318300615065,
      "grad_norm": 0.07828455418348312,
      "learning_rate": 4.9997773805712265e-05,
      "loss": 1.6464,
      "step": 65
    },
    {
      "epoch": 0.010375523197547604,
      "grad_norm": 0.08054805546998978,
      "learning_rate": 4.9997690591912075e-05,
      "loss": 1.6213,
      "step": 66
    },
    {
      "epoch": 0.010532728094480142,
      "grad_norm": 0.07610727101564407,
      "learning_rate": 4.999760585134805e-05,
      "loss": 1.5729,
      "step": 67
    },
    {
      "epoch": 0.010689932991412682,
      "grad_norm": 0.07693428546190262,
      "learning_rate": 4.999751958402537e-05,
      "loss": 1.5444,
      "step": 68
    },
    {
      "epoch": 0.010847137888345222,
      "grad_norm": 0.0810319185256958,
      "learning_rate": 4.99974317899493e-05,
      "loss": 1.7045,
      "step": 69
    },
    {
      "epoch": 0.011004342785277762,
      "grad_norm": 0.07729896157979965,
      "learning_rate": 4.9997342469125205e-05,
      "loss": 1.6268,
      "step": 70
    },
    {
      "epoch": 0.011161547682210301,
      "grad_norm": 0.07730107754468918,
      "learning_rate": 4.999725162155855e-05,
      "loss": 1.658,
      "step": 71
    },
    {
      "epoch": 0.011318752579142841,
      "grad_norm": 0.08072328567504883,
      "learning_rate": 4.9997159247254864e-05,
      "loss": 1.5045,
      "step": 72
    },
    {
      "epoch": 0.011475957476075379,
      "grad_norm": 0.08120577782392502,
      "learning_rate": 4.9997065346219805e-05,
      "loss": 1.568,
      "step": 73
    },
    {
      "epoch": 0.011633162373007919,
      "grad_norm": 0.08131498098373413,
      "learning_rate": 4.99969699184591e-05,
      "loss": 1.6035,
      "step": 74
    },
    {
      "epoch": 0.011790367269940458,
      "grad_norm": 0.08395873010158539,
      "learning_rate": 4.9996872963978584e-05,
      "loss": 1.5844,
      "step": 75
    },
    {
      "epoch": 0.011947572166872998,
      "grad_norm": 0.08502068370580673,
      "learning_rate": 4.999677448278417e-05,
      "loss": 1.6661,
      "step": 76
    },
    {
      "epoch": 0.012104777063805538,
      "grad_norm": 0.08467952907085419,
      "learning_rate": 4.999667447488188e-05,
      "loss": 1.5537,
      "step": 77
    },
    {
      "epoch": 0.012261981960738078,
      "grad_norm": 0.19682182371616364,
      "learning_rate": 4.999657294027782e-05,
      "loss": 1.5051,
      "step": 78
    },
    {
      "epoch": 0.012419186857670617,
      "grad_norm": 0.08586428314447403,
      "learning_rate": 4.999646987897818e-05,
      "loss": 1.565,
      "step": 79
    },
    {
      "epoch": 0.012576391754603155,
      "grad_norm": 0.08156823366880417,
      "learning_rate": 4.999636529098928e-05,
      "loss": 1.6627,
      "step": 80
    },
    {
      "epoch": 0.012733596651535695,
      "grad_norm": 0.08715341240167618,
      "learning_rate": 4.9996259176317486e-05,
      "loss": 1.5862,
      "step": 81
    },
    {
      "epoch": 0.012890801548468235,
      "grad_norm": 0.09664586186408997,
      "learning_rate": 4.999615153496928e-05,
      "loss": 1.5741,
      "step": 82
    },
    {
      "epoch": 0.013048006445400774,
      "grad_norm": 0.08438891172409058,
      "learning_rate": 4.999604236695125e-05,
      "loss": 1.5933,
      "step": 83
    },
    {
      "epoch": 0.013205211342333314,
      "grad_norm": 0.08333732932806015,
      "learning_rate": 4.999593167227006e-05,
      "loss": 1.5904,
      "step": 84
    },
    {
      "epoch": 0.013362416239265854,
      "grad_norm": 0.07945791631937027,
      "learning_rate": 4.9995819450932455e-05,
      "loss": 1.5763,
      "step": 85
    },
    {
      "epoch": 0.013519621136198392,
      "grad_norm": 0.07682961225509644,
      "learning_rate": 4.9995705702945304e-05,
      "loss": 1.5197,
      "step": 86
    },
    {
      "epoch": 0.013676826033130932,
      "grad_norm": 0.07547677308320999,
      "learning_rate": 4.999559042831555e-05,
      "loss": 1.6825,
      "step": 87
    },
    {
      "epoch": 0.013834030930063471,
      "grad_norm": 0.07293456047773361,
      "learning_rate": 4.999547362705025e-05,
      "loss": 1.5466,
      "step": 88
    },
    {
      "epoch": 0.013991235826996011,
      "grad_norm": 0.07730914652347565,
      "learning_rate": 4.999535529915651e-05,
      "loss": 1.5775,
      "step": 89
    },
    {
      "epoch": 0.01414844072392855,
      "grad_norm": 0.07689664512872696,
      "learning_rate": 4.9995235444641565e-05,
      "loss": 1.5881,
      "step": 90
    },
    {
      "epoch": 0.01430564562086109,
      "grad_norm": 0.07754997909069061,
      "learning_rate": 4.999511406351275e-05,
      "loss": 1.5037,
      "step": 91
    },
    {
      "epoch": 0.014462850517793628,
      "grad_norm": 0.07229866087436676,
      "learning_rate": 4.999499115577746e-05,
      "loss": 1.5077,
      "step": 92
    },
    {
      "epoch": 0.014620055414726168,
      "grad_norm": 0.07491567730903625,
      "learning_rate": 4.9994866721443215e-05,
      "loss": 1.5461,
      "step": 93
    },
    {
      "epoch": 0.014777260311658708,
      "grad_norm": 0.07258685678243637,
      "learning_rate": 4.9994740760517605e-05,
      "loss": 1.5516,
      "step": 94
    },
    {
      "epoch": 0.014934465208591248,
      "grad_norm": 0.07643327116966248,
      "learning_rate": 4.9994613273008334e-05,
      "loss": 1.6223,
      "step": 95
    },
    {
      "epoch": 0.015091670105523787,
      "grad_norm": 0.0740588903427124,
      "learning_rate": 4.999448425892318e-05,
      "loss": 1.5322,
      "step": 96
    },
    {
      "epoch": 0.015248875002456327,
      "grad_norm": 0.44172239303588867,
      "learning_rate": 4.999435371827003e-05,
      "loss": 1.5498,
      "step": 97
    },
    {
      "epoch": 0.015406079899388867,
      "grad_norm": 0.0756363570690155,
      "learning_rate": 4.999422165105684e-05,
      "loss": 1.559,
      "step": 98
    },
    {
      "epoch": 0.015563284796321405,
      "grad_norm": 0.07251248508691788,
      "learning_rate": 4.99940880572917e-05,
      "loss": 1.5903,
      "step": 99
    },
    {
      "epoch": 0.015720489693253945,
      "grad_norm": 0.06931837648153305,
      "learning_rate": 4.999395293698275e-05,
      "loss": 1.4849,
      "step": 100
    },
    {
      "epoch": 0.015877694590186484,
      "grad_norm": 0.07403590530157089,
      "learning_rate": 4.9993816290138254e-05,
      "loss": 1.5191,
      "step": 101
    },
    {
      "epoch": 0.016034899487119024,
      "grad_norm": 0.07027724385261536,
      "learning_rate": 4.999367811676655e-05,
      "loss": 1.5655,
      "step": 102
    },
    {
      "epoch": 0.016192104384051564,
      "grad_norm": 0.07320379465818405,
      "learning_rate": 4.9993538416876093e-05,
      "loss": 1.4869,
      "step": 103
    },
    {
      "epoch": 0.016349309280984103,
      "grad_norm": 0.0726180374622345,
      "learning_rate": 4.9993397190475396e-05,
      "loss": 1.4629,
      "step": 104
    },
    {
      "epoch": 0.016506514177916643,
      "grad_norm": 0.07542011886835098,
      "learning_rate": 4.999325443757309e-05,
      "loss": 1.5976,
      "step": 105
    },
    {
      "epoch": 0.016663719074849183,
      "grad_norm": 0.07440067082643509,
      "learning_rate": 4.9993110158177895e-05,
      "loss": 1.5469,
      "step": 106
    },
    {
      "epoch": 0.016820923971781723,
      "grad_norm": 0.07547372579574585,
      "learning_rate": 4.999296435229863e-05,
      "loss": 1.5328,
      "step": 107
    },
    {
      "epoch": 0.01697812886871426,
      "grad_norm": 0.07532137632369995,
      "learning_rate": 4.999281701994419e-05,
      "loss": 1.6742,
      "step": 108
    },
    {
      "epoch": 0.0171353337656468,
      "grad_norm": 0.07249438762664795,
      "learning_rate": 4.999266816112358e-05,
      "loss": 1.4799,
      "step": 109
    },
    {
      "epoch": 0.01729253866257934,
      "grad_norm": 0.07399806380271912,
      "learning_rate": 4.999251777584589e-05,
      "loss": 1.5438,
      "step": 110
    },
    {
      "epoch": 0.017449743559511878,
      "grad_norm": 0.08135057240724564,
      "learning_rate": 4.99923658641203e-05,
      "loss": 1.5608,
      "step": 111
    },
    {
      "epoch": 0.017606948456444418,
      "grad_norm": 0.07508935779333115,
      "learning_rate": 4.99922124259561e-05,
      "loss": 1.5894,
      "step": 112
    },
    {
      "epoch": 0.017764153353376957,
      "grad_norm": 0.07432372123003006,
      "learning_rate": 4.999205746136265e-05,
      "loss": 1.4818,
      "step": 113
    },
    {
      "epoch": 0.017921358250309497,
      "grad_norm": 0.07694194465875626,
      "learning_rate": 4.999190097034942e-05,
      "loss": 1.5629,
      "step": 114
    },
    {
      "epoch": 0.018078563147242037,
      "grad_norm": 0.07384433597326279,
      "learning_rate": 4.999174295292597e-05,
      "loss": 1.4829,
      "step": 115
    },
    {
      "epoch": 0.018235768044174577,
      "grad_norm": 0.07152919471263885,
      "learning_rate": 4.999158340910195e-05,
      "loss": 1.4748,
      "step": 116
    },
    {
      "epoch": 0.018392972941107116,
      "grad_norm": 0.07719701528549194,
      "learning_rate": 4.999142233888709e-05,
      "loss": 1.5524,
      "step": 117
    },
    {
      "epoch": 0.018550177838039656,
      "grad_norm": 0.07540587335824966,
      "learning_rate": 4.999125974229125e-05,
      "loss": 1.4661,
      "step": 118
    },
    {
      "epoch": 0.018707382734972196,
      "grad_norm": 0.0787581130862236,
      "learning_rate": 4.9991095619324344e-05,
      "loss": 1.6455,
      "step": 119
    },
    {
      "epoch": 0.018864587631904736,
      "grad_norm": 0.07454577833414078,
      "learning_rate": 4.999092996999641e-05,
      "loss": 1.5083,
      "step": 120
    },
    {
      "epoch": 0.019021792528837272,
      "grad_norm": 0.0751076266169548,
      "learning_rate": 4.9990762794317545e-05,
      "loss": 1.4874,
      "step": 121
    },
    {
      "epoch": 0.01917899742576981,
      "grad_norm": 0.07733119279146194,
      "learning_rate": 4.999059409229798e-05,
      "loss": 1.6308,
      "step": 122
    },
    {
      "epoch": 0.01933620232270235,
      "grad_norm": 0.07897089421749115,
      "learning_rate": 4.999042386394802e-05,
      "loss": 1.5906,
      "step": 123
    },
    {
      "epoch": 0.01949340721963489,
      "grad_norm": 0.07758141309022903,
      "learning_rate": 4.999025210927804e-05,
      "loss": 1.5604,
      "step": 124
    },
    {
      "epoch": 0.01965061211656743,
      "grad_norm": 0.07845707982778549,
      "learning_rate": 4.9990078828298544e-05,
      "loss": 1.5901,
      "step": 125
    },
    {
      "epoch": 0.01980781701349997,
      "grad_norm": 0.0772818773984909,
      "learning_rate": 4.998990402102012e-05,
      "loss": 1.4516,
      "step": 126
    },
    {
      "epoch": 0.01996502191043251,
      "grad_norm": 0.07795504480600357,
      "learning_rate": 4.998972768745344e-05,
      "loss": 1.4642,
      "step": 127
    },
    {
      "epoch": 0.02012222680736505,
      "grad_norm": 0.0784008800983429,
      "learning_rate": 4.998954982760926e-05,
      "loss": 1.5936,
      "step": 128
    },
    {
      "epoch": 0.02027943170429759,
      "grad_norm": 0.07791212201118469,
      "learning_rate": 4.9989370441498465e-05,
      "loss": 1.4705,
      "step": 129
    },
    {
      "epoch": 0.02043663660123013,
      "grad_norm": 0.07785367220640182,
      "learning_rate": 4.9989189529132004e-05,
      "loss": 1.5085,
      "step": 130
    },
    {
      "epoch": 0.02059384149816267,
      "grad_norm": 0.07916689664125443,
      "learning_rate": 4.9989007090520925e-05,
      "loss": 1.5365,
      "step": 131
    },
    {
      "epoch": 0.02075104639509521,
      "grad_norm": 0.0775083601474762,
      "learning_rate": 4.9988823125676367e-05,
      "loss": 1.5286,
      "step": 132
    },
    {
      "epoch": 0.020908251292027745,
      "grad_norm": 0.08110442757606506,
      "learning_rate": 4.998863763460956e-05,
      "loss": 1.5779,
      "step": 133
    },
    {
      "epoch": 0.021065456188960285,
      "grad_norm": 0.0814640000462532,
      "learning_rate": 4.998845061733185e-05,
      "loss": 1.4778,
      "step": 134
    },
    {
      "epoch": 0.021222661085892824,
      "grad_norm": 0.08069492131471634,
      "learning_rate": 4.998826207385465e-05,
      "loss": 1.5317,
      "step": 135
    },
    {
      "epoch": 0.021379865982825364,
      "grad_norm": 0.07377774268388748,
      "learning_rate": 4.998807200418948e-05,
      "loss": 1.5258,
      "step": 136
    },
    {
      "epoch": 0.021537070879757904,
      "grad_norm": 0.0787922590970993,
      "learning_rate": 4.9987880408347945e-05,
      "loss": 1.5185,
      "step": 137
    },
    {
      "epoch": 0.021694275776690444,
      "grad_norm": 0.07662995159626007,
      "learning_rate": 4.9987687286341745e-05,
      "loss": 1.4637,
      "step": 138
    },
    {
      "epoch": 0.021851480673622983,
      "grad_norm": 0.08528955280780792,
      "learning_rate": 4.9987492638182676e-05,
      "loss": 1.4776,
      "step": 139
    },
    {
      "epoch": 0.022008685570555523,
      "grad_norm": 0.08089053630828857,
      "learning_rate": 4.9987296463882626e-05,
      "loss": 1.5885,
      "step": 140
    },
    {
      "epoch": 0.022165890467488063,
      "grad_norm": 0.08029694855213165,
      "learning_rate": 4.998709876345358e-05,
      "loss": 1.4557,
      "step": 141
    },
    {
      "epoch": 0.022323095364420602,
      "grad_norm": 0.07918502390384674,
      "learning_rate": 4.9986899536907614e-05,
      "loss": 1.4285,
      "step": 142
    },
    {
      "epoch": 0.022480300261353142,
      "grad_norm": 0.0813126415014267,
      "learning_rate": 4.998669878425689e-05,
      "loss": 1.5958,
      "step": 143
    },
    {
      "epoch": 0.022637505158285682,
      "grad_norm": 0.07935188710689545,
      "learning_rate": 4.998649650551368e-05,
      "loss": 1.5249,
      "step": 144
    },
    {
      "epoch": 0.02279471005521822,
      "grad_norm": 0.08163304626941681,
      "learning_rate": 4.9986292700690324e-05,
      "loss": 1.483,
      "step": 145
    },
    {
      "epoch": 0.022951914952150758,
      "grad_norm": 0.08277447521686554,
      "learning_rate": 4.998608736979928e-05,
      "loss": 1.6212,
      "step": 146
    },
    {
      "epoch": 0.023109119849083298,
      "grad_norm": 0.08285827934741974,
      "learning_rate": 4.9985880512853076e-05,
      "loss": 1.4495,
      "step": 147
    },
    {
      "epoch": 0.023266324746015837,
      "grad_norm": 0.082750603556633,
      "learning_rate": 4.998567212986437e-05,
      "loss": 1.4335,
      "step": 148
    },
    {
      "epoch": 0.023423529642948377,
      "grad_norm": 0.07986058294773102,
      "learning_rate": 4.998546222084587e-05,
      "loss": 1.4704,
      "step": 149
    },
    {
      "epoch": 0.023580734539880917,
      "grad_norm": 0.08105576783418655,
      "learning_rate": 4.9985250785810396e-05,
      "loss": 1.5183,
      "step": 150
    },
    {
      "epoch": 0.023737939436813457,
      "grad_norm": 0.08202917128801346,
      "learning_rate": 4.9985037824770866e-05,
      "loss": 1.5423,
      "step": 151
    },
    {
      "epoch": 0.023895144333745996,
      "grad_norm": 0.08937894552946091,
      "learning_rate": 4.998482333774029e-05,
      "loss": 1.5731,
      "step": 152
    },
    {
      "epoch": 0.024052349230678536,
      "grad_norm": 0.08333728462457657,
      "learning_rate": 4.9984607324731766e-05,
      "loss": 1.5133,
      "step": 153
    },
    {
      "epoch": 0.024209554127611076,
      "grad_norm": 0.08529175072908401,
      "learning_rate": 4.998438978575849e-05,
      "loss": 1.516,
      "step": 154
    },
    {
      "epoch": 0.024366759024543615,
      "grad_norm": 0.08508963882923126,
      "learning_rate": 4.998417072083374e-05,
      "loss": 1.5646,
      "step": 155
    },
    {
      "epoch": 0.024523963921476155,
      "grad_norm": 0.08971578627824783,
      "learning_rate": 4.99839501299709e-05,
      "loss": 1.4714,
      "step": 156
    },
    {
      "epoch": 0.024681168818408695,
      "grad_norm": 0.08380109816789627,
      "learning_rate": 4.998372801318345e-05,
      "loss": 1.4476,
      "step": 157
    },
    {
      "epoch": 0.024838373715341235,
      "grad_norm": 0.08533143252134323,
      "learning_rate": 4.9983504370484945e-05,
      "loss": 1.4866,
      "step": 158
    },
    {
      "epoch": 0.02499557861227377,
      "grad_norm": 0.08318709582090378,
      "learning_rate": 4.998327920188905e-05,
      "loss": 1.5274,
      "step": 159
    },
    {
      "epoch": 0.02515278350920631,
      "grad_norm": 0.08486370742321014,
      "learning_rate": 4.9983052507409525e-05,
      "loss": 1.4713,
      "step": 160
    },
    {
      "epoch": 0.02515278350920631,
      "eval_loss": 1.5136528015136719,
      "eval_runtime": 2318.8971,
      "eval_samples_per_second": 3.992,
      "eval_steps_per_second": 1.996,
      "step": 160
    },
    {
      "epoch": 0.02530998840613885,
      "grad_norm": 0.08242359757423401,
      "learning_rate": 4.9982824287060195e-05,
      "loss": 1.5069,
      "step": 161
    },
    {
      "epoch": 0.02546719330307139,
      "grad_norm": 0.08547423779964447,
      "learning_rate": 4.9982594540855014e-05,
      "loss": 1.4973,
      "step": 162
    },
    {
      "epoch": 0.02562439820000393,
      "grad_norm": 0.08345580101013184,
      "learning_rate": 4.9982363268808016e-05,
      "loss": 1.5078,
      "step": 163
    },
    {
      "epoch": 0.02578160309693647,
      "grad_norm": 0.0830339640378952,
      "learning_rate": 4.9982130470933316e-05,
      "loss": 1.4098,
      "step": 164
    },
    {
      "epoch": 0.02593880799386901,
      "grad_norm": 0.08568515628576279,
      "learning_rate": 4.998189614724514e-05,
      "loss": 1.4628,
      "step": 165
    },
    {
      "epoch": 0.02609601289080155,
      "grad_norm": 0.08261829614639282,
      "learning_rate": 4.998166029775779e-05,
      "loss": 1.4492,
      "step": 166
    },
    {
      "epoch": 0.02625321778773409,
      "grad_norm": 0.08944887667894363,
      "learning_rate": 4.998142292248569e-05,
      "loss": 1.5633,
      "step": 167
    },
    {
      "epoch": 0.02641042268466663,
      "grad_norm": 0.08632911741733551,
      "learning_rate": 4.998118402144332e-05,
      "loss": 1.5106,
      "step": 168
    },
    {
      "epoch": 0.026567627581599168,
      "grad_norm": 0.08733859658241272,
      "learning_rate": 4.998094359464528e-05,
      "loss": 1.5607,
      "step": 169
    },
    {
      "epoch": 0.026724832478531708,
      "grad_norm": 0.08667927235364914,
      "learning_rate": 4.9980701642106245e-05,
      "loss": 1.4544,
      "step": 170
    },
    {
      "epoch": 0.026882037375464244,
      "grad_norm": 0.08655022084712982,
      "learning_rate": 4.9980458163841006e-05,
      "loss": 1.5264,
      "step": 171
    },
    {
      "epoch": 0.027039242272396784,
      "grad_norm": 0.08899988234043121,
      "learning_rate": 4.9980213159864426e-05,
      "loss": 1.4778,
      "step": 172
    },
    {
      "epoch": 0.027196447169329323,
      "grad_norm": 0.09411856532096863,
      "learning_rate": 4.997996663019147e-05,
      "loss": 1.5269,
      "step": 173
    },
    {
      "epoch": 0.027353652066261863,
      "grad_norm": 0.087191641330719,
      "learning_rate": 4.997971857483719e-05,
      "loss": 1.5166,
      "step": 174
    },
    {
      "epoch": 0.027510856963194403,
      "grad_norm": 0.08959636092185974,
      "learning_rate": 4.997946899381675e-05,
      "loss": 1.5503,
      "step": 175
    },
    {
      "epoch": 0.027668061860126943,
      "grad_norm": 0.0951187014579773,
      "learning_rate": 4.997921788714537e-05,
      "loss": 1.4879,
      "step": 176
    },
    {
      "epoch": 0.027825266757059482,
      "grad_norm": 0.09324768930673599,
      "learning_rate": 4.997896525483841e-05,
      "loss": 1.5714,
      "step": 177
    },
    {
      "epoch": 0.027982471653992022,
      "grad_norm": 0.08633986115455627,
      "learning_rate": 4.997871109691129e-05,
      "loss": 1.4198,
      "step": 178
    },
    {
      "epoch": 0.028139676550924562,
      "grad_norm": 0.08947525173425674,
      "learning_rate": 4.9978455413379535e-05,
      "loss": 1.4702,
      "step": 179
    },
    {
      "epoch": 0.0282968814478571,
      "grad_norm": 0.09275490790605545,
      "learning_rate": 4.9978198204258766e-05,
      "loss": 1.5252,
      "step": 180
    },
    {
      "epoch": 0.02845408634478964,
      "grad_norm": 0.08761609345674515,
      "learning_rate": 4.9977939469564676e-05,
      "loss": 1.505,
      "step": 181
    },
    {
      "epoch": 0.02861129124172218,
      "grad_norm": 0.08683087676763535,
      "learning_rate": 4.997767920931308e-05,
      "loss": 1.5059,
      "step": 182
    },
    {
      "epoch": 0.02876849613865472,
      "grad_norm": 0.08931361883878708,
      "learning_rate": 4.997741742351988e-05,
      "loss": 1.5003,
      "step": 183
    },
    {
      "epoch": 0.028925701035587257,
      "grad_norm": 0.08820109069347382,
      "learning_rate": 4.997715411220105e-05,
      "loss": 1.5132,
      "step": 184
    },
    {
      "epoch": 0.029082905932519797,
      "grad_norm": 0.09284964948892593,
      "learning_rate": 4.997688927537268e-05,
      "loss": 1.4561,
      "step": 185
    },
    {
      "epoch": 0.029240110829452336,
      "grad_norm": 0.09472864121198654,
      "learning_rate": 4.997662291305094e-05,
      "loss": 1.4729,
      "step": 186
    },
    {
      "epoch": 0.029397315726384876,
      "grad_norm": 0.08725330233573914,
      "learning_rate": 4.997635502525211e-05,
      "loss": 1.3994,
      "step": 187
    },
    {
      "epoch": 0.029554520623317416,
      "grad_norm": 0.09085626900196075,
      "learning_rate": 4.9976085611992536e-05,
      "loss": 1.4695,
      "step": 188
    },
    {
      "epoch": 0.029711725520249956,
      "grad_norm": 0.09322400391101837,
      "learning_rate": 4.9975814673288684e-05,
      "loss": 1.4753,
      "step": 189
    },
    {
      "epoch": 0.029868930417182495,
      "grad_norm": 0.08927160501480103,
      "learning_rate": 4.99755422091571e-05,
      "loss": 1.4465,
      "step": 190
    },
    {
      "epoch": 0.030026135314115035,
      "grad_norm": 0.09317070990800858,
      "learning_rate": 4.997526821961442e-05,
      "loss": 1.5124,
      "step": 191
    },
    {
      "epoch": 0.030183340211047575,
      "grad_norm": 0.08911167085170746,
      "learning_rate": 4.9974992704677385e-05,
      "loss": 1.4515,
      "step": 192
    },
    {
      "epoch": 0.030340545107980114,
      "grad_norm": 0.09432853013277054,
      "learning_rate": 4.997471566436282e-05,
      "loss": 1.4623,
      "step": 193
    },
    {
      "epoch": 0.030497750004912654,
      "grad_norm": 0.09417332708835602,
      "learning_rate": 4.997443709868764e-05,
      "loss": 1.5103,
      "step": 194
    },
    {
      "epoch": 0.030654954901845194,
      "grad_norm": 0.09564542025327682,
      "learning_rate": 4.997415700766887e-05,
      "loss": 1.4929,
      "step": 195
    },
    {
      "epoch": 0.030812159798777734,
      "grad_norm": 0.09101004898548126,
      "learning_rate": 4.997387539132361e-05,
      "loss": 1.4225,
      "step": 196
    },
    {
      "epoch": 0.03096936469571027,
      "grad_norm": 0.09196274727582932,
      "learning_rate": 4.997359224966906e-05,
      "loss": 1.4701,
      "step": 197
    },
    {
      "epoch": 0.03112656959264281,
      "grad_norm": 0.09573279321193695,
      "learning_rate": 4.997330758272251e-05,
      "loss": 1.4425,
      "step": 198
    },
    {
      "epoch": 0.03128377448957535,
      "grad_norm": 0.09180758893489838,
      "learning_rate": 4.9973021390501354e-05,
      "loss": 1.4426,
      "step": 199
    },
    {
      "epoch": 0.03144097938650789,
      "grad_norm": 0.09583238512277603,
      "learning_rate": 4.997273367302306e-05,
      "loss": 1.5158,
      "step": 200
    },
    {
      "epoch": 0.03159818428344043,
      "grad_norm": 0.09394747018814087,
      "learning_rate": 4.997244443030521e-05,
      "loss": 1.4306,
      "step": 201
    },
    {
      "epoch": 0.03175538918037297,
      "grad_norm": 0.09470199793577194,
      "learning_rate": 4.9972153662365474e-05,
      "loss": 1.5286,
      "step": 202
    },
    {
      "epoch": 0.031912594077305505,
      "grad_norm": 0.09274959564208984,
      "learning_rate": 4.997186136922161e-05,
      "loss": 1.4803,
      "step": 203
    },
    {
      "epoch": 0.03206979897423805,
      "grad_norm": 0.09344369918107986,
      "learning_rate": 4.997156755089145e-05,
      "loss": 1.5449,
      "step": 204
    },
    {
      "epoch": 0.032227003871170584,
      "grad_norm": 0.09794919937849045,
      "learning_rate": 4.997127220739296e-05,
      "loss": 1.4383,
      "step": 205
    },
    {
      "epoch": 0.03238420876810313,
      "grad_norm": 0.09698093682527542,
      "learning_rate": 4.997097533874418e-05,
      "loss": 1.4462,
      "step": 206
    },
    {
      "epoch": 0.032541413665035664,
      "grad_norm": 0.09690559655427933,
      "learning_rate": 4.997067694496323e-05,
      "loss": 1.4735,
      "step": 207
    },
    {
      "epoch": 0.03269861856196821,
      "grad_norm": 0.09657544642686844,
      "learning_rate": 4.9970377026068336e-05,
      "loss": 1.5672,
      "step": 208
    },
    {
      "epoch": 0.03285582345890074,
      "grad_norm": 0.09483659267425537,
      "learning_rate": 4.9970075582077825e-05,
      "loss": 1.4931,
      "step": 209
    },
    {
      "epoch": 0.033013028355833286,
      "grad_norm": 0.09744243323802948,
      "learning_rate": 4.9969772613010104e-05,
      "loss": 1.4638,
      "step": 210
    },
    {
      "epoch": 0.03317023325276582,
      "grad_norm": 0.09521006047725677,
      "learning_rate": 4.9969468118883665e-05,
      "loss": 1.4127,
      "step": 211
    },
    {
      "epoch": 0.033327438149698366,
      "grad_norm": 0.09646004438400269,
      "learning_rate": 4.996916209971713e-05,
      "loss": 1.5139,
      "step": 212
    },
    {
      "epoch": 0.0334846430466309,
      "grad_norm": 0.09292810410261154,
      "learning_rate": 4.996885455552916e-05,
      "loss": 1.4399,
      "step": 213
    },
    {
      "epoch": 0.033641847943563445,
      "grad_norm": 0.09986516088247299,
      "learning_rate": 4.996854548633857e-05,
      "loss": 1.4637,
      "step": 214
    },
    {
      "epoch": 0.03379905284049598,
      "grad_norm": 0.09723702073097229,
      "learning_rate": 4.996823489216421e-05,
      "loss": 1.5673,
      "step": 215
    },
    {
      "epoch": 0.03395625773742852,
      "grad_norm": 0.09608977288007736,
      "learning_rate": 4.996792277302507e-05,
      "loss": 1.4428,
      "step": 216
    },
    {
      "epoch": 0.03411346263436106,
      "grad_norm": 0.09329380095005035,
      "learning_rate": 4.99676091289402e-05,
      "loss": 1.3892,
      "step": 217
    },
    {
      "epoch": 0.0342706675312936,
      "grad_norm": 0.0959913358092308,
      "learning_rate": 4.996729395992875e-05,
      "loss": 1.5219,
      "step": 218
    },
    {
      "epoch": 0.03442787242822614,
      "grad_norm": 0.09832671284675598,
      "learning_rate": 4.996697726600999e-05,
      "loss": 1.5259,
      "step": 219
    },
    {
      "epoch": 0.03458507732515868,
      "grad_norm": 0.10061636567115784,
      "learning_rate": 4.996665904720325e-05,
      "loss": 1.5216,
      "step": 220
    },
    {
      "epoch": 0.03474228222209122,
      "grad_norm": 0.09742400050163269,
      "learning_rate": 4.9966339303527965e-05,
      "loss": 1.3819,
      "step": 221
    },
    {
      "epoch": 0.034899487119023756,
      "grad_norm": 0.09629969298839569,
      "learning_rate": 4.996601803500367e-05,
      "loss": 1.5341,
      "step": 222
    },
    {
      "epoch": 0.0350566920159563,
      "grad_norm": 0.09776200354099274,
      "learning_rate": 4.996569524164998e-05,
      "loss": 1.5054,
      "step": 223
    },
    {
      "epoch": 0.035213896912888835,
      "grad_norm": 0.1008530780673027,
      "learning_rate": 4.996537092348661e-05,
      "loss": 1.5333,
      "step": 224
    },
    {
      "epoch": 0.03537110180982138,
      "grad_norm": 0.09749735891819,
      "learning_rate": 4.996504508053338e-05,
      "loss": 1.3899,
      "step": 225
    },
    {
      "epoch": 0.035528306706753915,
      "grad_norm": 0.10522401332855225,
      "learning_rate": 4.9964717712810175e-05,
      "loss": 1.5413,
      "step": 226
    },
    {
      "epoch": 0.03568551160368646,
      "grad_norm": 0.09566272795200348,
      "learning_rate": 4.9964388820336996e-05,
      "loss": 1.435,
      "step": 227
    },
    {
      "epoch": 0.035842716500618994,
      "grad_norm": 0.10133984684944153,
      "learning_rate": 4.996405840313393e-05,
      "loss": 1.445,
      "step": 228
    },
    {
      "epoch": 0.03599992139755153,
      "grad_norm": 0.09702739119529724,
      "learning_rate": 4.996372646122116e-05,
      "loss": 1.4287,
      "step": 229
    },
    {
      "epoch": 0.036157126294484074,
      "grad_norm": 0.1012992411851883,
      "learning_rate": 4.996339299461896e-05,
      "loss": 1.382,
      "step": 230
    },
    {
      "epoch": 0.03631433119141661,
      "grad_norm": 0.09877166152000427,
      "learning_rate": 4.99630580033477e-05,
      "loss": 1.5729,
      "step": 231
    },
    {
      "epoch": 0.03647153608834915,
      "grad_norm": 0.1033129170536995,
      "learning_rate": 4.996272148742783e-05,
      "loss": 1.4754,
      "step": 232
    },
    {
      "epoch": 0.03662874098528169,
      "grad_norm": 0.09901215881109238,
      "learning_rate": 4.9962383446879914e-05,
      "loss": 1.5153,
      "step": 233
    },
    {
      "epoch": 0.03678594588221423,
      "grad_norm": 0.10241983830928802,
      "learning_rate": 4.996204388172458e-05,
      "loss": 1.5131,
      "step": 234
    },
    {
      "epoch": 0.03694315077914677,
      "grad_norm": 0.09574593603610992,
      "learning_rate": 4.9961702791982594e-05,
      "loss": 1.5285,
      "step": 235
    },
    {
      "epoch": 0.03710035567607931,
      "grad_norm": 0.10309838503599167,
      "learning_rate": 4.996136017767477e-05,
      "loss": 1.5751,
      "step": 236
    },
    {
      "epoch": 0.03725756057301185,
      "grad_norm": 0.09928470849990845,
      "learning_rate": 4.996101603882204e-05,
      "loss": 1.5108,
      "step": 237
    },
    {
      "epoch": 0.03741476546994439,
      "grad_norm": 0.10514767467975616,
      "learning_rate": 4.996067037544542e-05,
      "loss": 1.4206,
      "step": 238
    },
    {
      "epoch": 0.03757197036687693,
      "grad_norm": 0.10411518812179565,
      "learning_rate": 4.996032318756601e-05,
      "loss": 1.5628,
      "step": 239
    },
    {
      "epoch": 0.03772917526380947,
      "grad_norm": 0.0989808738231659,
      "learning_rate": 4.9959974475205045e-05,
      "loss": 1.4444,
      "step": 240
    },
    {
      "epoch": 0.03788638016074201,
      "grad_norm": 0.10069911926984787,
      "learning_rate": 4.9959624238383804e-05,
      "loss": 1.4805,
      "step": 241
    },
    {
      "epoch": 0.038043585057674544,
      "grad_norm": 0.10637518763542175,
      "learning_rate": 4.995927247712367e-05,
      "loss": 1.5289,
      "step": 242
    },
    {
      "epoch": 0.03820078995460709,
      "grad_norm": 0.10085684061050415,
      "learning_rate": 4.995891919144614e-05,
      "loss": 1.5288,
      "step": 243
    },
    {
      "epoch": 0.03835799485153962,
      "grad_norm": 0.09989017248153687,
      "learning_rate": 4.995856438137279e-05,
      "loss": 1.5444,
      "step": 244
    },
    {
      "epoch": 0.038515199748472166,
      "grad_norm": 0.10382463037967682,
      "learning_rate": 4.9958208046925294e-05,
      "loss": 1.4621,
      "step": 245
    },
    {
      "epoch": 0.0386724046454047,
      "grad_norm": 0.10208063572645187,
      "learning_rate": 4.99578501881254e-05,
      "loss": 1.5003,
      "step": 246
    },
    {
      "epoch": 0.038829609542337246,
      "grad_norm": 0.1028011366724968,
      "learning_rate": 4.9957490804994977e-05,
      "loss": 1.516,
      "step": 247
    },
    {
      "epoch": 0.03898681443926978,
      "grad_norm": 0.10475701838731766,
      "learning_rate": 4.995712989755598e-05,
      "loss": 1.5333,
      "step": 248
    },
    {
      "epoch": 0.039144019336202325,
      "grad_norm": 0.1038154736161232,
      "learning_rate": 4.995676746583044e-05,
      "loss": 1.4779,
      "step": 249
    },
    {
      "epoch": 0.03930122423313486,
      "grad_norm": 0.10413440316915512,
      "learning_rate": 4.99564035098405e-05,
      "loss": 1.5241,
      "step": 250
    },
    {
      "epoch": 0.039458429130067404,
      "grad_norm": 0.09869382530450821,
      "learning_rate": 4.995603802960838e-05,
      "loss": 1.442,
      "step": 251
    },
    {
      "epoch": 0.03961563402699994,
      "grad_norm": 0.10138234496116638,
      "learning_rate": 4.995567102515641e-05,
      "loss": 1.5393,
      "step": 252
    },
    {
      "epoch": 0.039772838923932484,
      "grad_norm": 0.10225867480039597,
      "learning_rate": 4.995530249650701e-05,
      "loss": 1.4516,
      "step": 253
    },
    {
      "epoch": 0.03993004382086502,
      "grad_norm": 0.09942895174026489,
      "learning_rate": 4.995493244368268e-05,
      "loss": 1.4543,
      "step": 254
    },
    {
      "epoch": 0.040087248717797556,
      "grad_norm": 0.11218860745429993,
      "learning_rate": 4.995456086670602e-05,
      "loss": 1.4985,
      "step": 255
    },
    {
      "epoch": 0.0402444536147301,
      "grad_norm": 0.10839337855577469,
      "learning_rate": 4.9954187765599736e-05,
      "loss": 1.4805,
      "step": 256
    },
    {
      "epoch": 0.040401658511662636,
      "grad_norm": 0.10317599028348923,
      "learning_rate": 4.9953813140386595e-05,
      "loss": 1.4412,
      "step": 257
    },
    {
      "epoch": 0.04055886340859518,
      "grad_norm": 0.10285656154155731,
      "learning_rate": 4.99534369910895e-05,
      "loss": 1.476,
      "step": 258
    },
    {
      "epoch": 0.040716068305527715,
      "grad_norm": 0.10330680012702942,
      "learning_rate": 4.995305931773141e-05,
      "loss": 1.5157,
      "step": 259
    },
    {
      "epoch": 0.04087327320246026,
      "grad_norm": 0.1086694598197937,
      "learning_rate": 4.99526801203354e-05,
      "loss": 1.4999,
      "step": 260
    },
    {
      "epoch": 0.041030478099392795,
      "grad_norm": 0.10800144821405411,
      "learning_rate": 4.995229939892464e-05,
      "loss": 1.4764,
      "step": 261
    },
    {
      "epoch": 0.04118768299632534,
      "grad_norm": 0.10645303875207901,
      "learning_rate": 4.9951917153522355e-05,
      "loss": 1.4404,
      "step": 262
    },
    {
      "epoch": 0.041344887893257874,
      "grad_norm": 0.10440964996814728,
      "learning_rate": 4.9951533384151906e-05,
      "loss": 1.3678,
      "step": 263
    },
    {
      "epoch": 0.04150209279019042,
      "grad_norm": 0.10993078351020813,
      "learning_rate": 4.995114809083673e-05,
      "loss": 1.5064,
      "step": 264
    },
    {
      "epoch": 0.041659297687122954,
      "grad_norm": 0.10710245370864868,
      "learning_rate": 4.9950761273600366e-05,
      "loss": 1.4134,
      "step": 265
    },
    {
      "epoch": 0.04181650258405549,
      "grad_norm": 0.11030582338571548,
      "learning_rate": 4.995037293246644e-05,
      "loss": 1.5299,
      "step": 266
    },
    {
      "epoch": 0.04197370748098803,
      "grad_norm": 0.1058267131447792,
      "learning_rate": 4.994998306745866e-05,
      "loss": 1.3654,
      "step": 267
    },
    {
      "epoch": 0.04213091237792057,
      "grad_norm": 0.10541702806949615,
      "learning_rate": 4.994959167860084e-05,
      "loss": 1.4297,
      "step": 268
    },
    {
      "epoch": 0.04228811727485311,
      "grad_norm": 0.11085420846939087,
      "learning_rate": 4.994919876591689e-05,
      "loss": 1.4876,
      "step": 269
    },
    {
      "epoch": 0.04244532217178565,
      "grad_norm": 0.11054470390081406,
      "learning_rate": 4.994880432943081e-05,
      "loss": 1.574,
      "step": 270
    },
    {
      "epoch": 0.04260252706871819,
      "grad_norm": 0.11234510689973831,
      "learning_rate": 4.994840836916668e-05,
      "loss": 1.5079,
      "step": 271
    },
    {
      "epoch": 0.04275973196565073,
      "grad_norm": 0.11040106415748596,
      "learning_rate": 4.994801088514869e-05,
      "loss": 1.5091,
      "step": 272
    },
    {
      "epoch": 0.04291693686258327,
      "grad_norm": 0.10639887303113937,
      "learning_rate": 4.994761187740111e-05,
      "loss": 1.4495,
      "step": 273
    },
    {
      "epoch": 0.04307414175951581,
      "grad_norm": 0.11268071085214615,
      "learning_rate": 4.994721134594833e-05,
      "loss": 1.5057,
      "step": 274
    },
    {
      "epoch": 0.04323134665644835,
      "grad_norm": 0.10079260170459747,
      "learning_rate": 4.994680929081479e-05,
      "loss": 1.4145,
      "step": 275
    },
    {
      "epoch": 0.04338855155338089,
      "grad_norm": 0.11474710702896118,
      "learning_rate": 4.994640571202506e-05,
      "loss": 1.5061,
      "step": 276
    },
    {
      "epoch": 0.04354575645031343,
      "grad_norm": 0.10946876555681229,
      "learning_rate": 4.994600060960377e-05,
      "loss": 1.5306,
      "step": 277
    },
    {
      "epoch": 0.04370296134724597,
      "grad_norm": 0.11192137002944946,
      "learning_rate": 4.994559398357569e-05,
      "loss": 1.5347,
      "step": 278
    },
    {
      "epoch": 0.0438601662441785,
      "grad_norm": 0.10744784027338028,
      "learning_rate": 4.994518583396564e-05,
      "loss": 1.4686,
      "step": 279
    },
    {
      "epoch": 0.044017371141111046,
      "grad_norm": 0.11113352328538895,
      "learning_rate": 4.9944776160798544e-05,
      "loss": 1.4101,
      "step": 280
    },
    {
      "epoch": 0.04417457603804358,
      "grad_norm": 0.11456230282783508,
      "learning_rate": 4.994436496409943e-05,
      "loss": 1.4036,
      "step": 281
    },
    {
      "epoch": 0.044331780934976125,
      "grad_norm": 0.11608672887086868,
      "learning_rate": 4.994395224389342e-05,
      "loss": 1.4949,
      "step": 282
    },
    {
      "epoch": 0.04448898583190866,
      "grad_norm": 0.1232326403260231,
      "learning_rate": 4.9943538000205705e-05,
      "loss": 1.5501,
      "step": 283
    },
    {
      "epoch": 0.044646190728841205,
      "grad_norm": 0.11791515350341797,
      "learning_rate": 4.994312223306159e-05,
      "loss": 1.4542,
      "step": 284
    },
    {
      "epoch": 0.04480339562577374,
      "grad_norm": 0.11657550930976868,
      "learning_rate": 4.9942704942486476e-05,
      "loss": 1.4724,
      "step": 285
    },
    {
      "epoch": 0.044960600522706284,
      "grad_norm": 0.11560262739658356,
      "learning_rate": 4.994228612850584e-05,
      "loss": 1.4036,
      "step": 286
    },
    {
      "epoch": 0.04511780541963882,
      "grad_norm": 0.10999175906181335,
      "learning_rate": 4.994186579114527e-05,
      "loss": 1.4489,
      "step": 287
    },
    {
      "epoch": 0.045275010316571364,
      "grad_norm": 0.11586826294660568,
      "learning_rate": 4.9941443930430436e-05,
      "loss": 1.5486,
      "step": 288
    },
    {
      "epoch": 0.0454322152135039,
      "grad_norm": 0.11349951475858688,
      "learning_rate": 4.994102054638711e-05,
      "loss": 1.5698,
      "step": 289
    },
    {
      "epoch": 0.04558942011043644,
      "grad_norm": 0.11978698521852493,
      "learning_rate": 4.9940595639041134e-05,
      "loss": 1.3933,
      "step": 290
    },
    {
      "epoch": 0.04574662500736898,
      "grad_norm": 0.11438622325658798,
      "learning_rate": 4.994016920841846e-05,
      "loss": 1.5005,
      "step": 291
    },
    {
      "epoch": 0.045903829904301516,
      "grad_norm": 0.11395915597677231,
      "learning_rate": 4.9939741254545155e-05,
      "loss": 1.4521,
      "step": 292
    },
    {
      "epoch": 0.04606103480123406,
      "grad_norm": 0.11659599095582962,
      "learning_rate": 4.993931177744734e-05,
      "loss": 1.5166,
      "step": 293
    },
    {
      "epoch": 0.046218239698166595,
      "grad_norm": 0.11053171753883362,
      "learning_rate": 4.9938880777151254e-05,
      "loss": 1.4459,
      "step": 294
    },
    {
      "epoch": 0.04637544459509914,
      "grad_norm": 0.11428084224462509,
      "learning_rate": 4.993844825368321e-05,
      "loss": 1.4448,
      "step": 295
    },
    {
      "epoch": 0.046532649492031675,
      "grad_norm": 0.10734150558710098,
      "learning_rate": 4.993801420706964e-05,
      "loss": 1.3388,
      "step": 296
    },
    {
      "epoch": 0.04668985438896422,
      "grad_norm": 0.11137369275093079,
      "learning_rate": 4.993757863733703e-05,
      "loss": 1.4155,
      "step": 297
    },
    {
      "epoch": 0.046847059285896754,
      "grad_norm": 0.1221408098936081,
      "learning_rate": 4.993714154451202e-05,
      "loss": 1.4884,
      "step": 298
    },
    {
      "epoch": 0.0470042641828293,
      "grad_norm": 0.11707969009876251,
      "learning_rate": 4.993670292862127e-05,
      "loss": 1.4605,
      "step": 299
    },
    {
      "epoch": 0.047161469079761834,
      "grad_norm": 0.11751751601696014,
      "learning_rate": 4.993626278969158e-05,
      "loss": 1.5538,
      "step": 300
    },
    {
      "epoch": 0.04731867397669438,
      "grad_norm": 0.11617731302976608,
      "learning_rate": 4.993582112774984e-05,
      "loss": 1.438,
      "step": 301
    },
    {
      "epoch": 0.04747587887362691,
      "grad_norm": 0.15164637565612793,
      "learning_rate": 4.993537794282302e-05,
      "loss": 1.4607,
      "step": 302
    },
    {
      "epoch": 0.047633083770559456,
      "grad_norm": 0.12434446811676025,
      "learning_rate": 4.9934933234938193e-05,
      "loss": 1.4167,
      "step": 303
    },
    {
      "epoch": 0.04779028866749199,
      "grad_norm": 0.12518739700317383,
      "learning_rate": 4.993448700412251e-05,
      "loss": 1.4003,
      "step": 304
    },
    {
      "epoch": 0.04794749356442453,
      "grad_norm": 0.11146944761276245,
      "learning_rate": 4.993403925040323e-05,
      "loss": 1.3913,
      "step": 305
    },
    {
      "epoch": 0.04810469846135707,
      "grad_norm": 0.11682326346635818,
      "learning_rate": 4.993358997380771e-05,
      "loss": 1.3415,
      "step": 306
    },
    {
      "epoch": 0.04826190335828961,
      "grad_norm": 0.1197504773736,
      "learning_rate": 4.993313917436336e-05,
      "loss": 1.515,
      "step": 307
    },
    {
      "epoch": 0.04841910825522215,
      "grad_norm": 0.14647473394870758,
      "learning_rate": 4.993268685209775e-05,
      "loss": 1.4529,
      "step": 308
    },
    {
      "epoch": 0.04857631315215469,
      "grad_norm": 0.12431525439023972,
      "learning_rate": 4.9932233007038484e-05,
      "loss": 1.5426,
      "step": 309
    },
    {
      "epoch": 0.04873351804908723,
      "grad_norm": 0.11715538799762726,
      "learning_rate": 4.9931777639213284e-05,
      "loss": 1.4615,
      "step": 310
    },
    {
      "epoch": 0.04889072294601977,
      "grad_norm": 0.12391876429319382,
      "learning_rate": 4.993132074864997e-05,
      "loss": 1.4138,
      "step": 311
    },
    {
      "epoch": 0.04904792784295231,
      "grad_norm": 0.11894181370735168,
      "learning_rate": 4.9930862335376444e-05,
      "loss": 1.4383,
      "step": 312
    },
    {
      "epoch": 0.049205132739884846,
      "grad_norm": 0.1225295439362526,
      "learning_rate": 4.9930402399420695e-05,
      "loss": 1.3847,
      "step": 313
    },
    {
      "epoch": 0.04936233763681739,
      "grad_norm": 0.11435995995998383,
      "learning_rate": 4.9929940940810825e-05,
      "loss": 1.4254,
      "step": 314
    },
    {
      "epoch": 0.049519542533749926,
      "grad_norm": 0.11988761276006699,
      "learning_rate": 4.9929477959575024e-05,
      "loss": 1.4787,
      "step": 315
    },
    {
      "epoch": 0.04967674743068247,
      "grad_norm": 0.11983373016119003,
      "learning_rate": 4.992901345574155e-05,
      "loss": 1.4341,
      "step": 316
    },
    {
      "epoch": 0.049833952327615005,
      "grad_norm": 0.13395054638385773,
      "learning_rate": 4.992854742933878e-05,
      "loss": 1.4315,
      "step": 317
    },
    {
      "epoch": 0.04999115722454754,
      "grad_norm": 0.12578143179416656,
      "learning_rate": 4.9928079880395186e-05,
      "loss": 1.4143,
      "step": 318
    },
    {
      "epoch": 0.050148362121480085,
      "grad_norm": 0.1401878446340561,
      "learning_rate": 4.992761080893932e-05,
      "loss": 1.4665,
      "step": 319
    },
    {
      "epoch": 0.05030556701841262,
      "grad_norm": 0.13048145174980164,
      "learning_rate": 4.9927140214999826e-05,
      "loss": 1.4266,
      "step": 320
    },
    {
      "epoch": 0.05030556701841262,
      "eval_loss": 1.450086236000061,
      "eval_runtime": 2316.1877,
      "eval_samples_per_second": 3.997,
      "eval_steps_per_second": 1.999,
      "step": 320
    },
    {
      "epoch": 0.050462771915345164,
      "grad_norm": 0.13121232390403748,
      "learning_rate": 4.992666809860545e-05,
      "loss": 1.4946,
      "step": 321
    },
    {
      "epoch": 0.0506199768122777,
      "grad_norm": 0.13547195494174957,
      "learning_rate": 4.9926194459785015e-05,
      "loss": 1.5532,
      "step": 322
    },
    {
      "epoch": 0.050777181709210244,
      "grad_norm": 0.11797169595956802,
      "learning_rate": 4.992571929856747e-05,
      "loss": 1.4118,
      "step": 323
    },
    {
      "epoch": 0.05093438660614278,
      "grad_norm": 0.12734922766685486,
      "learning_rate": 4.992524261498183e-05,
      "loss": 1.4427,
      "step": 324
    },
    {
      "epoch": 0.05109159150307532,
      "grad_norm": 0.12444902211427689,
      "learning_rate": 4.99247644090572e-05,
      "loss": 1.4369,
      "step": 325
    },
    {
      "epoch": 0.05124879640000786,
      "grad_norm": 0.12244518846273422,
      "learning_rate": 4.99242846808228e-05,
      "loss": 1.4587,
      "step": 326
    },
    {
      "epoch": 0.0514060012969404,
      "grad_norm": 0.12424397468566895,
      "learning_rate": 4.9923803430307916e-05,
      "loss": 1.3949,
      "step": 327
    },
    {
      "epoch": 0.05156320619387294,
      "grad_norm": 0.1352718621492386,
      "learning_rate": 4.9923320657541944e-05,
      "loss": 1.504,
      "step": 328
    },
    {
      "epoch": 0.05172041109080548,
      "grad_norm": 0.12855666875839233,
      "learning_rate": 4.992283636255438e-05,
      "loss": 1.4271,
      "step": 329
    },
    {
      "epoch": 0.05187761598773802,
      "grad_norm": 0.129829540848732,
      "learning_rate": 4.99223505453748e-05,
      "loss": 1.455,
      "step": 330
    },
    {
      "epoch": 0.052034820884670555,
      "grad_norm": 0.12780050933361053,
      "learning_rate": 4.992186320603286e-05,
      "loss": 1.4045,
      "step": 331
    },
    {
      "epoch": 0.0521920257816031,
      "grad_norm": 0.13515712320804596,
      "learning_rate": 4.992137434455834e-05,
      "loss": 1.4335,
      "step": 332
    },
    {
      "epoch": 0.052349230678535634,
      "grad_norm": 0.15026766061782837,
      "learning_rate": 4.99208839609811e-05,
      "loss": 1.5386,
      "step": 333
    },
    {
      "epoch": 0.05250643557546818,
      "grad_norm": 0.13422101736068726,
      "learning_rate": 4.992039205533108e-05,
      "loss": 1.454,
      "step": 334
    },
    {
      "epoch": 0.05266364047240071,
      "grad_norm": 0.13735777139663696,
      "learning_rate": 4.991989862763833e-05,
      "loss": 1.4415,
      "step": 335
    },
    {
      "epoch": 0.05282084536933326,
      "grad_norm": 0.12985137104988098,
      "learning_rate": 4.9919403677932994e-05,
      "loss": 1.385,
      "step": 336
    },
    {
      "epoch": 0.05297805026626579,
      "grad_norm": 0.1301167607307434,
      "learning_rate": 4.9918907206245285e-05,
      "loss": 1.4364,
      "step": 337
    },
    {
      "epoch": 0.053135255163198336,
      "grad_norm": 0.1407599002122879,
      "learning_rate": 4.991840921260553e-05,
      "loss": 1.4454,
      "step": 338
    },
    {
      "epoch": 0.05329246006013087,
      "grad_norm": 0.12763133645057678,
      "learning_rate": 4.9917909697044164e-05,
      "loss": 1.4008,
      "step": 339
    },
    {
      "epoch": 0.053449664957063416,
      "grad_norm": 0.1443052589893341,
      "learning_rate": 4.991740865959167e-05,
      "loss": 1.5184,
      "step": 340
    },
    {
      "epoch": 0.05360686985399595,
      "grad_norm": 0.13496418297290802,
      "learning_rate": 4.991690610027866e-05,
      "loss": 1.3888,
      "step": 341
    },
    {
      "epoch": 0.05376407475092849,
      "grad_norm": 0.12681293487548828,
      "learning_rate": 4.991640201913583e-05,
      "loss": 1.42,
      "step": 342
    },
    {
      "epoch": 0.05392127964786103,
      "grad_norm": 0.13178062438964844,
      "learning_rate": 4.9915896416193965e-05,
      "loss": 1.4178,
      "step": 343
    },
    {
      "epoch": 0.05407848454479357,
      "grad_norm": 0.14452503621578217,
      "learning_rate": 4.991538929148394e-05,
      "loss": 1.4248,
      "step": 344
    },
    {
      "epoch": 0.05423568944172611,
      "grad_norm": 0.1352955400943756,
      "learning_rate": 4.991488064503674e-05,
      "loss": 1.4304,
      "step": 345
    },
    {
      "epoch": 0.05439289433865865,
      "grad_norm": 0.14846469461917877,
      "learning_rate": 4.991437047688343e-05,
      "loss": 1.4784,
      "step": 346
    },
    {
      "epoch": 0.05455009923559119,
      "grad_norm": 0.12475849688053131,
      "learning_rate": 4.9913858787055156e-05,
      "loss": 1.4131,
      "step": 347
    },
    {
      "epoch": 0.054707304132523726,
      "grad_norm": 0.13835409283638,
      "learning_rate": 4.991334557558318e-05,
      "loss": 1.4913,
      "step": 348
    },
    {
      "epoch": 0.05486450902945627,
      "grad_norm": 0.13921529054641724,
      "learning_rate": 4.991283084249885e-05,
      "loss": 1.3713,
      "step": 349
    },
    {
      "epoch": 0.055021713926388806,
      "grad_norm": 0.13188250362873077,
      "learning_rate": 4.9912314587833586e-05,
      "loss": 1.3608,
      "step": 350
    },
    {
      "epoch": 0.05517891882332135,
      "grad_norm": 0.12457428872585297,
      "learning_rate": 4.991179681161895e-05,
      "loss": 1.4427,
      "step": 351
    },
    {
      "epoch": 0.055336123720253885,
      "grad_norm": 0.12452542781829834,
      "learning_rate": 4.9911277513886535e-05,
      "loss": 1.4179,
      "step": 352
    },
    {
      "epoch": 0.05549332861718643,
      "grad_norm": 0.14799195528030396,
      "learning_rate": 4.9910756694668074e-05,
      "loss": 1.4532,
      "step": 353
    },
    {
      "epoch": 0.055650533514118965,
      "grad_norm": 0.13485541939735413,
      "learning_rate": 4.991023435399538e-05,
      "loss": 1.4114,
      "step": 354
    },
    {
      "epoch": 0.0558077384110515,
      "grad_norm": 0.1422443389892578,
      "learning_rate": 4.990971049190034e-05,
      "loss": 1.377,
      "step": 355
    },
    {
      "epoch": 0.055964943307984044,
      "grad_norm": 0.12994804978370667,
      "learning_rate": 4.990918510841496e-05,
      "loss": 1.4474,
      "step": 356
    },
    {
      "epoch": 0.05612214820491658,
      "grad_norm": 0.1429785192012787,
      "learning_rate": 4.990865820357133e-05,
      "loss": 1.4435,
      "step": 357
    },
    {
      "epoch": 0.056279353101849124,
      "grad_norm": 0.12979790568351746,
      "learning_rate": 4.9908129777401625e-05,
      "loss": 1.4039,
      "step": 358
    },
    {
      "epoch": 0.05643655799878166,
      "grad_norm": 0.1332644671201706,
      "learning_rate": 4.990759982993812e-05,
      "loss": 1.4377,
      "step": 359
    },
    {
      "epoch": 0.0565937628957142,
      "grad_norm": 0.13796579837799072,
      "learning_rate": 4.99070683612132e-05,
      "loss": 1.3951,
      "step": 360
    },
    {
      "epoch": 0.05675096779264674,
      "grad_norm": 0.14315246045589447,
      "learning_rate": 4.9906535371259294e-05,
      "loss": 1.4042,
      "step": 361
    },
    {
      "epoch": 0.05690817268957928,
      "grad_norm": 0.1463768631219864,
|
"learning_rate": 4.9906000860108974e-05, |
|
"loss": 1.461, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.05706537758651182, |
|
"grad_norm": 0.14041170477867126, |
|
"learning_rate": 4.9905464827794884e-05, |
|
"loss": 1.4147, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.05722258248344436, |
|
"grad_norm": 0.19242697954177856, |
|
"learning_rate": 4.990492727434976e-05, |
|
"loss": 1.3435, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.0573797873803769, |
|
"grad_norm": 0.1556611955165863, |
|
"learning_rate": 4.990438819980644e-05, |
|
"loss": 1.4075, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.05753699227730944, |
|
"grad_norm": 0.13157570362091064, |
|
"learning_rate": 4.990384760419784e-05, |
|
"loss": 1.3334, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.05769419717424198, |
|
"grad_norm": 0.17953743040561676, |
|
"learning_rate": 4.990330548755698e-05, |
|
"loss": 1.4609, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.057851402071174514, |
|
"grad_norm": 0.14179491996765137, |
|
"learning_rate": 4.990276184991697e-05, |
|
"loss": 1.4344, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.05800860696810706, |
|
"grad_norm": 0.16522593796253204, |
|
"learning_rate": 4.9902216691311024e-05, |
|
"loss": 1.3794, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.05816581186503959, |
|
"grad_norm": 0.12736016511917114, |
|
"learning_rate": 4.9901670011772425e-05, |
|
"loss": 1.4167, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.058323016761972137, |
|
"grad_norm": 0.15869787335395813, |
|
"learning_rate": 4.990112181133456e-05, |
|
"loss": 1.4293, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.05848022165890467, |
|
"grad_norm": 0.14410504698753357, |
|
"learning_rate": 4.990057209003093e-05, |
|
"loss": 1.4357, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.058637426555837216, |
|
"grad_norm": 0.1567080020904541, |
|
"learning_rate": 4.9900020847895086e-05, |
|
"loss": 1.4146, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.05879463145276975, |
|
"grad_norm": 0.1430107057094574, |
|
"learning_rate": 4.989946808496071e-05, |
|
"loss": 1.3415, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.058951836349702295, |
|
"grad_norm": 0.146332785487175, |
|
"learning_rate": 4.989891380126156e-05, |
|
"loss": 1.496, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.05910904124663483, |
|
"grad_norm": 0.13674487173557281, |
|
"learning_rate": 4.989835799683149e-05, |
|
"loss": 1.3611, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.059266246143567375, |
|
"grad_norm": 0.1321984827518463, |
|
"learning_rate": 4.989780067170444e-05, |
|
"loss": 1.4695, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.05942345104049991, |
|
"grad_norm": 0.1535942554473877, |
|
"learning_rate": 4.9897241825914464e-05, |
|
"loss": 1.3564, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.059580655937432454, |
|
"grad_norm": 0.1538037806749344, |
|
"learning_rate": 4.989668145949568e-05, |
|
"loss": 1.3502, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.05973786083436499, |
|
"grad_norm": 0.15744829177856445, |
|
"learning_rate": 4.989611957248232e-05, |
|
"loss": 1.4318, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.05989506573129753, |
|
"grad_norm": 0.17178332805633545, |
|
"learning_rate": 4.98955561649087e-05, |
|
"loss": 1.4643, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.06005227062823007, |
|
"grad_norm": 0.15913072228431702, |
|
"learning_rate": 4.989499123680923e-05, |
|
"loss": 1.487, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.060209475525162606, |
|
"grad_norm": 0.15134060382843018, |
|
"learning_rate": 4.9894424788218415e-05, |
|
"loss": 1.4705, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.06036668042209515, |
|
"grad_norm": 0.13704389333724976, |
|
"learning_rate": 4.989385681917085e-05, |
|
"loss": 1.4756, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.060523885319027686, |
|
"grad_norm": 0.14025503396987915, |
|
"learning_rate": 4.989328732970122e-05, |
|
"loss": 1.443, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.06068109021596023, |
|
"grad_norm": 0.1822325438261032, |
|
"learning_rate": 4.9892716319844325e-05, |
|
"loss": 1.3996, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.060838295112892765, |
|
"grad_norm": 0.15639656782150269, |
|
"learning_rate": 4.989214378963502e-05, |
|
"loss": 1.3656, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.06099550000982531, |
|
"grad_norm": 0.15097728371620178, |
|
"learning_rate": 4.989156973910828e-05, |
|
"loss": 1.4055, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.061152704906757845, |
|
"grad_norm": 0.18977142870426178, |
|
"learning_rate": 4.989099416829917e-05, |
|
"loss": 1.4472, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.06130990980369039, |
|
"grad_norm": 0.1596304178237915, |
|
"learning_rate": 4.989041707724284e-05, |
|
"loss": 1.4373, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.061467114700622924, |
|
"grad_norm": 0.171820729970932, |
|
"learning_rate": 4.988983846597454e-05, |
|
"loss": 1.468, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.06162431959755547, |
|
"grad_norm": 0.14266176521778107, |
|
"learning_rate": 4.98892583345296e-05, |
|
"loss": 1.4037, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.061781524494488003, |
|
"grad_norm": 0.13375528156757355, |
|
"learning_rate": 4.988867668294346e-05, |
|
"loss": 1.437, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.06193872939142054, |
|
"grad_norm": 0.13332228362560272, |
|
"learning_rate": 4.988809351125165e-05, |
|
"loss": 1.3892, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.06209593428835308, |
|
"grad_norm": 0.17180980741977692, |
|
"learning_rate": 4.988750881948977e-05, |
|
"loss": 1.3494, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.06225313918528562, |
|
"grad_norm": 0.1419111043214798, |
|
"learning_rate": 4.988692260769355e-05, |
|
"loss": 1.3748, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.06241034408221816, |
|
"grad_norm": 0.17256620526313782, |
|
"learning_rate": 4.9886334875898776e-05, |
|
"loss": 1.3549, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.0625675489791507, |
|
"grad_norm": 0.2243422418832779, |
|
"learning_rate": 4.988574562414137e-05, |
|
"loss": 1.4465, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.06272475387608324, |
|
"grad_norm": 0.15700723230838776, |
|
"learning_rate": 4.9885154852457294e-05, |
|
"loss": 1.4477, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.06288195877301578, |
|
"grad_norm": 0.14497259259223938, |
|
"learning_rate": 4.988456256088264e-05, |
|
"loss": 1.3861, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.06303916366994831, |
|
"grad_norm": 0.14747034013271332, |
|
"learning_rate": 4.988396874945359e-05, |
|
"loss": 1.4206, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.06319636856688086, |
|
"grad_norm": 0.17671054601669312, |
|
"learning_rate": 4.98833734182064e-05, |
|
"loss": 1.2475, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.0633535734638134, |
|
"grad_norm": 0.16974316537380219, |
|
"learning_rate": 4.9882776567177446e-05, |
|
"loss": 1.4955, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.06351077836074594, |
|
"grad_norm": 0.15419775247573853, |
|
"learning_rate": 4.988217819640317e-05, |
|
"loss": 1.4209, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.06366798325767847, |
|
"grad_norm": 0.13987664878368378, |
|
"learning_rate": 4.988157830592012e-05, |
|
"loss": 1.456, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.06382518815461101, |
|
"grad_norm": 0.24560455977916718, |
|
"learning_rate": 4.988097689576493e-05, |
|
"loss": 1.3567, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.06398239305154356, |
|
"grad_norm": 0.13870076835155487, |
|
"learning_rate": 4.9880373965974334e-05, |
|
"loss": 1.3752, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.0641395979484761, |
|
"grad_norm": 0.16167718172073364, |
|
"learning_rate": 4.987976951658517e-05, |
|
"loss": 1.4766, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.06429680284540863, |
|
"grad_norm": 0.1700398474931717, |
|
"learning_rate": 4.9879163547634346e-05, |
|
"loss": 1.427, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.06445400774234117, |
|
"grad_norm": 0.15502458810806274, |
|
"learning_rate": 4.987855605915887e-05, |
|
"loss": 1.3965, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.06461121263927372, |
|
"grad_norm": 0.14834032952785492, |
|
"learning_rate": 4.987794705119584e-05, |
|
"loss": 1.4399, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.06476841753620625, |
|
"grad_norm": 0.22443649172782898, |
|
"learning_rate": 4.987733652378246e-05, |
|
"loss": 1.3736, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.06492562243313879, |
|
"grad_norm": 0.14396560192108154, |
|
"learning_rate": 4.9876724476956015e-05, |
|
"loss": 1.4648, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.06508282733007133, |
|
"grad_norm": 0.15352006256580353, |
|
"learning_rate": 4.987611091075389e-05, |
|
"loss": 1.4988, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.06524003222700388, |
|
"grad_norm": 0.13210074603557587, |
|
"learning_rate": 4.987549582521356e-05, |
|
"loss": 1.3705, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.06539723712393641, |
|
"grad_norm": 0.16056782007217407, |
|
"learning_rate": 4.98748792203726e-05, |
|
"loss": 1.3388, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.06555444202086895, |
|
"grad_norm": 0.18992343544960022, |
|
"learning_rate": 4.9874261096268647e-05, |
|
"loss": 1.3842, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.06571164691780149, |
|
"grad_norm": 0.1789916455745697, |
|
"learning_rate": 4.9873641452939466e-05, |
|
"loss": 1.3622, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.06586885181473402, |
|
"grad_norm": 0.21043789386749268, |
|
"learning_rate": 4.9873020290422915e-05, |
|
"loss": 1.3477, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.06602605671166657, |
|
"grad_norm": 0.15355254709720612, |
|
"learning_rate": 4.987239760875691e-05, |
|
"loss": 1.3643, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.06618326160859911, |
|
"grad_norm": 0.1433190107345581, |
|
"learning_rate": 4.9871773407979496e-05, |
|
"loss": 1.3753, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.06634046650553165, |
|
"grad_norm": 0.17479249835014343, |
|
"learning_rate": 4.987114768812879e-05, |
|
"loss": 1.3809, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.06649767140246418, |
|
"grad_norm": 0.186944842338562, |
|
"learning_rate": 4.987052044924302e-05, |
|
"loss": 1.3616, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.06665487629939673, |
|
"grad_norm": 0.15202952921390533, |
|
"learning_rate": 4.986989169136048e-05, |
|
"loss": 1.4479, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.06681208119632927, |
|
"grad_norm": 0.16295532882213593, |
|
"learning_rate": 4.9869261414519575e-05, |
|
"loss": 1.3713, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.0669692860932618, |
|
"grad_norm": 0.19577625393867493, |
|
"learning_rate": 4.986862961875881e-05, |
|
"loss": 1.4199, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.06712649099019434, |
|
"grad_norm": 0.22768542170524597, |
|
"learning_rate": 4.986799630411677e-05, |
|
"loss": 1.3529, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.06728369588712689, |
|
"grad_norm": 0.25184011459350586, |
|
"learning_rate": 4.986736147063212e-05, |
|
"loss": 1.3944, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.06744090078405943, |
|
"grad_norm": 0.15565118193626404, |
|
"learning_rate": 4.986672511834366e-05, |
|
"loss": 1.4505, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.06759810568099196, |
|
"grad_norm": 0.16559922695159912, |
|
"learning_rate": 4.986608724729024e-05, |
|
"loss": 1.3742, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.0677553105779245, |
|
"grad_norm": 0.14826242625713348, |
|
"learning_rate": 4.986544785751081e-05, |
|
"loss": 1.4008, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.06791251547485704, |
|
"grad_norm": 0.16543184220790863, |
|
"learning_rate": 4.986480694904444e-05, |
|
"loss": 1.3433, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.06806972037178959, |
|
"grad_norm": 0.15332931280136108, |
|
"learning_rate": 4.986416452193027e-05, |
|
"loss": 1.4459, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.06822692526872212, |
|
"grad_norm": 0.18880733847618103, |
|
"learning_rate": 4.986352057620752e-05, |
|
"loss": 1.3902, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.06838413016565466, |
|
"grad_norm": 0.1513829231262207, |
|
"learning_rate": 4.986287511191554e-05, |
|
"loss": 1.3485, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.0685413350625872, |
|
"grad_norm": 0.15241704881191254, |
|
"learning_rate": 4.9862228129093745e-05, |
|
"loss": 1.3051, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.06869853995951974, |
|
"grad_norm": 0.1956702321767807, |
|
"learning_rate": 4.986157962778165e-05, |
|
"loss": 1.4647, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.06885574485645228, |
|
"grad_norm": 0.2027936428785324, |
|
"learning_rate": 4.9860929608018866e-05, |
|
"loss": 1.3602, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.06901294975338482, |
|
"grad_norm": 0.1623186320066452, |
|
"learning_rate": 4.986027806984509e-05, |
|
"loss": 1.4154, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.06917015465031735, |
|
"grad_norm": 0.16111283004283905, |
|
"learning_rate": 4.985962501330011e-05, |
|
"loss": 1.4311, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.0693273595472499, |
|
"grad_norm": 0.16754299402236938, |
|
"learning_rate": 4.985897043842382e-05, |
|
"loss": 1.349, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.06948456444418244, |
|
"grad_norm": 0.1766330897808075, |
|
"learning_rate": 4.985831434525621e-05, |
|
"loss": 1.3714, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.06964176934111498, |
|
"grad_norm": 0.1742810308933258, |
|
"learning_rate": 4.985765673383733e-05, |
|
"loss": 1.4161, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.06979897423804751, |
|
"grad_norm": 0.17025281488895416, |
|
"learning_rate": 4.985699760420736e-05, |
|
"loss": 1.3925, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.06995617913498005, |
|
"grad_norm": 0.19201375544071198, |
|
"learning_rate": 4.985633695640655e-05, |
|
"loss": 1.4158, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.0701133840319126, |
|
"grad_norm": 0.1636267751455307, |
|
"learning_rate": 4.985567479047524e-05, |
|
"loss": 1.4071, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.07027058892884513, |
|
"grad_norm": 0.19676333665847778, |
|
"learning_rate": 4.9855011106453894e-05, |
|
"loss": 1.3449, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.07042779382577767, |
|
"grad_norm": 0.17712907493114471, |
|
"learning_rate": 4.985434590438303e-05, |
|
"loss": 1.3421, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.07058499872271021, |
|
"grad_norm": 0.18515101075172424, |
|
"learning_rate": 4.985367918430329e-05, |
|
"loss": 1.4051, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.07074220361964276, |
|
"grad_norm": 0.17168915271759033, |
|
"learning_rate": 4.985301094625538e-05, |
|
"loss": 1.3093, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.0708994085165753, |
|
"grad_norm": 0.1891397386789322, |
|
"learning_rate": 4.9852341190280127e-05, |
|
"loss": 1.3075, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.07105661341350783, |
|
"grad_norm": 0.17731457948684692, |
|
"learning_rate": 4.985166991641843e-05, |
|
"loss": 1.3986, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.07121381831044037, |
|
"grad_norm": 0.18817296624183655, |
|
"learning_rate": 4.985099712471129e-05, |
|
"loss": 1.3531, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.07137102320737292, |
|
"grad_norm": 0.1782791018486023, |
|
"learning_rate": 4.9850322815199795e-05, |
|
"loss": 1.4064, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.07152822810430545, |
|
"grad_norm": 0.18053874373435974, |
|
"learning_rate": 4.984964698792514e-05, |
|
"loss": 1.4607, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.07168543300123799, |
|
"grad_norm": 0.286338746547699, |
|
"learning_rate": 4.984896964292858e-05, |
|
"loss": 1.3036, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.07184263789817052, |
|
"grad_norm": 0.2560707926750183, |
|
"learning_rate": 4.98482907802515e-05, |
|
"loss": 1.3428, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.07199984279510306, |
|
"grad_norm": 0.19296897947788239, |
|
"learning_rate": 4.984761039993537e-05, |
|
"loss": 1.3502, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.07215704769203561, |
|
"grad_norm": 0.19685949385166168, |
|
"learning_rate": 4.9846928502021725e-05, |
|
"loss": 1.4015, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.07231425258896815, |
|
"grad_norm": 0.1548481583595276, |
|
"learning_rate": 4.984624508655223e-05, |
|
"loss": 1.3698, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.07247145748590068, |
|
"grad_norm": 0.16076034307479858, |
|
"learning_rate": 4.984556015356862e-05, |
|
"loss": 1.3627, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.07262866238283322, |
|
"grad_norm": 0.18571603298187256, |
|
"learning_rate": 4.9844873703112726e-05, |
|
"loss": 1.3506, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.07278586727976577, |
|
"grad_norm": 0.1540035605430603, |
|
"learning_rate": 4.984418573522648e-05, |
|
"loss": 1.4483, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.0729430721766983, |
|
"grad_norm": 0.1730145364999771, |
|
"learning_rate": 4.984349624995188e-05, |
|
"loss": 1.3678, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.07310027707363084, |
|
"grad_norm": 0.26254212856292725, |
|
"learning_rate": 4.984280524733107e-05, |
|
"loss": 1.401, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.07325748197056338, |
|
"grad_norm": 0.2079063057899475, |
|
"learning_rate": 4.984211272740623e-05, |
|
"loss": 1.3655, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.07341468686749593, |
|
"grad_norm": 0.21711499989032745, |
|
"learning_rate": 4.9841418690219653e-05, |
|
"loss": 1.4011, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.07357189176442847, |
|
"grad_norm": 0.18226252496242523, |
|
"learning_rate": 4.984072313581375e-05, |
|
"loss": 1.4213, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.073729096661361, |
|
"grad_norm": 0.1463780552148819, |
|
"learning_rate": 4.9840026064230984e-05, |
|
"loss": 1.4519, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.07388630155829354, |
|
"grad_norm": 0.18232892453670502, |
|
"learning_rate": 4.983932747551394e-05, |
|
"loss": 1.3657, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.07404350645522607, |
|
"grad_norm": 0.19644559919834137, |
|
"learning_rate": 4.9838627369705285e-05, |
|
"loss": 1.3988, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.07420071135215862, |
|
"grad_norm": 0.16292576491832733, |
|
"learning_rate": 4.983792574684776e-05, |
|
"loss": 1.4369, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.07435791624909116, |
|
"grad_norm": 0.2244543433189392, |
|
"learning_rate": 4.983722260698425e-05, |
|
"loss": 1.4269, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.0745151211460237, |
|
"grad_norm": 0.2582489848136902, |
|
"learning_rate": 4.9836517950157666e-05, |
|
"loss": 1.3986, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.07467232604295623, |
|
"grad_norm": 0.15564194321632385, |
|
"learning_rate": 4.983581177641108e-05, |
|
"loss": 1.3871, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.07482953093988878, |
|
"grad_norm": 0.2301008552312851, |
|
"learning_rate": 4.9835104085787596e-05, |
|
"loss": 1.3572, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.07498673583682132, |
|
"grad_norm": 0.21603424847126007, |
|
"learning_rate": 4.9834394878330444e-05, |
|
"loss": 1.3803, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.07514394073375386, |
|
"grad_norm": 0.16744717955589294, |
|
"learning_rate": 4.9833684154082937e-05, |
|
"loss": 1.4233, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.07530114563068639, |
|
"grad_norm": 0.23016415536403656, |
|
"learning_rate": 4.98329719130885e-05, |
|
"loss": 1.3962, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.07545835052761894, |
|
"grad_norm": 0.19687114655971527, |
|
"learning_rate": 4.983225815539061e-05, |
|
"loss": 1.3667, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.07545835052761894, |
|
"eval_loss": 1.3748993873596191, |
|
"eval_runtime": 2315.5952, |
|
"eval_samples_per_second": 3.998, |
|
"eval_steps_per_second": 1.999, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.07561555542455148, |
|
"grad_norm": 0.1833205670118332, |
|
"learning_rate": 4.9831542881032884e-05, |
|
"loss": 1.4365, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.07577276032148401, |
|
"grad_norm": 0.17124423384666443, |
|
"learning_rate": 4.983082609005899e-05, |
|
"loss": 1.3641, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.07592996521841655, |
|
"grad_norm": 0.17352670431137085, |
|
"learning_rate": 4.9830107782512715e-05, |
|
"loss": 1.3415, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.07608717011534909, |
|
"grad_norm": 0.20768220722675323, |
|
"learning_rate": 4.982938795843793e-05, |
|
"loss": 1.3261, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.07624437501228164, |
|
"grad_norm": 0.21459853649139404, |
|
"learning_rate": 4.982866661787859e-05, |
|
"loss": 1.4185, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.07640157990921417, |
|
"grad_norm": 0.26912233233451843, |
|
"learning_rate": 4.982794376087877e-05, |
|
"loss": 1.3941, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.07655878480614671, |
|
"grad_norm": 0.28497114777565, |
|
"learning_rate": 4.982721938748261e-05, |
|
"loss": 1.3201, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.07671598970307925, |
|
"grad_norm": 0.15378472208976746, |
|
"learning_rate": 4.982649349773435e-05, |
|
"loss": 1.3615, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.0768731946000118, |
|
"grad_norm": 0.16169893741607666, |
|
"learning_rate": 4.982576609167831e-05, |
|
"loss": 1.3342, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.07703039949694433, |
|
"grad_norm": 0.24693650007247925, |
|
"learning_rate": 4.982503716935896e-05, |
|
"loss": 1.3788, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.07718760439387687, |
|
"grad_norm": 0.1769181787967682, |
|
"learning_rate": 4.982430673082077e-05, |
|
"loss": 1.3664, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.0773448092908094, |
|
"grad_norm": 0.26325106620788574, |
|
"learning_rate": 4.982357477610839e-05, |
|
"loss": 1.3173, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.07750201418774195, |
|
"grad_norm": 0.2063319832086563, |
|
"learning_rate": 4.9822841305266506e-05, |
|
"loss": 1.4125, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.07765921908467449, |
|
"grad_norm": 0.29141879081726074, |
|
"learning_rate": 4.982210631833992e-05, |
|
"loss": 1.3596, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.07781642398160703, |
|
"grad_norm": 0.18967591226100922, |
|
"learning_rate": 4.982136981537352e-05, |
|
"loss": 1.4128, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.07797362887853956, |
|
"grad_norm": 0.2291795313358307, |
|
"learning_rate": 4.9820631796412287e-05, |
|
"loss": 1.3772, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.0781308337754721, |
|
"grad_norm": 0.200834721326828, |
|
"learning_rate": 4.98198922615013e-05, |
|
"loss": 1.369, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.07828803867240465, |
|
"grad_norm": 0.22960609197616577, |
|
"learning_rate": 4.9819151210685736e-05, |
|
"loss": 1.3979, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.07844524356933719, |
|
"grad_norm": 0.17247427999973297, |
|
"learning_rate": 4.981840864401084e-05, |
|
"loss": 1.3927, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.07860244846626972, |
|
"grad_norm": 0.2623608112335205, |
|
"learning_rate": 4.981766456152198e-05, |
|
"loss": 1.3919, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.07875965336320226, |
|
"grad_norm": 0.19911788403987885, |
|
"learning_rate": 4.981691896326459e-05, |
|
"loss": 1.3925, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.07891685826013481, |
|
"grad_norm": 0.24869734048843384, |
|
"learning_rate": 4.9816171849284205e-05, |
|
"loss": 1.3562, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.07907406315706735, |
|
"grad_norm": 0.31372350454330444, |
|
"learning_rate": 4.981542321962647e-05, |
|
"loss": 1.3211, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.07923126805399988, |
|
"grad_norm": 0.21760910749435425, |
|
"learning_rate": 4.981467307433709e-05, |
|
"loss": 1.3042, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.07938847295093242, |
|
"grad_norm": 0.2469843477010727, |
|
"learning_rate": 4.9813921413461906e-05, |
|
"loss": 1.2831, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.07954567784786497, |
|
"grad_norm": 0.24319148063659668, |
|
"learning_rate": 4.981316823704681e-05, |
|
"loss": 1.2703, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.0797028827447975, |
|
"grad_norm": 0.19718031585216522, |
|
"learning_rate": 4.98124135451378e-05, |
|
"loss": 1.3258, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.07986008764173004, |
|
"grad_norm": 0.17459236085414886, |
|
"learning_rate": 4.981165733778098e-05, |
|
"loss": 1.4248, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.08001729253866258, |
|
"grad_norm": 0.17684616148471832, |
|
"learning_rate": 4.981089961502253e-05, |
|
"loss": 1.3939, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.08017449743559511, |
|
"grad_norm": 0.17499729990959167, |
|
"learning_rate": 4.981014037690874e-05, |
|
"loss": 1.4156, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.08033170233252766, |
|
"grad_norm": 0.1901170015335083, |
|
"learning_rate": 4.9809379623485964e-05, |
|
"loss": 1.4209, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.0804889072294602, |
|
"grad_norm": 0.18230682611465454, |
|
"learning_rate": 4.980861735480067e-05, |
|
"loss": 1.4607, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.08064611212639274, |
|
"grad_norm": 0.22843636572360992, |
|
"learning_rate": 4.9807853570899427e-05, |
|
"loss": 1.3671, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.08080331702332527, |
|
"grad_norm": 0.2288489192724228, |
|
"learning_rate": 4.980708827182887e-05, |
|
"loss": 1.3657, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.08096052192025782, |
|
"grad_norm": 0.19647593796253204, |
|
"learning_rate": 4.980632145763575e-05, |
|
"loss": 1.4079, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.08111772681719036, |
|
"grad_norm": 0.20980435609817505, |
|
"learning_rate": 4.98055531283669e-05, |
|
"loss": 1.3746, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.0812749317141229, |
|
"grad_norm": 0.19381123781204224, |
|
"learning_rate": 4.980478328406923e-05, |
|
"loss": 1.3986, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.08143213661105543, |
|
"grad_norm": 0.2224361151456833, |
|
"learning_rate": 4.980401192478979e-05, |
|
"loss": 1.3082, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.08158934150798797, |
|
"grad_norm": 0.20567384362220764, |
|
"learning_rate": 4.9803239050575664e-05, |
|
"loss": 1.4417, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.08174654640492052, |
|
"grad_norm": 0.22890503704547882, |
|
"learning_rate": 4.9802464661474074e-05, |
|
"loss": 1.3034, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.08190375130185305, |
|
"grad_norm": 0.23220910131931305, |
|
"learning_rate": 4.9801688757532304e-05, |
|
"loss": 1.3705, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.08206095619878559, |
|
"grad_norm": 0.29084959626197815, |
|
"learning_rate": 4.980091133879775e-05, |
|
"loss": 1.3246, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.08221816109571813, |
|
"grad_norm": 0.15776456892490387, |
|
"learning_rate": 4.9800132405317895e-05, |
|
"loss": 1.4311, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.08237536599265068, |
|
"grad_norm": 0.2636071443557739, |
|
"learning_rate": 4.9799351957140314e-05, |
|
"loss": 1.3265, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.08253257088958321, |
|
"grad_norm": 0.20042134821414948, |
|
"learning_rate": 4.979856999431266e-05, |
|
"loss": 1.3257, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.08268977578651575, |
|
"grad_norm": 0.24039289355278015, |
|
"learning_rate": 4.9797786516882714e-05, |
|
"loss": 1.3999, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.08284698068344828, |
|
"grad_norm": 0.16932524740695953, |
|
"learning_rate": 4.9797001524898315e-05, |
|
"loss": 1.4113, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.08300418558038083, |
|
"grad_norm": 0.2101370096206665, |
|
"learning_rate": 4.97962150184074e-05, |
|
"loss": 1.3973, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.08316139047731337, |
|
"grad_norm": 0.20983585715293884, |
|
"learning_rate": 4.979542699745803e-05, |
|
"loss": 1.3255, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.08331859537424591, |
|
"grad_norm": 0.20477800071239471, |
|
"learning_rate": 4.97946374620983e-05, |
|
"loss": 1.4349, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.08347580027117844, |
|
"grad_norm": 0.22637289762496948, |
|
"learning_rate": 4.979384641237647e-05, |
|
"loss": 1.3263, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.08363300516811098, |
|
"grad_norm": 0.20332221686840057, |
|
"learning_rate": 4.9793053848340835e-05, |
|
"loss": 1.3411, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.08379021006504353, |
|
"grad_norm": 0.22744616866111755, |
|
"learning_rate": 4.979225977003979e-05, |
|
"loss": 1.4042, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.08394741496197607, |
|
"grad_norm": 0.20091576874256134, |
|
"learning_rate": 4.979146417752185e-05, |
|
"loss": 1.3218, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.0841046198589086, |
|
"grad_norm": 0.2225920408964157, |
|
"learning_rate": 4.9790667070835604e-05, |
|
"loss": 1.4223, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.08426182475584114, |
|
"grad_norm": 0.20447570085525513, |
|
"learning_rate": 4.9789868450029745e-05, |
|
"loss": 1.3884, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.08441902965277369, |
|
"grad_norm": 0.22765719890594482, |
|
"learning_rate": 4.9789068315153035e-05, |
|
"loss": 1.3575, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.08457623454970623, |
|
"grad_norm": 0.18886259198188782, |
|
"learning_rate": 4.9788266666254343e-05, |
|
"loss": 1.2737, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.08473343944663876, |
|
"grad_norm": 0.26551586389541626, |
|
"learning_rate": 4.978746350338264e-05, |
|
"loss": 1.3867, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.0848906443435713, |
|
"grad_norm": 0.29268744587898254, |
|
"learning_rate": 4.9786658826586975e-05, |
|
"loss": 1.4266, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.08504784924050385, |
|
"grad_norm": 0.2537211775779724, |
|
"learning_rate": 4.97858526359165e-05, |
|
"loss": 1.3402, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.08520505413743638, |
|
"grad_norm": 0.20287925004959106, |
|
"learning_rate": 4.978504493142045e-05, |
|
"loss": 1.3148, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.08536225903436892, |
|
"grad_norm": 0.18584851920604706, |
|
"learning_rate": 4.978423571314814e-05, |
|
"loss": 1.3293, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.08551946393130146, |
|
"grad_norm": 0.1944153755903244, |
|
"learning_rate": 4.978342498114903e-05, |
|
"loss": 1.4084, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.08567666882823399, |
|
"grad_norm": 0.18139739334583282, |
|
"learning_rate": 4.978261273547261e-05, |
|
"loss": 1.2734, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.08583387372516654, |
|
"grad_norm": 0.20824116468429565, |
|
"learning_rate": 4.97817989761685e-05, |
|
"loss": 1.3346, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.08599107862209908, |
|
"grad_norm": 0.16180047392845154, |
|
"learning_rate": 4.978098370328639e-05, |
|
"loss": 1.4547, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.08614828351903162, |
|
"grad_norm": 0.17156392335891724, |
|
"learning_rate": 4.978016691687609e-05, |
|
"loss": 1.366, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.08630548841596415, |
|
"grad_norm": 0.17913401126861572, |
|
"learning_rate": 4.977934861698746e-05, |
|
"loss": 1.2771, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.0864626933128967, |
|
"grad_norm": 0.17393502593040466, |
|
"learning_rate": 4.977852880367051e-05, |
|
"loss": 1.3061, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.08661989820982924, |
|
"grad_norm": 0.21741637587547302, |
|
"learning_rate": 4.97777074769753e-05, |
|
"loss": 1.3232, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.08677710310676177, |
|
"grad_norm": 0.26123344898223877, |
|
"learning_rate": 4.977688463695198e-05, |
|
"loss": 1.2678, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.08693430800369431, |
|
"grad_norm": 0.2508600354194641, |
|
"learning_rate": 4.9776060283650826e-05, |
|
"loss": 1.4543, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.08709151290062686, |
|
"grad_norm": 0.18527132272720337, |
|
"learning_rate": 4.977523441712217e-05, |
|
"loss": 1.3359, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.0872487177975594, |
|
"grad_norm": 0.24495406448841095, |
|
"learning_rate": 4.977440703741646e-05, |
|
"loss": 1.2892, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.08740592269449193, |
|
"grad_norm": 0.22759339213371277, |
|
"learning_rate": 4.9773578144584235e-05, |
|
"loss": 1.2212, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.08756312759142447, |
|
"grad_norm": 0.1627693474292755, |
|
"learning_rate": 4.977274773867611e-05, |
|
"loss": 1.3461, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.087720332488357, |
|
"grad_norm": 0.2068985551595688, |
|
"learning_rate": 4.9771915819742804e-05, |
|
"loss": 1.3348, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.08787753738528956, |
|
"grad_norm": 0.19731195271015167, |
|
"learning_rate": 4.9771082387835135e-05, |
|
"loss": 1.3727, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.08803474228222209, |
|
"grad_norm": 0.26571184396743774, |
|
"learning_rate": 4.977024744300399e-05, |
|
"loss": 1.3911, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.08819194717915463, |
|
"grad_norm": 0.23141519725322723, |
|
"learning_rate": 4.976941098530039e-05, |
|
"loss": 1.3978, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.08834915207608716, |
|
"grad_norm": 0.2507224380970001, |
|
"learning_rate": 4.97685730147754e-05, |
|
"loss": 1.3017, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.08850635697301971, |
|
"grad_norm": 0.2453109323978424, |
|
"learning_rate": 4.976773353148022e-05, |
|
"loss": 1.2977, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.08866356186995225, |
|
"grad_norm": 0.2600953280925751, |
|
"learning_rate": 4.9766892535466105e-05, |
|
"loss": 1.4015, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.08882076676688479, |
|
"grad_norm": 0.19863371551036835, |
|
"learning_rate": 4.9766050026784416e-05, |
|
"loss": 1.3593, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.08897797166381732, |
|
"grad_norm": 0.2115338146686554, |
|
"learning_rate": 4.976520600548663e-05, |
|
"loss": 1.2928, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.08913517656074987, |
|
"grad_norm": 0.18994684517383575, |
|
"learning_rate": 4.976436047162429e-05, |
|
"loss": 1.3506, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.08929238145768241, |
|
"grad_norm": 0.22891771793365479, |
|
"learning_rate": 4.976351342524903e-05, |
|
"loss": 1.4449, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.08944958635461495, |
|
"grad_norm": 0.19313135743141174, |
|
"learning_rate": 4.976266486641259e-05, |
|
"loss": 1.2916, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.08960679125154748, |
|
"grad_norm": 0.17697346210479736, |
|
"learning_rate": 4.976181479516679e-05, |
|
"loss": 1.3696, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.08976399614848002, |
|
"grad_norm": 0.22902925312519073, |
|
"learning_rate": 4.976096321156356e-05, |
|
"loss": 1.3688, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.08992120104541257, |
|
"grad_norm": 0.25305554270744324, |
|
"learning_rate": 4.97601101156549e-05, |
|
"loss": 1.3057, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.0900784059423451, |
|
"grad_norm": 0.23255370557308197, |
|
"learning_rate": 4.975925550749293e-05, |
|
"loss": 1.3571, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.09023561083927764, |
|
"grad_norm": 0.25259101390838623, |
|
"learning_rate": 4.9758399387129834e-05, |
|
"loss": 1.3152, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.09039281573621018, |
|
"grad_norm": 0.26062390208244324, |
|
"learning_rate": 4.97575417546179e-05, |
|
"loss": 1.3042, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.09055002063314273, |
|
"grad_norm": 0.16536732017993927, |
|
"learning_rate": 4.9756682610009515e-05, |
|
"loss": 1.2797, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.09070722553007526, |
|
"grad_norm": 0.19088499248027802, |
|
"learning_rate": 4.9755821953357144e-05, |
|
"loss": 1.3774, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.0908644304270078, |
|
"grad_norm": 0.2181147336959839, |
|
"learning_rate": 4.975495978471336e-05, |
|
"loss": 1.3364, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.09102163532394034, |
|
"grad_norm": 0.18012750148773193, |
|
"learning_rate": 4.975409610413082e-05, |
|
"loss": 1.3852, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.09117884022087289, |
|
"grad_norm": 0.18108834326267242, |
|
"learning_rate": 4.975323091166227e-05, |
|
"loss": 1.3214, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.09133604511780542, |
|
"grad_norm": 0.25102898478507996, |
|
"learning_rate": 4.975236420736056e-05, |
|
"loss": 1.3199, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.09149325001473796, |
|
"grad_norm": 0.20121383666992188, |
|
"learning_rate": 4.9751495991278626e-05, |
|
"loss": 1.3328, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.0916504549116705, |
|
"grad_norm": 0.24183815717697144, |
|
"learning_rate": 4.975062626346948e-05, |
|
"loss": 1.3881, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.09180765980860303, |
|
"grad_norm": 0.23274902999401093, |
|
"learning_rate": 4.974975502398626e-05, |
|
"loss": 1.3674, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.09196486470553558, |
|
"grad_norm": 0.224375382065773, |
|
"learning_rate": 4.9748882272882165e-05, |
|
"loss": 1.362, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.09212206960246812, |
|
"grad_norm": 0.2743482291698456, |
|
"learning_rate": 4.97480080102105e-05, |
|
"loss": 1.3028, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.09227927449940065, |
|
"grad_norm": 0.30631452798843384, |
|
"learning_rate": 4.974713223602467e-05, |
|
"loss": 1.3541, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.09243647939633319, |
|
"grad_norm": 0.1999395489692688, |
|
"learning_rate": 4.9746254950378166e-05, |
|
"loss": 1.3515, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.09259368429326574, |
|
"grad_norm": 0.3005799353122711, |
|
"learning_rate": 4.974537615332455e-05, |
|
"loss": 1.3872, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.09275088919019828, |
|
"grad_norm": 0.21795117855072021, |
|
"learning_rate": 4.9744495844917524e-05, |
|
"loss": 1.2804, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.09290809408713081, |
|
"grad_norm": 0.2832283675670624, |
|
"learning_rate": 4.9743614025210825e-05, |
|
"loss": 1.3209, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.09306529898406335, |
|
"grad_norm": 0.21391350030899048, |
|
"learning_rate": 4.9742730694258334e-05, |
|
"loss": 1.3041, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.0932225038809959, |
|
"grad_norm": 0.21651242673397064, |
|
"learning_rate": 4.974184585211399e-05, |
|
"loss": 1.2529, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.09337970877792844, |
|
"grad_norm": 0.22796374559402466, |
|
"learning_rate": 4.974095949883183e-05, |
|
"loss": 1.3999, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.09353691367486097, |
|
"grad_norm": 0.21013247966766357, |
|
"learning_rate": 4.9740071634466e-05, |
|
"loss": 1.3626, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.09369411857179351, |
|
"grad_norm": 0.31589969992637634, |
|
"learning_rate": 4.973918225907073e-05, |
|
"loss": 1.4096, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.09385132346872604, |
|
"grad_norm": 0.2923184931278229, |
|
"learning_rate": 4.973829137270033e-05, |
|
"loss": 1.2116, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.0940085283656586, |
|
"grad_norm": 0.2147187739610672, |
|
"learning_rate": 4.9737398975409224e-05, |
|
"loss": 1.3909, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.09416573326259113, |
|
"grad_norm": 0.20287127792835236, |
|
"learning_rate": 4.9736505067251896e-05, |
|
"loss": 1.3621, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.09432293815952367, |
|
"grad_norm": 0.24703876674175262, |
|
"learning_rate": 4.9735609648282965e-05, |
|
"loss": 1.3525, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.0944801430564562, |
|
"grad_norm": 0.25060412287712097, |
|
"learning_rate": 4.97347127185571e-05, |
|
"loss": 1.36, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.09463734795338875, |
|
"grad_norm": 0.214557945728302, |
|
"learning_rate": 4.9733814278129096e-05, |
|
"loss": 1.4372, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.09479455285032129, |
|
"grad_norm": 0.1984785795211792, |
|
"learning_rate": 4.9732914327053825e-05, |
|
"loss": 1.3191, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.09495175774725383, |
|
"grad_norm": 0.2099440097808838, |
|
"learning_rate": 4.9732012865386244e-05, |
|
"loss": 1.313, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.09510896264418636, |
|
"grad_norm": 0.20393683016300201, |
|
"learning_rate": 4.9731109893181423e-05, |
|
"loss": 1.3465, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.09526616754111891, |
|
"grad_norm": 0.25346165895462036, |
|
"learning_rate": 4.97302054104945e-05, |
|
"loss": 1.3379, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.09542337243805145, |
|
"grad_norm": 0.21876423060894012, |
|
"learning_rate": 4.9729299417380725e-05, |
|
"loss": 1.2746, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.09558057733498398, |
|
"grad_norm": 0.21032990515232086, |
|
"learning_rate": 4.9728391913895436e-05, |
|
"loss": 1.3215, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.09573778223191652, |
|
"grad_norm": 0.2550762891769409, |
|
"learning_rate": 4.9727482900094044e-05, |
|
"loss": 1.3239, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.09589498712884906, |
|
"grad_norm": 0.31706327199935913, |
|
"learning_rate": 4.972657237603208e-05, |
|
"loss": 1.3467, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.09605219202578161, |
|
"grad_norm": 0.17176879942417145, |
|
"learning_rate": 4.972566034176516e-05, |
|
"loss": 1.3815, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.09620939692271414, |
|
"grad_norm": 0.22620820999145508, |
|
"learning_rate": 4.972474679734898e-05, |
|
"loss": 1.2593, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.09636660181964668, |
|
"grad_norm": 0.18735802173614502, |
|
"learning_rate": 4.9723831742839334e-05, |
|
"loss": 1.424, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.09652380671657922, |
|
"grad_norm": 0.2582910656929016, |
|
"learning_rate": 4.972291517829211e-05, |
|
"loss": 1.2741, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.09668101161351177, |
|
"grad_norm": 0.19907522201538086, |
|
"learning_rate": 4.97219971037633e-05, |
|
"loss": 1.2045, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.0968382165104443, |
|
"grad_norm": 0.20451949536800385, |
|
"learning_rate": 4.972107751930896e-05, |
|
"loss": 1.3026, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.09699542140737684, |
|
"grad_norm": 0.29682090878486633, |
|
"learning_rate": 4.972015642498527e-05, |
|
"loss": 1.3789, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.09715262630430938, |
|
"grad_norm": 0.27210530638694763, |
|
"learning_rate": 4.9719233820848476e-05, |
|
"loss": 1.3968, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.09730983120124193, |
|
"grad_norm": 0.24241842329502106, |
|
"learning_rate": 4.971830970695493e-05, |
|
"loss": 1.2763, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.09746703609817446, |
|
"grad_norm": 0.2535828649997711, |
|
"learning_rate": 4.9717384083361075e-05, |
|
"loss": 1.3463, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.097624240995107, |
|
"grad_norm": 0.22121217846870422, |
|
"learning_rate": 4.971645695012344e-05, |
|
"loss": 1.3384, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.09778144589203953, |
|
"grad_norm": 0.28840744495391846, |
|
"learning_rate": 4.971552830729866e-05, |
|
"loss": 1.2418, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.09793865078897207, |
|
"grad_norm": 0.1682664453983307, |
|
"learning_rate": 4.971459815494345e-05, |
|
"loss": 1.3658, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.09809585568590462, |
|
"grad_norm": 0.24955761432647705, |
|
"learning_rate": 4.971366649311461e-05, |
|
"loss": 1.2372, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.09825306058283716, |
|
"grad_norm": 0.2756117582321167, |
|
"learning_rate": 4.971273332186906e-05, |
|
"loss": 1.3212, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.09841026547976969, |
|
"grad_norm": 0.2370867133140564, |
|
"learning_rate": 4.971179864126377e-05, |
|
"loss": 1.2879, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.09856747037670223, |
|
"grad_norm": 0.20566895604133606, |
|
"learning_rate": 4.9710862451355846e-05, |
|
"loss": 1.4243, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.09872467527363478, |
|
"grad_norm": 0.1923399120569229, |
|
"learning_rate": 4.970992475220246e-05, |
|
"loss": 1.2639, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.09888188017056732, |
|
"grad_norm": 0.17972147464752197, |
|
"learning_rate": 4.9708985543860896e-05, |
|
"loss": 1.3366, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.09903908506749985, |
|
"grad_norm": 0.1936875432729721, |
|
"learning_rate": 4.97080448263885e-05, |
|
"loss": 1.3496, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.09919628996443239, |
|
"grad_norm": 0.24409984052181244, |
|
"learning_rate": 4.9707102599842735e-05, |
|
"loss": 1.3268, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.09935349486136494, |
|
"grad_norm": 0.21084928512573242, |
|
"learning_rate": 4.970615886428115e-05, |
|
"loss": 1.3421, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.09951069975829747, |
|
"grad_norm": 0.21201804280281067, |
|
"learning_rate": 4.970521361976138e-05, |
|
"loss": 1.3189, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.09966790465523001, |
|
"grad_norm": 0.2698107063770294, |
|
"learning_rate": 4.9704266866341156e-05, |
|
"loss": 1.2193, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.09982510955216255, |
|
"grad_norm": 0.27072674036026, |
|
"learning_rate": 4.970331860407831e-05, |
|
"loss": 1.2694, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.09998231444909508, |
|
"grad_norm": 0.26514896750450134, |
|
"learning_rate": 4.9702368833030754e-05, |
|
"loss": 1.2175, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.10013951934602763, |
|
"grad_norm": 0.21645940840244293, |
|
"learning_rate": 4.970141755325649e-05, |
|
"loss": 1.3099, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.10029672424296017, |
|
"grad_norm": 0.27035385370254517, |
|
"learning_rate": 4.970046476481363e-05, |
|
"loss": 1.2723, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.1004539291398927, |
|
"grad_norm": 0.20999298989772797, |
|
"learning_rate": 4.969951046776036e-05, |
|
"loss": 1.369, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.10061113403682524, |
|
"grad_norm": 0.18554192781448364, |
|
"learning_rate": 4.969855466215497e-05, |
|
"loss": 1.3483, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.10061113403682524, |
|
"eval_loss": 1.314468502998352, |
|
"eval_runtime": 2275.7115, |
|
"eval_samples_per_second": 4.068, |
|
"eval_steps_per_second": 2.034, |
|
"step": 640 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 12722, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 160, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 3.278323798025175e+18, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|