{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 468,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004273504273504274,
      "grad_norm": 20.463779905967794,
      "learning_rate": 4.2553191489361704e-07,
      "loss": 1.8792,
      "step": 1
    },
    {
      "epoch": 0.021367521367521368,
      "grad_norm": 18.25002365272532,
      "learning_rate": 2.1276595744680853e-06,
      "loss": 1.8169,
      "step": 5
    },
    {
      "epoch": 0.042735042735042736,
      "grad_norm": 5.520763657053264,
      "learning_rate": 4.255319148936171e-06,
      "loss": 1.5621,
      "step": 10
    },
    {
      "epoch": 0.0641025641025641,
      "grad_norm": 2.4039272335616273,
      "learning_rate": 6.382978723404256e-06,
      "loss": 1.4414,
      "step": 15
    },
    {
      "epoch": 0.08547008547008547,
      "grad_norm": 1.5018627664657522,
      "learning_rate": 8.510638297872341e-06,
      "loss": 1.3849,
      "step": 20
    },
    {
      "epoch": 0.10683760683760683,
      "grad_norm": 1.2879097291220103,
      "learning_rate": 1.0638297872340426e-05,
      "loss": 1.3222,
      "step": 25
    },
    {
      "epoch": 0.1282051282051282,
      "grad_norm": 1.2033621208548797,
      "learning_rate": 1.2765957446808513e-05,
      "loss": 1.3365,
      "step": 30
    },
    {
      "epoch": 0.14957264957264957,
      "grad_norm": 1.1255514254267025,
      "learning_rate": 1.4893617021276596e-05,
      "loss": 1.2732,
      "step": 35
    },
    {
      "epoch": 0.17094017094017094,
      "grad_norm": 1.243442983004785,
      "learning_rate": 1.7021276595744682e-05,
      "loss": 1.2952,
      "step": 40
    },
    {
      "epoch": 0.19230769230769232,
      "grad_norm": 1.398459861930405,
      "learning_rate": 1.914893617021277e-05,
      "loss": 1.2762,
      "step": 45
    },
    {
      "epoch": 0.21367521367521367,
      "grad_norm": 1.871011686158811,
      "learning_rate": 1.999749429505675e-05,
      "loss": 1.2997,
      "step": 50
    },
    {
      "epoch": 0.23504273504273504,
      "grad_norm": 1.2331867317142813,
      "learning_rate": 1.9982186200932964e-05,
      "loss": 1.2568,
      "step": 55
    },
    {
      "epoch": 0.2564102564102564,
      "grad_norm": 1.1946705653350014,
      "learning_rate": 1.9952983353325358e-05,
      "loss": 1.2763,
      "step": 60
    },
    {
      "epoch": 0.2777777777777778,
      "grad_norm": 1.1214723834130367,
      "learning_rate": 1.990992640128218e-05,
      "loss": 1.2938,
      "step": 65
    },
    {
      "epoch": 0.29914529914529914,
      "grad_norm": 1.119663106979562,
      "learning_rate": 1.9853075278140913e-05,
      "loss": 1.2598,
      "step": 70
    },
    {
      "epoch": 0.32051282051282054,
      "grad_norm": 1.1303932474940224,
      "learning_rate": 1.9782509118103773e-05,
      "loss": 1.2489,
      "step": 75
    },
    {
      "epoch": 0.3418803418803419,
      "grad_norm": 1.2099240945757408,
      "learning_rate": 1.9698326146086446e-05,
      "loss": 1.2813,
      "step": 80
    },
    {
      "epoch": 0.36324786324786323,
      "grad_norm": 1.108377660801764,
      "learning_rate": 1.9600643540993453e-05,
      "loss": 1.2296,
      "step": 85
    },
    {
      "epoch": 0.38461538461538464,
      "grad_norm": 1.1652590169175916,
      "learning_rate": 1.9489597272610377e-05,
      "loss": 1.2367,
      "step": 90
    },
    {
      "epoch": 0.405982905982906,
      "grad_norm": 1.144744441272463,
      "learning_rate": 1.936534191234006e-05,
      "loss": 1.2497,
      "step": 95
    },
    {
      "epoch": 0.42735042735042733,
      "grad_norm": 1.0797546591912566,
      "learning_rate": 1.922805041804617e-05,
      "loss": 1.2157,
      "step": 100
    },
    {
      "epoch": 0.44871794871794873,
      "grad_norm": 1.1997371054541615,
      "learning_rate": 1.907791389330363e-05,
      "loss": 1.2393,
      "step": 105
    },
    {
      "epoch": 0.4700854700854701,
      "grad_norm": 1.234499931127894,
      "learning_rate": 1.8915141321391083e-05,
      "loss": 1.268,
      "step": 110
    },
    {
      "epoch": 0.49145299145299143,
      "grad_norm": 1.2719307847654406,
      "learning_rate": 1.873995927439555e-05,
      "loss": 1.2318,
      "step": 115
    },
    {
      "epoch": 0.5128205128205128,
      "grad_norm": 1.0803159349159561,
      "learning_rate": 1.855261159783432e-05,
      "loss": 1.2422,
      "step": 120
    },
    {
      "epoch": 0.5341880341880342,
      "grad_norm": 1.1126726338850874,
      "learning_rate": 1.8353359071232954e-05,
      "loss": 1.2341,
      "step": 125
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 1.141289352623837,
      "learning_rate": 1.8142479045131956e-05,
      "loss": 1.2172,
      "step": 130
    },
    {
      "epoch": 0.5769230769230769,
      "grad_norm": 1.0891200836049277,
      "learning_rate": 1.7920265055027285e-05,
      "loss": 1.2147,
      "step": 135
    },
    {
      "epoch": 0.5982905982905983,
      "grad_norm": 0.9983723250381306,
      "learning_rate": 1.76870264127822e-05,
      "loss": 1.2087,
      "step": 140
    },
    {
      "epoch": 0.6196581196581197,
      "grad_norm": 1.0642854505735857,
      "learning_rate": 1.7443087776079068e-05,
      "loss": 1.1872,
      "step": 145
    },
    {
      "epoch": 0.6410256410256411,
      "grad_norm": 1.0890040938005288,
      "learning_rate": 1.7188788696510477e-05,
      "loss": 1.2431,
      "step": 150
    },
    {
      "epoch": 0.6623931623931624,
      "grad_norm": 1.0196805310002042,
      "learning_rate": 1.6924483146938756e-05,
      "loss": 1.2133,
      "step": 155
    },
    {
      "epoch": 0.6837606837606838,
      "grad_norm": 1.0422167588733457,
      "learning_rate": 1.665053902878167e-05,
      "loss": 1.2275,
      "step": 160
    },
    {
      "epoch": 0.7051282051282052,
      "grad_norm": 1.0099548152705635,
      "learning_rate": 1.6367337659910223e-05,
      "loss": 1.1997,
      "step": 165
    },
    {
      "epoch": 0.7264957264957265,
      "grad_norm": 1.034450507226083,
      "learning_rate": 1.607527324387137e-05,
      "loss": 1.1952,
      "step": 170
    },
    {
      "epoch": 0.7478632478632479,
      "grad_norm": 1.01064689098683,
      "learning_rate": 1.5774752321174428e-05,
      "loss": 1.2142,
      "step": 175
    },
    {
      "epoch": 0.7692307692307693,
      "grad_norm": 1.2440710808413185,
      "learning_rate": 1.5466193203405017e-05,
      "loss": 1.2272,
      "step": 180
    },
    {
      "epoch": 0.7905982905982906,
      "grad_norm": 0.9538688514209636,
      "learning_rate": 1.5150025390954153e-05,
      "loss": 1.1797,
      "step": 185
    },
    {
      "epoch": 0.811965811965812,
      "grad_norm": 1.0457627221644799,
      "learning_rate": 1.4826688975173085e-05,
      "loss": 1.1834,
      "step": 190
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 1.0494277738904705,
      "learning_rate": 1.4496634025785938e-05,
      "loss": 1.2098,
      "step": 195
    },
    {
      "epoch": 0.8547008547008547,
      "grad_norm": 0.979634991944023,
      "learning_rate": 1.4160319964412943e-05,
      "loss": 1.1936,
      "step": 200
    },
    {
      "epoch": 0.8760683760683761,
      "grad_norm": 1.0707731906559956,
      "learning_rate": 1.3818214925076226e-05,
      "loss": 1.1934,
      "step": 205
    },
    {
      "epoch": 0.8974358974358975,
      "grad_norm": 0.9902950125875686,
      "learning_rate": 1.3470795102578358e-05,
      "loss": 1.1875,
      "step": 210
    },
    {
      "epoch": 0.9188034188034188,
      "grad_norm": 1.1906219730782999,
      "learning_rate": 1.3118544089660635e-05,
      "loss": 1.1828,
      "step": 215
    },
    {
      "epoch": 0.9401709401709402,
      "grad_norm": 1.029497385097088,
      "learning_rate": 1.2761952203863759e-05,
      "loss": 1.2032,
      "step": 220
    },
    {
      "epoch": 0.9615384615384616,
      "grad_norm": 1.0145349544318898,
      "learning_rate": 1.2401515805027924e-05,
      "loss": 1.1918,
      "step": 225
    },
    {
      "epoch": 0.9829059829059829,
      "grad_norm": 0.9338647888184574,
      "learning_rate": 1.2037736604382279e-05,
      "loss": 1.1667,
      "step": 230
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.1897046566009521,
      "eval_runtime": 4.2537,
      "eval_samples_per_second": 34.323,
      "eval_steps_per_second": 0.705,
      "step": 234
    },
    {
      "epoch": 1.0042735042735043,
      "grad_norm": 2.061617781988495,
      "learning_rate": 1.1671120966185486e-05,
      "loss": 1.1272,
      "step": 235
    },
    {
      "epoch": 1.0256410256410255,
      "grad_norm": 1.5757999937786025,
      "learning_rate": 1.1302179202889505e-05,
      "loss": 0.8684,
      "step": 240
    },
    {
      "epoch": 1.047008547008547,
      "grad_norm": 1.162411965496565,
      "learning_rate": 1.0931424864807624e-05,
      "loss": 0.8433,
      "step": 245
    },
    {
      "epoch": 1.0683760683760684,
      "grad_norm": 1.083844459735895,
      "learning_rate": 1.0559374025275597e-05,
      "loss": 0.8588,
      "step": 250
    },
    {
      "epoch": 1.0897435897435896,
      "grad_norm": 1.0166749425497479,
      "learning_rate": 1.0186544562300766e-05,
      "loss": 0.8936,
      "step": 255
    },
    {
      "epoch": 1.1111111111111112,
      "grad_norm": 1.0711811120340167,
      "learning_rate": 9.813455437699238e-06,
      "loss": 0.8633,
      "step": 260
    },
    {
      "epoch": 1.1324786324786325,
      "grad_norm": 1.0038627368222364,
      "learning_rate": 9.440625974724408e-06,
      "loss": 0.8821,
      "step": 265
    },
    {
      "epoch": 1.1538461538461537,
      "grad_norm": 1.0431997146297058,
      "learning_rate": 9.068575135192377e-06,
      "loss": 0.8856,
      "step": 270
    },
    {
      "epoch": 1.1752136752136753,
      "grad_norm": 1.4331559261347666,
      "learning_rate": 8.697820797110499e-06,
      "loss": 0.8645,
      "step": 275
    },
    {
      "epoch": 1.1965811965811965,
      "grad_norm": 1.0197402946112883,
      "learning_rate": 8.328879033814516e-06,
      "loss": 0.8512,
      "step": 280
    },
    {
      "epoch": 1.217948717948718,
      "grad_norm": 1.01821032251223,
      "learning_rate": 7.962263395617724e-06,
      "loss": 0.8661,
      "step": 285
    },
    {
      "epoch": 1.2393162393162394,
      "grad_norm": 1.0267748605334936,
      "learning_rate": 7.598484194972076e-06,
      "loss": 0.8601,
      "step": 290
    },
    {
      "epoch": 1.2606837606837606,
      "grad_norm": 1.0208240145129162,
      "learning_rate": 7.238047796136247e-06,
      "loss": 0.8627,
      "step": 295
    },
    {
      "epoch": 1.282051282051282,
      "grad_norm": 1.003007647188531,
      "learning_rate": 6.881455910339369e-06,
      "loss": 0.8463,
      "step": 300
    },
    {
      "epoch": 1.3034188034188035,
      "grad_norm": 1.0588099942878915,
      "learning_rate": 6.529204897421644e-06,
      "loss": 0.8591,
      "step": 305
    },
    {
      "epoch": 1.3247863247863247,
      "grad_norm": 0.9819416599015215,
      "learning_rate": 6.181785074923778e-06,
      "loss": 0.863,
      "step": 310
    },
    {
      "epoch": 1.3461538461538463,
      "grad_norm": 0.9892062975951394,
      "learning_rate": 5.839680035587061e-06,
      "loss": 0.8594,
      "step": 315
    },
    {
      "epoch": 1.3675213675213675,
      "grad_norm": 0.9795643152120956,
      "learning_rate": 5.503365974214059e-06,
      "loss": 0.8613,
      "step": 320
    },
    {
      "epoch": 1.3888888888888888,
      "grad_norm": 0.9542734758696676,
      "learning_rate": 5.173311024826916e-06,
      "loss": 0.866,
      "step": 325
    },
    {
      "epoch": 1.4102564102564101,
      "grad_norm": 0.9352942170714407,
      "learning_rate": 4.849974609045849e-06,
      "loss": 0.8469,
      "step": 330
    },
    {
      "epoch": 1.4316239316239316,
      "grad_norm": 0.943859757278146,
      "learning_rate": 4.533806796594989e-06,
      "loss": 0.8729,
      "step": 335
    },
    {
      "epoch": 1.452991452991453,
      "grad_norm": 1.058117869970705,
      "learning_rate": 4.2252476788255735e-06,
      "loss": 0.8614,
      "step": 340
    },
    {
      "epoch": 1.4743589743589745,
      "grad_norm": 1.007136340214763,
      "learning_rate": 3.924726756128632e-06,
      "loss": 0.8314,
      "step": 345
    },
    {
      "epoch": 1.4957264957264957,
      "grad_norm": 1.0001072453743785,
      "learning_rate": 3.6326623400897797e-06,
      "loss": 0.8263,
      "step": 350
    },
    {
      "epoch": 1.517094017094017,
      "grad_norm": 11.67014141174618,
      "learning_rate": 3.3494609712183323e-06,
      "loss": 0.9014,
      "step": 355
    },
    {
      "epoch": 1.5384615384615383,
      "grad_norm": 0.9396806102918359,
      "learning_rate": 3.0755168530612444e-06,
      "loss": 0.8294,
      "step": 360
    },
    {
      "epoch": 1.5598290598290598,
      "grad_norm": 0.9899965091834676,
      "learning_rate": 2.8112113034895273e-06,
      "loss": 0.8451,
      "step": 365
    },
    {
      "epoch": 1.5811965811965814,
      "grad_norm": 1.0046619245072979,
      "learning_rate": 2.5569122239209366e-06,
      "loss": 0.8635,
      "step": 370
    },
    {
      "epoch": 1.6025641025641026,
      "grad_norm": 0.9398590773976044,
      "learning_rate": 2.312973587217798e-06,
      "loss": 0.8468,
      "step": 375
    },
    {
      "epoch": 1.623931623931624,
      "grad_norm": 0.9897283589814032,
      "learning_rate": 2.079734944972717e-06,
      "loss": 0.8615,
      "step": 380
    },
    {
      "epoch": 1.6452991452991452,
      "grad_norm": 0.9311555025619238,
      "learning_rate": 1.8575209548680472e-06,
      "loss": 0.8616,
      "step": 385
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 0.9902450799068546,
      "learning_rate": 1.646640928767047e-06,
      "loss": 0.8401,
      "step": 390
    },
    {
      "epoch": 1.688034188034188,
      "grad_norm": 0.9397904063208168,
      "learning_rate": 1.447388402165686e-06,
      "loss": 0.8287,
      "step": 395
    },
    {
      "epoch": 1.7094017094017095,
      "grad_norm": 0.9334825682417511,
      "learning_rate": 1.2600407256044544e-06,
      "loss": 0.8492,
      "step": 400
    },
    {
      "epoch": 1.7307692307692308,
      "grad_norm": 0.9901247743535753,
      "learning_rate": 1.084858678608922e-06,
      "loss": 0.8695,
      "step": 405
    },
    {
      "epoch": 1.7521367521367521,
      "grad_norm": 0.9223418850643341,
      "learning_rate": 9.220861066963715e-07,
      "loss": 0.8476,
      "step": 410
    },
    {
      "epoch": 1.7735042735042734,
      "grad_norm": 0.9421948230776865,
      "learning_rate": 7.719495819538325e-07,
      "loss": 0.8412,
      "step": 415
    },
    {
      "epoch": 1.7948717948717947,
      "grad_norm": 0.9548113796103942,
      "learning_rate": 6.346580876599395e-07,
      "loss": 0.8633,
      "step": 420
    },
    {
      "epoch": 1.8162393162393162,
      "grad_norm": 0.9286412348527092,
      "learning_rate": 5.104027273896239e-07,
      "loss": 0.8659,
      "step": 425
    },
    {
      "epoch": 1.8376068376068377,
      "grad_norm": 0.9726444202093633,
      "learning_rate": 3.9935645900654906e-07,
      "loss": 0.8533,
      "step": 430
    },
    {
      "epoch": 1.858974358974359,
      "grad_norm": 0.9864530246870579,
      "learning_rate": 3.016738539135566e-07,
      "loss": 0.8377,
      "step": 435
    },
    {
      "epoch": 1.8803418803418803,
      "grad_norm": 0.9665706648338857,
      "learning_rate": 2.1749088189622848e-07,
      "loss": 0.8411,
      "step": 440
    },
    {
      "epoch": 1.9017094017094016,
      "grad_norm": 0.8848822721798258,
      "learning_rate": 1.4692472185908635e-07,
      "loss": 0.8615,
      "step": 445
    },
    {
      "epoch": 1.9230769230769231,
      "grad_norm": 0.9643097064713656,
      "learning_rate": 9.00735987178214e-08,
      "loss": 0.8367,
      "step": 450
    },
    {
      "epoch": 1.9444444444444444,
      "grad_norm": 0.9452651983857008,
      "learning_rate": 4.701664667464245e-08,
      "loss": 0.8577,
      "step": 455
    },
    {
      "epoch": 1.965811965811966,
      "grad_norm": 0.9405976480323978,
      "learning_rate": 1.781379906703573e-08,
      "loss": 0.8518,
      "step": 460
    },
    {
      "epoch": 1.9871794871794872,
      "grad_norm": 0.9339891238206331,
      "learning_rate": 2.5057049432519744e-09,
      "loss": 0.8465,
      "step": 465
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.2036917209625244,
      "eval_runtime": 4.1091,
      "eval_samples_per_second": 35.531,
      "eval_steps_per_second": 0.73,
      "step": 468
    },
    {
      "epoch": 2.0,
      "step": 468,
      "total_flos": 97989678858240.0,
      "train_loss": 1.058635287050508,
      "train_runtime": 3412.8477,
      "train_samples_per_second": 8.744,
      "train_steps_per_second": 0.137
    }
  ],
  "logging_steps": 5,
  "max_steps": 468,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 97989678858240.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}