{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 740,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.033783783783783786,
      "grad_norm": 8.29013495953546,
      "learning_rate": 6.7567567567567575e-06,
      "loss": 0.5181,
      "step": 5
    },
    {
      "epoch": 0.06756756756756757,
      "grad_norm": 4.196123392105515,
      "learning_rate": 1.3513513513513515e-05,
      "loss": 0.3567,
      "step": 10
    },
    {
      "epoch": 0.10135135135135136,
      "grad_norm": 3.2047846933733215,
      "learning_rate": 2.0270270270270273e-05,
      "loss": 0.3399,
      "step": 15
    },
    {
      "epoch": 0.13513513513513514,
      "grad_norm": 3.152224162446417,
      "learning_rate": 2.702702702702703e-05,
      "loss": 0.2552,
      "step": 20
    },
    {
      "epoch": 0.16891891891891891,
      "grad_norm": 3.649791800007489,
      "learning_rate": 3.3783783783783784e-05,
      "loss": 0.2705,
      "step": 25
    },
    {
      "epoch": 0.20270270270270271,
      "grad_norm": 3.4730019079608607,
      "learning_rate": 4.0540540540540545e-05,
      "loss": 0.2647,
      "step": 30
    },
    {
      "epoch": 0.23648648648648649,
      "grad_norm": 5.8436815551949195,
      "learning_rate": 4.72972972972973e-05,
      "loss": 0.3513,
      "step": 35
    },
    {
      "epoch": 0.2702702702702703,
      "grad_norm": 2.9019987565526995,
      "learning_rate": 4.9997978016429605e-05,
      "loss": 0.4888,
      "step": 40
    },
    {
      "epoch": 0.30405405405405406,
      "grad_norm": 2.9397780048282027,
      "learning_rate": 4.9985622766211935e-05,
      "loss": 0.2713,
      "step": 45
    },
    {
      "epoch": 0.33783783783783783,
      "grad_norm": 2.8421186668651917,
      "learning_rate": 4.996204175076325e-05,
      "loss": 0.2629,
      "step": 50
    },
    {
      "epoch": 0.3716216216216216,
      "grad_norm": 2.798031701878566,
      "learning_rate": 4.99272467427147e-05,
      "loss": 0.3088,
      "step": 55
    },
    {
      "epoch": 0.40540540540540543,
      "grad_norm": 2.70572378151596,
      "learning_rate": 4.9881255113192526e-05,
      "loss": 0.2351,
      "step": 60
    },
    {
      "epoch": 0.4391891891891892,
      "grad_norm": 2.6573100082008914,
      "learning_rate": 4.982408982314565e-05,
      "loss": 0.2722,
      "step": 65
    },
    {
      "epoch": 0.47297297297297297,
      "grad_norm": 2.613947983515025,
      "learning_rate": 4.975577941188258e-05,
      "loss": 0.2561,
      "step": 70
    },
    {
      "epoch": 0.5067567567567568,
      "grad_norm": 2.4731517146590503,
      "learning_rate": 4.967635798282344e-05,
      "loss": 0.2895,
      "step": 75
    },
    {
      "epoch": 0.5405405405405406,
      "grad_norm": 2.024939244762624,
      "learning_rate": 4.958586518647407e-05,
      "loss": 0.2432,
      "step": 80
    },
    {
      "epoch": 0.5743243243243243,
      "grad_norm": 2.075950925366443,
      "learning_rate": 4.9484346200630855e-05,
      "loss": 0.272,
      "step": 85
    },
    {
      "epoch": 0.6081081081081081,
      "grad_norm": 2.0824590269407257,
      "learning_rate": 4.937185170782607e-05,
      "loss": 0.2465,
      "step": 90
    },
    {
      "epoch": 0.6418918918918919,
      "grad_norm": 2.1834298841545765,
      "learning_rate": 4.9248437870025035e-05,
      "loss": 0.2561,
      "step": 95
    },
    {
      "epoch": 0.6756756756756757,
      "grad_norm": 3.0028526613842756,
      "learning_rate": 4.911416630058772e-05,
      "loss": 0.2112,
      "step": 100
    },
    {
      "epoch": 0.7094594594594594,
      "grad_norm": 6.923011878473891,
      "learning_rate": 4.896910403350873e-05,
      "loss": 0.2703,
      "step": 105
    },
    {
      "epoch": 0.7432432432432432,
      "grad_norm": 1.5341551012824088,
      "learning_rate": 4.88133234899512e-05,
      "loss": 0.2445,
      "step": 110
    },
    {
      "epoch": 0.777027027027027,
      "grad_norm": 1.9243858701641752,
      "learning_rate": 4.864690244209105e-05,
      "loss": 0.208,
      "step": 115
    },
    {
      "epoch": 0.8108108108108109,
      "grad_norm": 1.6400221556474566,
      "learning_rate": 4.8469923974289874e-05,
      "loss": 0.2879,
      "step": 120
    },
    {
      "epoch": 0.8445945945945946,
      "grad_norm": 2.7494322371602946,
      "learning_rate": 4.828247644161577e-05,
      "loss": 0.2482,
      "step": 125
    },
    {
      "epoch": 0.8783783783783784,
      "grad_norm": 1.7599344699077832,
      "learning_rate": 4.808465342573274e-05,
      "loss": 0.2587,
      "step": 130
    },
    {
      "epoch": 0.9121621621621622,
      "grad_norm": 1.6037855553499427,
      "learning_rate": 4.787655368818087e-05,
      "loss": 0.2024,
      "step": 135
    },
    {
      "epoch": 0.9459459459459459,
      "grad_norm": 1.3955654859145117,
      "learning_rate": 4.765828112107034e-05,
      "loss": 0.2557,
      "step": 140
    },
    {
      "epoch": 0.9797297297297297,
      "grad_norm": 1.288730609348733,
      "learning_rate": 4.742994469521421e-05,
      "loss": 0.2232,
      "step": 145
    },
    {
      "epoch": 1.0135135135135136,
      "grad_norm": 1.7061202812270857,
      "learning_rate": 4.719165840572557e-05,
      "loss": 0.2171,
      "step": 150
    },
    {
      "epoch": 1.0472972972972974,
      "grad_norm": 2.1625112154042254,
      "learning_rate": 4.694354121510644e-05,
      "loss": 0.1566,
      "step": 155
    },
    {
      "epoch": 1.0810810810810811,
      "grad_norm": 2.211819043240105,
      "learning_rate": 4.668571699385668e-05,
      "loss": 0.1689,
      "step": 160
    },
    {
      "epoch": 1.114864864864865,
      "grad_norm": 1.4126203201911418,
      "learning_rate": 4.641831445863265e-05,
      "loss": 0.1716,
      "step": 165
    },
    {
      "epoch": 1.1486486486486487,
      "grad_norm": 2.0610795868124194,
      "learning_rate": 4.614146710798645e-05,
      "loss": 0.1605,
      "step": 170
    },
    {
      "epoch": 1.1824324324324325,
      "grad_norm": 1.7608279284054584,
      "learning_rate": 4.585531315571788e-05,
      "loss": 0.1594,
      "step": 175
    },
    {
      "epoch": 1.2162162162162162,
      "grad_norm": 1.5806659537041632,
      "learning_rate": 4.555999546187229e-05,
      "loss": 0.1484,
      "step": 180
    },
    {
      "epoch": 1.25,
      "grad_norm": 1.5935232233887906,
      "learning_rate": 4.5255661461418854e-05,
      "loss": 0.1383,
      "step": 185
    },
    {
      "epoch": 1.2837837837837838,
      "grad_norm": 1.609634736806882,
      "learning_rate": 4.4942463090644896e-05,
      "loss": 0.1547,
      "step": 190
    },
    {
      "epoch": 1.3175675675675675,
      "grad_norm": 1.588101249847805,
      "learning_rate": 4.462055671130289e-05,
      "loss": 0.1482,
      "step": 195
    },
    {
      "epoch": 1.3513513513513513,
      "grad_norm": 1.5585026200892942,
      "learning_rate": 4.4290103032548094e-05,
      "loss": 0.146,
      "step": 200
    },
    {
      "epoch": 1.385135135135135,
      "grad_norm": 1.1467440704851377,
      "learning_rate": 4.395126703070589e-05,
      "loss": 0.1446,
      "step": 205
    },
    {
      "epoch": 1.4189189189189189,
      "grad_norm": 0.9207690008726266,
      "learning_rate": 4.360421786690862e-05,
      "loss": 0.151,
      "step": 210
    },
    {
      "epoch": 1.4527027027027026,
      "grad_norm": 1.1893240387302906,
      "learning_rate": 4.324912880264326e-05,
      "loss": 0.1451,
      "step": 215
    },
    {
      "epoch": 1.4864864864864864,
      "grad_norm": 1.1348088575018935,
      "learning_rate": 4.288617711325207e-05,
      "loss": 0.133,
      "step": 220
    },
    {
      "epoch": 1.5202702702702702,
      "grad_norm": 1.2969069765004013,
      "learning_rate": 4.251554399942928e-05,
      "loss": 0.1482,
      "step": 225
    },
    {
      "epoch": 1.554054054054054,
      "grad_norm": 1.1549621334885833,
      "learning_rate": 4.21374144967581e-05,
      "loss": 0.1589,
      "step": 230
    },
    {
      "epoch": 1.5878378378378377,
      "grad_norm": 1.644137743103509,
      "learning_rate": 4.1751977383333224e-05,
      "loss": 0.1407,
      "step": 235
    },
    {
      "epoch": 1.6216216216216215,
      "grad_norm": 1.3525975163496182,
      "learning_rate": 4.1359425085514906e-05,
      "loss": 0.1363,
      "step": 240
    },
    {
      "epoch": 1.6554054054054053,
      "grad_norm": 2.0901857582974217,
      "learning_rate": 4.095995358186162e-05,
      "loss": 0.1772,
      "step": 245
    },
    {
      "epoch": 1.689189189189189,
      "grad_norm": 1.6270221012827155,
      "learning_rate": 4.055376230528936e-05,
      "loss": 0.1387,
      "step": 250
    },
    {
      "epoch": 1.722972972972973,
      "grad_norm": 1.430683434287028,
      "learning_rate": 4.0141054043506406e-05,
      "loss": 0.1655,
      "step": 255
    },
    {
      "epoch": 1.7567567567567568,
      "grad_norm": 0.8114050618763757,
      "learning_rate": 3.972203483777315e-05,
      "loss": 0.1366,
      "step": 260
    },
    {
      "epoch": 1.7905405405405406,
      "grad_norm": 1.269223528535312,
      "learning_rate": 3.929691388003772e-05,
      "loss": 0.1485,
      "step": 265
    },
    {
      "epoch": 1.8243243243243243,
      "grad_norm": 1.13503713932199,
      "learning_rate": 3.886590340849852e-05,
      "loss": 0.1197,
      "step": 270
    },
    {
      "epoch": 1.8581081081081081,
      "grad_norm": 0.9877113608753568,
      "learning_rate": 3.842921860164607e-05,
      "loss": 0.1334,
      "step": 275
    },
    {
      "epoch": 1.8918918918918919,
      "grad_norm": 0.8713265904231167,
      "learning_rate": 3.798707747083694e-05,
      "loss": 0.1334,
      "step": 280
    },
    {
      "epoch": 1.9256756756756757,
      "grad_norm": 0.8661557486157838,
      "learning_rate": 3.753970075145322e-05,
      "loss": 0.1429,
      "step": 285
    },
    {
      "epoch": 1.9594594594594594,
      "grad_norm": 1.219122887579692,
      "learning_rate": 3.7087311792702265e-05,
      "loss": 0.1356,
      "step": 290
    },
    {
      "epoch": 1.9932432432432432,
      "grad_norm": 1.1038755273323901,
      "learning_rate": 3.663013644611139e-05,
      "loss": 0.1349,
      "step": 295
    },
    {
      "epoch": 2.027027027027027,
      "grad_norm": 0.7179183269742409,
      "learning_rate": 3.616840295277328e-05,
      "loss": 0.0975,
      "step": 300
    },
    {
      "epoch": 2.060810810810811,
      "grad_norm": 0.7521542943209245,
      "learning_rate": 3.5702341829398525e-05,
      "loss": 0.096,
      "step": 305
    },
    {
      "epoch": 2.0945945945945947,
      "grad_norm": 0.7622848192686278,
      "learning_rate": 3.523218575323198e-05,
      "loss": 0.0947,
      "step": 310
    },
    {
      "epoch": 2.1283783783783785,
      "grad_norm": 1.25703408706443,
      "learning_rate": 3.475816944589058e-05,
      "loss": 0.0937,
      "step": 315
    },
    {
      "epoch": 2.1621621621621623,
      "grad_norm": 0.910003923674188,
      "learning_rate": 3.4280529556180404e-05,
      "loss": 0.0998,
      "step": 320
    },
    {
      "epoch": 2.195945945945946,
      "grad_norm": 0.6770052864732585,
      "learning_rate": 3.379950454195172e-05,
      "loss": 0.0714,
      "step": 325
    },
    {
      "epoch": 2.22972972972973,
      "grad_norm": 0.9501804841017806,
      "learning_rate": 3.331533455105084e-05,
      "loss": 0.0856,
      "step": 330
    },
    {
      "epoch": 2.2635135135135136,
      "grad_norm": 0.9502759678893843,
      "learning_rate": 3.2828261301428206e-05,
      "loss": 0.0996,
      "step": 335
    },
    {
      "epoch": 2.2972972972972974,
      "grad_norm": 1.40596962540061,
      "learning_rate": 3.23385279604627e-05,
      "loss": 0.0691,
      "step": 340
    },
    {
      "epoch": 2.331081081081081,
      "grad_norm": 0.850391778909355,
      "learning_rate": 3.18463790235623e-05,
      "loss": 0.0828,
      "step": 345
    },
    {
      "epoch": 2.364864864864865,
      "grad_norm": 0.9046270921885601,
      "learning_rate": 3.135206019210167e-05,
      "loss": 0.0829,
      "step": 350
    },
    {
      "epoch": 2.3986486486486487,
      "grad_norm": 1.344251528726258,
      "learning_rate": 3.085581825075782e-05,
      "loss": 0.0761,
      "step": 355
    },
    {
      "epoch": 2.4324324324324325,
      "grad_norm": 1.3818381192158777,
      "learning_rate": 3.0357900944304774e-05,
      "loss": 0.0912,
      "step": 360
    },
    {
      "epoch": 2.4662162162162162,
      "grad_norm": 0.8834881506524517,
      "learning_rate": 2.9858556853929048e-05,
      "loss": 0.0843,
      "step": 365
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.9107496260787339,
      "learning_rate": 2.9358035273127483e-05,
      "loss": 0.0825,
      "step": 370
    },
    {
      "epoch": 2.5337837837837838,
      "grad_norm": 0.5784107872034662,
      "learning_rate": 2.8856586083249487e-05,
      "loss": 0.0721,
      "step": 375
    },
    {
      "epoch": 2.5675675675675675,
      "grad_norm": 0.7773606211954556,
      "learning_rate": 2.83544596287458e-05,
      "loss": 0.0874,
      "step": 380
    },
    {
      "epoch": 2.6013513513513513,
      "grad_norm": 0.6678593340764952,
      "learning_rate": 2.785190659218604e-05,
      "loss": 0.0859,
      "step": 385
    },
    {
      "epoch": 2.635135135135135,
      "grad_norm": 0.5939162753157207,
      "learning_rate": 2.7349177869107462e-05,
      "loss": 0.0809,
      "step": 390
    },
    {
      "epoch": 2.668918918918919,
      "grad_norm": 0.7283077606960382,
      "learning_rate": 2.684652444275741e-05,
      "loss": 0.0733,
      "step": 395
    },
    {
      "epoch": 2.7027027027027026,
      "grad_norm": 0.6502593242253658,
      "learning_rate": 2.634419725879193e-05,
      "loss": 0.0726,
      "step": 400
    },
    {
      "epoch": 2.7364864864864864,
      "grad_norm": 0.6122029804466594,
      "learning_rate": 2.58424470999932e-05,
      "loss": 0.088,
      "step": 405
    },
    {
      "epoch": 2.77027027027027,
      "grad_norm": 0.799053842544273,
      "learning_rate": 2.534152446106825e-05,
      "loss": 0.0729,
      "step": 410
    },
    {
      "epoch": 2.804054054054054,
      "grad_norm": 0.7095023200640361,
      "learning_rate": 2.4841679423591523e-05,
      "loss": 0.0668,
      "step": 415
    },
    {
      "epoch": 2.8378378378378377,
      "grad_norm": 0.5870222541144963,
      "learning_rate": 2.4343161531153647e-05,
      "loss": 0.0811,
      "step": 420
    },
    {
      "epoch": 2.8716216216216215,
      "grad_norm": 0.7486446531081316,
      "learning_rate": 2.3846219664778824e-05,
      "loss": 0.0743,
      "step": 425
    },
    {
      "epoch": 2.9054054054054053,
      "grad_norm": 0.626979823529596,
      "learning_rate": 2.3351101918672985e-05,
      "loss": 0.0827,
      "step": 430
    },
    {
      "epoch": 2.939189189189189,
      "grad_norm": 0.7519449237853378,
      "learning_rate": 2.2858055476364822e-05,
      "loss": 0.0626,
      "step": 435
    },
    {
      "epoch": 2.972972972972973,
      "grad_norm": 0.6067887360251734,
      "learning_rate": 2.2367326487301317e-05,
      "loss": 0.065,
      "step": 440
    },
    {
      "epoch": 3.0067567567567566,
      "grad_norm": 0.43015841504665747,
      "learning_rate": 2.1879159943959686e-05,
      "loss": 0.0588,
      "step": 445
    },
    {
      "epoch": 3.0405405405405403,
      "grad_norm": 0.5166996425697965,
      "learning_rate": 2.139379955953686e-05,
      "loss": 0.0403,
      "step": 450
    },
    {
      "epoch": 3.074324324324324,
      "grad_norm": 1.533221487720139,
      "learning_rate": 2.0911487646277623e-05,
      "loss": 0.045,
      "step": 455
    },
    {
      "epoch": 3.108108108108108,
      "grad_norm": 1.3591414304325815,
      "learning_rate": 2.0432464994502203e-05,
      "loss": 0.0428,
      "step": 460
    },
    {
      "epoch": 3.141891891891892,
      "grad_norm": 0.4695191476769462,
      "learning_rate": 1.995697075239365e-05,
      "loss": 0.0397,
      "step": 465
    },
    {
      "epoch": 3.175675675675676,
      "grad_norm": 0.7689436070838513,
      "learning_rate": 1.9485242306605028e-05,
      "loss": 0.0388,
      "step": 470
    },
    {
      "epoch": 3.2094594594594597,
      "grad_norm": 0.5707450348910151,
      "learning_rate": 1.9017515163746058e-05,
      "loss": 0.0398,
      "step": 475
    },
    {
      "epoch": 3.2432432432432434,
      "grad_norm": 0.6311825777201817,
      "learning_rate": 1.855402283280836e-05,
      "loss": 0.0397,
      "step": 480
    },
    {
      "epoch": 3.277027027027027,
      "grad_norm": 0.6341709550171472,
      "learning_rate": 1.8094996708587958e-05,
      "loss": 0.0366,
      "step": 485
    },
    {
      "epoch": 3.310810810810811,
      "grad_norm": 0.37977836752911515,
      "learning_rate": 1.7640665956163306e-05,
      "loss": 0.0364,
      "step": 490
    },
    {
      "epoch": 3.3445945945945947,
      "grad_norm": 0.3786127389464406,
      "learning_rate": 1.719125739648648e-05,
      "loss": 0.0316,
      "step": 495
    },
    {
      "epoch": 3.3783783783783785,
      "grad_norm": 0.4003900559035512,
      "learning_rate": 1.6746995393144668e-05,
      "loss": 0.0348,
      "step": 500
    },
    {
      "epoch": 3.4121621621621623,
      "grad_norm": 0.7612366325017373,
      "learning_rate": 1.6308101740348433e-05,
      "loss": 0.0373,
      "step": 505
    },
    {
      "epoch": 3.445945945945946,
      "grad_norm": 0.5347572825256244,
      "learning_rate": 1.5874795552202773e-05,
      "loss": 0.0409,
      "step": 510
    },
    {
      "epoch": 3.47972972972973,
      "grad_norm": 0.3804098138882223,
      "learning_rate": 1.5447293153316163e-05,
      "loss": 0.0288,
      "step": 515
    },
    {
      "epoch": 3.5135135135135136,
      "grad_norm": 0.7846805185343385,
      "learning_rate": 1.5025807970802252e-05,
      "loss": 0.0367,
      "step": 520
    },
    {
      "epoch": 3.5472972972972974,
      "grad_norm": 0.6955590006040003,
      "learning_rate": 1.4610550427728103e-05,
      "loss": 0.0355,
      "step": 525
    },
    {
      "epoch": 3.581081081081081,
      "grad_norm": 0.6081379754765148,
      "learning_rate": 1.4201727838062181e-05,
      "loss": 0.0384,
      "step": 530
    },
    {
      "epoch": 3.614864864864865,
      "grad_norm": 0.6589981768125784,
      "learning_rate": 1.3799544303174514e-05,
      "loss": 0.0454,
      "step": 535
    },
    {
      "epoch": 3.6486486486486487,
      "grad_norm": 0.6007971088374728,
      "learning_rate": 1.3404200609940754e-05,
      "loss": 0.0397,
      "step": 540
    },
    {
      "epoch": 3.6824324324324325,
      "grad_norm": 0.7325992049485736,
      "learning_rate": 1.3015894130500977e-05,
      "loss": 0.0374,
      "step": 545
    },
    {
      "epoch": 3.7162162162162162,
      "grad_norm": 0.37094326467204003,
      "learning_rate": 1.2634818723723174e-05,
      "loss": 0.0318,
      "step": 550
    },
    {
      "epoch": 3.75,
      "grad_norm": 0.9146146847365886,
      "learning_rate": 1.2261164638420832e-05,
      "loss": 0.0328,
      "step": 555
    },
    {
      "epoch": 3.7837837837837838,
      "grad_norm": 0.5648038565491271,
      "learning_rate": 1.1895118418372734e-05,
      "loss": 0.0356,
      "step": 560
    },
    {
      "epoch": 3.8175675675675675,
      "grad_norm": 0.37538544226428106,
      "learning_rate": 1.1536862809192518e-05,
      "loss": 0.0398,
      "step": 565
    },
    {
      "epoch": 3.8513513513513513,
      "grad_norm": 0.4038884653715223,
      "learning_rate": 1.1186576667094342e-05,
      "loss": 0.0271,
      "step": 570
    },
    {
      "epoch": 3.885135135135135,
      "grad_norm": 0.6627132587861069,
      "learning_rate": 1.0844434869600428e-05,
      "loss": 0.0338,
      "step": 575
    },
    {
      "epoch": 3.918918918918919,
      "grad_norm": 0.7041617251510983,
      "learning_rate": 1.0510608228234848e-05,
      "loss": 0.0391,
      "step": 580
    },
    {
      "epoch": 3.9527027027027026,
      "grad_norm": 0.8131142697010945,
      "learning_rate": 1.0185263403247256e-05,
      "loss": 0.0342,
      "step": 585
    },
    {
      "epoch": 3.9864864864864864,
      "grad_norm": 0.5920567955107612,
      "learning_rate": 9.868562820409103e-06,
      "loss": 0.0337,
      "step": 590
    },
    {
      "epoch": 4.02027027027027,
      "grad_norm": 0.3120828177149261,
      "learning_rate": 9.560664589923895e-06,
      "loss": 0.0226,
      "step": 595
    },
    {
      "epoch": 4.054054054054054,
      "grad_norm": 0.2900775148851475,
      "learning_rate": 9.261722427491953e-06,
      "loss": 0.0147,
      "step": 600
    },
    {
      "epoch": 4.087837837837838,
      "grad_norm": 0.22784089723926296,
      "learning_rate": 8.971885577569058e-06,
      "loss": 0.0122,
      "step": 605
    },
    {
      "epoch": 4.121621621621622,
      "grad_norm": 0.2693260557173335,
      "learning_rate": 8.691298738857432e-06,
      "loss": 0.0119,
      "step": 610
    },
    {
      "epoch": 4.155405405405405,
      "grad_norm": 0.21618402703820352,
      "learning_rate": 8.420101992066028e-06,
      "loss": 0.0156,
      "step": 615
    },
    {
      "epoch": 4.1891891891891895,
      "grad_norm": 0.24421224725586618,
      "learning_rate": 8.158430729976372e-06,
      "loss": 0.0119,
      "step": 620
    },
    {
      "epoch": 4.222972972972973,
      "grad_norm": 0.33320132947770426,
      "learning_rate": 7.906415589848834e-06,
      "loss": 0.0146,
      "step": 625
    },
    {
      "epoch": 4.256756756756757,
      "grad_norm": 0.33015864806759715,
      "learning_rate": 7.664182388203037e-06,
      "loss": 0.0145,
      "step": 630
    },
    {
      "epoch": 4.29054054054054,
      "grad_norm": 0.36028113378267884,
      "learning_rate": 7.4318520580049444e-06,
      "loss": 0.015,
      "step": 635
    },
    {
      "epoch": 4.324324324324325,
      "grad_norm": 0.26126678042031565,
      "learning_rate": 7.209540588292083e-06,
      "loss": 0.0134,
      "step": 640
    },
    {
      "epoch": 4.358108108108108,
      "grad_norm": 0.549701638585684,
      "learning_rate": 6.9973589662669455e-06,
      "loss": 0.0136,
      "step": 645
    },
    {
      "epoch": 4.391891891891892,
      "grad_norm": 0.30168954386975444,
      "learning_rate": 6.7954131218875404e-06,
      "loss": 0.0151,
      "step": 650
    },
    {
      "epoch": 4.425675675675675,
      "grad_norm": 0.43179940752419366,
      "learning_rate": 6.603803874982687e-06,
      "loss": 0.017,
      "step": 655
    },
    {
      "epoch": 4.45945945945946,
      "grad_norm": 0.26869880474008145,
      "learning_rate": 6.422626884918559e-06,
      "loss": 0.0131,
      "step": 660
    },
    {
      "epoch": 4.493243243243243,
      "grad_norm": 0.4055684732783103,
      "learning_rate": 6.2519726028415145e-06,
      "loss": 0.017,
      "step": 665
    },
    {
      "epoch": 4.527027027027027,
      "grad_norm": 0.334734546863203,
      "learning_rate": 6.091926226521089e-06,
      "loss": 0.0135,
      "step": 670
    },
    {
      "epoch": 4.5608108108108105,
      "grad_norm": 0.5546791873819881,
      "learning_rate": 5.942567657815696e-06,
      "loss": 0.0138,
      "step": 675
    },
    {
      "epoch": 4.594594594594595,
      "grad_norm": 0.4128562858606657,
      "learning_rate": 5.8039714627822754e-06,
      "loss": 0.0123,
      "step": 680
    },
    {
      "epoch": 4.628378378378378,
      "grad_norm": 0.33985067404728464,
      "learning_rate": 5.676206834449797e-06,
      "loss": 0.014,
      "step": 685
    },
    {
      "epoch": 4.662162162162162,
      "grad_norm": 0.3646278429623203,
      "learning_rate": 5.55933755827518e-06,
      "loss": 0.0105,
      "step": 690
    },
    {
      "epoch": 4.695945945945946,
      "grad_norm": 0.2663115053288746,
      "learning_rate": 5.453421980298957e-06,
      "loss": 0.012,
      "step": 695
    },
    {
      "epoch": 4.72972972972973,
      "grad_norm": 0.30374384905412244,
      "learning_rate": 5.358512978016445e-06,
      "loss": 0.0108,
      "step": 700
    },
    {
      "epoch": 4.763513513513513,
      "grad_norm": 0.47429438189169476,
      "learning_rate": 5.27465793397911e-06,
      "loss": 0.0143,
      "step": 705
    },
    {
      "epoch": 4.797297297297297,
      "grad_norm": 0.2683488363965782,
      "learning_rate": 5.201898712139201e-06,
      "loss": 0.0121,
      "step": 710
    },
    {
      "epoch": 4.831081081081081,
      "grad_norm": 0.4245927469089233,
      "learning_rate": 5.1402716369495194e-06,
      "loss": 0.0125,
      "step": 715
    },
    {
      "epoch": 4.864864864864865,
      "grad_norm": 0.40748023019876795,
      "learning_rate": 5.089807475228711e-06,
      "loss": 0.0151,
      "step": 720
    },
    {
      "epoch": 4.898648648648649,
      "grad_norm": 0.2454774836937338,
      "learning_rate": 5.050531420801205e-06,
      "loss": 0.0112,
      "step": 725
    },
    {
      "epoch": 4.9324324324324325,
      "grad_norm": 0.343369387002044,
      "learning_rate": 5.022463081919386e-06,
      "loss": 0.0126,
      "step": 730
    },
    {
      "epoch": 4.966216216216216,
      "grad_norm": 0.28684110351791725,
      "learning_rate": 5.005616471474332e-06,
      "loss": 0.0116,
      "step": 735
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.3365561300076367,
      "learning_rate": 5e-06,
      "loss": 0.013,
      "step": 740
    },
    {
      "epoch": 5.0,
      "step": 740,
      "total_flos": 519543052959744.0,
      "train_loss": 0.11201934994676628,
      "train_runtime": 13636.3107,
      "train_samples_per_second": 3.469,
      "train_steps_per_second": 0.054
    }
  ],
  "logging_steps": 5,
  "max_steps": 740,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 519543052959744.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}