{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 740,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.033783783783783786,
      "grad_norm": 11.91859090228093,
      "learning_rate": 6.7567567567567575e-06,
      "loss": 0.9462,
      "step": 5
    },
    {
      "epoch": 0.06756756756756757,
      "grad_norm": 4.53680244078793,
      "learning_rate": 1.3513513513513515e-05,
      "loss": 0.4666,
      "step": 10
    },
    {
      "epoch": 0.10135135135135136,
      "grad_norm": 3.573903925081475,
      "learning_rate": 2.0270270270270273e-05,
      "loss": 0.3568,
      "step": 15
    },
    {
      "epoch": 0.13513513513513514,
      "grad_norm": 3.832108029582817,
      "learning_rate": 2.702702702702703e-05,
      "loss": 0.264,
      "step": 20
    },
    {
      "epoch": 0.16891891891891891,
      "grad_norm": 2.7780428812660225,
      "learning_rate": 3.3783783783783784e-05,
      "loss": 0.2717,
      "step": 25
    },
    {
      "epoch": 0.20270270270270271,
      "grad_norm": 3.2015639656457204,
      "learning_rate": 4.0540540540540545e-05,
      "loss": 0.2704,
      "step": 30
    },
    {
      "epoch": 0.23648648648648649,
      "grad_norm": 3.8305742015126354,
      "learning_rate": 4.72972972972973e-05,
      "loss": 0.2625,
      "step": 35
    },
    {
      "epoch": 0.2702702702702703,
      "grad_norm": 2.4187710692208926,
      "learning_rate": 4.9997978016429605e-05,
      "loss": 0.2675,
      "step": 40
    },
    {
      "epoch": 0.30405405405405406,
      "grad_norm": 2.635487937809038,
      "learning_rate": 4.9985622766211935e-05,
      "loss": 0.2499,
      "step": 45
    },
    {
      "epoch": 0.33783783783783783,
      "grad_norm": 2.316225114816826,
      "learning_rate": 4.996204175076325e-05,
      "loss": 0.2621,
      "step": 50
    },
    {
      "epoch": 0.3716216216216216,
      "grad_norm": 2.146594431614266,
      "learning_rate": 4.99272467427147e-05,
      "loss": 0.2884,
      "step": 55
    },
    {
      "epoch": 0.40540540540540543,
      "grad_norm": 2.398859815430172,
      "learning_rate": 4.9881255113192526e-05,
      "loss": 0.2398,
      "step": 60
    },
    {
      "epoch": 0.4391891891891892,
      "grad_norm": 2.954151145902332,
      "learning_rate": 4.982408982314565e-05,
      "loss": 0.2543,
      "step": 65
    },
    {
      "epoch": 0.47297297297297297,
      "grad_norm": 1.7929864319507005,
      "learning_rate": 4.975577941188258e-05,
      "loss": 0.2502,
      "step": 70
    },
    {
      "epoch": 0.5067567567567568,
      "grad_norm": 1.840568109556661,
      "learning_rate": 4.967635798282344e-05,
      "loss": 0.277,
      "step": 75
    },
    {
      "epoch": 0.5405405405405406,
      "grad_norm": 1.771348286216397,
      "learning_rate": 4.958586518647407e-05,
      "loss": 0.2518,
      "step": 80
    },
    {
      "epoch": 0.5743243243243243,
      "grad_norm": 2.623290643440281,
      "learning_rate": 4.9484346200630855e-05,
      "loss": 0.269,
      "step": 85
    },
    {
      "epoch": 0.6081081081081081,
      "grad_norm": 2.6715602356902712,
      "learning_rate": 4.937185170782607e-05,
      "loss": 0.2469,
      "step": 90
    },
    {
      "epoch": 0.6418918918918919,
      "grad_norm": 1.5618566633614335,
      "learning_rate": 4.9248437870025035e-05,
      "loss": 0.2389,
      "step": 95
    },
    {
      "epoch": 0.6756756756756757,
      "grad_norm": 1.6043766275036224,
      "learning_rate": 4.911416630058772e-05,
      "loss": 0.2034,
      "step": 100
    },
    {
      "epoch": 0.7094594594594594,
      "grad_norm": 1.745799222586184,
      "learning_rate": 4.896910403350873e-05,
      "loss": 0.2718,
      "step": 105
    },
    {
      "epoch": 0.7432432432432432,
      "grad_norm": 1.6346813492093817,
      "learning_rate": 4.88133234899512e-05,
      "loss": 0.2441,
      "step": 110
    },
    {
      "epoch": 0.777027027027027,
      "grad_norm": 2.0436191339698486,
      "learning_rate": 4.864690244209105e-05,
      "loss": 0.2135,
      "step": 115
    },
    {
      "epoch": 0.8108108108108109,
      "grad_norm": 1.4009046058211432,
      "learning_rate": 4.8469923974289874e-05,
      "loss": 0.2855,
      "step": 120
    },
    {
      "epoch": 0.8445945945945946,
      "grad_norm": 2.1420433385829907,
      "learning_rate": 4.828247644161577e-05,
      "loss": 0.2443,
      "step": 125
    },
    {
      "epoch": 0.8783783783783784,
      "grad_norm": 2.029711117623661,
      "learning_rate": 4.808465342573274e-05,
      "loss": 0.2413,
      "step": 130
    },
    {
      "epoch": 0.9121621621621622,
      "grad_norm": 2.216487452899117,
      "learning_rate": 4.787655368818087e-05,
      "loss": 0.2136,
      "step": 135
    },
    {
      "epoch": 0.9459459459459459,
      "grad_norm": 1.9397316210901858,
      "learning_rate": 4.765828112107034e-05,
      "loss": 0.2541,
      "step": 140
    },
    {
      "epoch": 0.9797297297297297,
      "grad_norm": 1.2829673381620292,
      "learning_rate": 4.742994469521421e-05,
      "loss": 0.224,
      "step": 145
    },
    {
      "epoch": 1.0135135135135136,
      "grad_norm": 1.8944274655064726,
      "learning_rate": 4.719165840572557e-05,
      "loss": 0.2169,
      "step": 150
    },
    {
      "epoch": 1.0472972972972974,
      "grad_norm": 1.7790710619051624,
      "learning_rate": 4.694354121510644e-05,
      "loss": 0.1452,
      "step": 155
    },
    {
      "epoch": 1.0810810810810811,
      "grad_norm": 1.5418464625119979,
      "learning_rate": 4.668571699385668e-05,
      "loss": 0.1531,
      "step": 160
    },
    {
      "epoch": 1.114864864864865,
      "grad_norm": 1.5780966926016784,
      "learning_rate": 4.641831445863265e-05,
      "loss": 0.1694,
      "step": 165
    },
    {
      "epoch": 1.1486486486486487,
      "grad_norm": 4.6428360428861515,
      "learning_rate": 4.614146710798645e-05,
      "loss": 0.1619,
      "step": 170
    },
    {
      "epoch": 1.1824324324324325,
      "grad_norm": 1.4418340555320202,
      "learning_rate": 4.585531315571788e-05,
      "loss": 0.1611,
      "step": 175
    },
    {
      "epoch": 1.2162162162162162,
      "grad_norm": 2.0529270411153333,
      "learning_rate": 4.555999546187229e-05,
      "loss": 0.1383,
      "step": 180
    },
    {
      "epoch": 1.25,
      "grad_norm": 1.1879187127096535,
      "learning_rate": 4.5255661461418854e-05,
      "loss": 0.1448,
      "step": 185
    },
    {
      "epoch": 1.2837837837837838,
      "grad_norm": 1.0143016739135113,
      "learning_rate": 4.4942463090644896e-05,
      "loss": 0.1465,
      "step": 190
    },
    {
      "epoch": 1.3175675675675675,
      "grad_norm": 2.090466049666836,
      "learning_rate": 4.462055671130289e-05,
      "loss": 0.1615,
      "step": 195
    },
    {
      "epoch": 1.3513513513513513,
      "grad_norm": 1.5608930671166168,
      "learning_rate": 4.4290103032548094e-05,
      "loss": 0.1412,
      "step": 200
    },
    {
      "epoch": 1.385135135135135,
      "grad_norm": 1.2368408671304418,
      "learning_rate": 4.395126703070589e-05,
      "loss": 0.1571,
      "step": 205
    },
    {
      "epoch": 1.4189189189189189,
      "grad_norm": 1.0805073107430125,
      "learning_rate": 4.360421786690862e-05,
      "loss": 0.1518,
      "step": 210
    },
    {
      "epoch": 1.4527027027027026,
      "grad_norm": 1.9178479550142697,
      "learning_rate": 4.324912880264326e-05,
      "loss": 0.1476,
      "step": 215
    },
    {
      "epoch": 1.4864864864864864,
      "grad_norm": 1.2390954008536137,
      "learning_rate": 4.288617711325207e-05,
      "loss": 0.1436,
      "step": 220
    },
    {
      "epoch": 1.5202702702702702,
      "grad_norm": 1.2743622522980955,
      "learning_rate": 4.251554399942928e-05,
      "loss": 0.1494,
      "step": 225
    },
    {
      "epoch": 1.554054054054054,
      "grad_norm": 1.1175039450703055,
      "learning_rate": 4.21374144967581e-05,
      "loss": 0.1482,
      "step": 230
    },
    {
      "epoch": 1.5878378378378377,
      "grad_norm": 0.8990095650984934,
      "learning_rate": 4.1751977383333224e-05,
      "loss": 0.1412,
      "step": 235
    },
    {
      "epoch": 1.6216216216216215,
      "grad_norm": 1.242740863487836,
      "learning_rate": 4.1359425085514906e-05,
      "loss": 0.1366,
      "step": 240
    },
    {
      "epoch": 1.6554054054054053,
      "grad_norm": 1.1355786140547892,
      "learning_rate": 4.095995358186162e-05,
      "loss": 0.1667,
      "step": 245
    },
    {
      "epoch": 1.689189189189189,
      "grad_norm": 1.1926029161081027,
      "learning_rate": 4.055376230528936e-05,
      "loss": 0.1308,
      "step": 250
    },
    {
      "epoch": 1.722972972972973,
      "grad_norm": 1.0477428608330859,
      "learning_rate": 4.0141054043506406e-05,
      "loss": 0.1588,
      "step": 255
    },
    {
      "epoch": 1.7567567567567568,
      "grad_norm": 2.434364212868205,
      "learning_rate": 3.972203483777315e-05,
      "loss": 0.1454,
      "step": 260
    },
    {
      "epoch": 1.7905405405405406,
      "grad_norm": 1.025288482977718,
      "learning_rate": 3.929691388003772e-05,
      "loss": 0.1464,
      "step": 265
    },
    {
      "epoch": 1.8243243243243243,
      "grad_norm": 1.0185166028149573,
      "learning_rate": 3.886590340849852e-05,
      "loss": 0.1228,
      "step": 270
    },
    {
      "epoch": 1.8581081081081081,
      "grad_norm": 0.8097111792595093,
      "learning_rate": 3.842921860164607e-05,
      "loss": 0.13,
      "step": 275
    },
    {
      "epoch": 1.8918918918918919,
      "grad_norm": 1.0056347069881086,
      "learning_rate": 3.798707747083694e-05,
      "loss": 0.1333,
      "step": 280
    },
    {
      "epoch": 1.9256756756756757,
      "grad_norm": 1.0801697150500822,
      "learning_rate": 3.753970075145322e-05,
      "loss": 0.1323,
      "step": 285
    },
    {
      "epoch": 1.9594594594594594,
      "grad_norm": 1.1000500869217074,
      "learning_rate": 3.7087311792702265e-05,
      "loss": 0.1337,
      "step": 290
    },
    {
      "epoch": 1.9932432432432432,
      "grad_norm": 1.5122187810870562,
      "learning_rate": 3.663013644611139e-05,
      "loss": 0.1353,
      "step": 295
    },
    {
      "epoch": 2.027027027027027,
      "grad_norm": 0.9578127795346132,
      "learning_rate": 3.616840295277328e-05,
      "loss": 0.0908,
      "step": 300
    },
    {
      "epoch": 2.060810810810811,
      "grad_norm": 0.5934461784804044,
      "learning_rate": 3.5702341829398525e-05,
      "loss": 0.0851,
      "step": 305
    },
    {
      "epoch": 2.0945945945945947,
      "grad_norm": 1.0725119181506966,
      "learning_rate": 3.523218575323198e-05,
      "loss": 0.0929,
      "step": 310
    },
    {
      "epoch": 2.1283783783783785,
      "grad_norm": 0.7850220371401365,
      "learning_rate": 3.475816944589058e-05,
      "loss": 0.0911,
      "step": 315
    },
    {
      "epoch": 2.1621621621621623,
      "grad_norm": 0.8527224202602178,
      "learning_rate": 3.4280529556180404e-05,
      "loss": 0.0929,
      "step": 320
    },
    {
      "epoch": 2.195945945945946,
      "grad_norm": 1.04326596490438,
      "learning_rate": 3.379950454195172e-05,
      "loss": 0.0758,
      "step": 325
    },
    {
      "epoch": 2.22972972972973,
      "grad_norm": 1.0501264893771993,
      "learning_rate": 3.331533455105084e-05,
      "loss": 0.0818,
      "step": 330
    },
    {
      "epoch": 2.2635135135135136,
      "grad_norm": 0.8205870434886383,
      "learning_rate": 3.2828261301428206e-05,
      "loss": 0.0977,
      "step": 335
    },
    {
      "epoch": 2.2972972972972974,
      "grad_norm": 0.8849982112270377,
      "learning_rate": 3.23385279604627e-05,
      "loss": 0.0719,
      "step": 340
    },
    {
      "epoch": 2.331081081081081,
      "grad_norm": 0.7183927930700003,
      "learning_rate": 3.18463790235623e-05,
      "loss": 0.0873,
      "step": 345
    },
    {
      "epoch": 2.364864864864865,
      "grad_norm": 0.7716121512809517,
      "learning_rate": 3.135206019210167e-05,
      "loss": 0.0771,
      "step": 350
    },
    {
      "epoch": 2.3986486486486487,
      "grad_norm": 1.139603479425331,
      "learning_rate": 3.085581825075782e-05,
      "loss": 0.0709,
      "step": 355
    },
    {
      "epoch": 2.4324324324324325,
      "grad_norm": 1.4786085416010715,
      "learning_rate": 3.0357900944304774e-05,
      "loss": 0.0902,
      "step": 360
    },
    {
      "epoch": 2.4662162162162162,
      "grad_norm": 0.9071145610807975,
      "learning_rate": 2.9858556853929048e-05,
      "loss": 0.0833,
      "step": 365
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.7033676015416719,
      "learning_rate": 2.9358035273127483e-05,
      "loss": 0.0713,
      "step": 370
    },
    {
      "epoch": 2.5337837837837838,
      "grad_norm": 0.673070630957069,
      "learning_rate": 2.8856586083249487e-05,
      "loss": 0.0672,
      "step": 375
    },
    {
      "epoch": 2.5675675675675675,
      "grad_norm": 0.8537399411767074,
      "learning_rate": 2.83544596287458e-05,
      "loss": 0.0859,
      "step": 380
    },
    {
      "epoch": 2.6013513513513513,
      "grad_norm": 0.5796181912861205,
      "learning_rate": 2.785190659218604e-05,
      "loss": 0.0794,
      "step": 385
    },
    {
      "epoch": 2.635135135135135,
      "grad_norm": 0.6511959977506814,
      "learning_rate": 2.7349177869107462e-05,
      "loss": 0.078,
      "step": 390
    },
    {
      "epoch": 2.668918918918919,
      "grad_norm": 0.975876535380539,
      "learning_rate": 2.684652444275741e-05,
      "loss": 0.0646,
      "step": 395
    },
    {
      "epoch": 2.7027027027027026,
      "grad_norm": 0.6915954278323423,
      "learning_rate": 2.634419725879193e-05,
      "loss": 0.0753,
      "step": 400
    },
    {
      "epoch": 2.7364864864864864,
      "grad_norm": 0.6789310321110773,
      "learning_rate": 2.58424470999932e-05,
      "loss": 0.0825,
      "step": 405
    },
    {
      "epoch": 2.77027027027027,
      "grad_norm": 0.8180023876519836,
      "learning_rate": 2.534152446106825e-05,
      "loss": 0.0744,
      "step": 410
    },
    {
      "epoch": 2.804054054054054,
      "grad_norm": 0.7821847539221951,
      "learning_rate": 2.4841679423591523e-05,
      "loss": 0.0684,
      "step": 415
    },
    {
      "epoch": 2.8378378378378377,
      "grad_norm": 0.6251388492073695,
      "learning_rate": 2.4343161531153647e-05,
      "loss": 0.0764,
      "step": 420
    },
    {
      "epoch": 2.8716216216216215,
      "grad_norm": 0.5954138849180285,
      "learning_rate": 2.3846219664778824e-05,
      "loss": 0.0739,
      "step": 425
    },
    {
      "epoch": 2.9054054054054053,
      "grad_norm": 0.7069008525263534,
      "learning_rate": 2.3351101918672985e-05,
      "loss": 0.0772,
      "step": 430
    },
    {
      "epoch": 2.939189189189189,
      "grad_norm": 0.6296050119251395,
      "learning_rate": 2.2858055476364822e-05,
      "loss": 0.0675,
      "step": 435
    },
    {
      "epoch": 2.972972972972973,
      "grad_norm": 0.6716148252050718,
      "learning_rate": 2.2367326487301317e-05,
      "loss": 0.0612,
      "step": 440
    },
    {
      "epoch": 3.0067567567567566,
      "grad_norm": 0.41293433850786937,
      "learning_rate": 2.1879159943959686e-05,
      "loss": 0.0583,
      "step": 445
    },
    {
      "epoch": 3.0405405405405403,
      "grad_norm": 0.48242208510460555,
      "learning_rate": 2.139379955953686e-05,
      "loss": 0.04,
      "step": 450
    },
    {
      "epoch": 3.074324324324324,
      "grad_norm": 0.5002995507342688,
      "learning_rate": 2.0911487646277623e-05,
      "loss": 0.0379,
      "step": 455
    },
    {
      "epoch": 3.108108108108108,
      "grad_norm": 0.7307976680525754,
      "learning_rate": 2.0432464994502203e-05,
      "loss": 0.0402,
      "step": 460
    },
    {
      "epoch": 3.141891891891892,
      "grad_norm": 0.4456729161007252,
      "learning_rate": 1.995697075239365e-05,
      "loss": 0.0417,
      "step": 465
    },
    {
      "epoch": 3.175675675675676,
      "grad_norm": 0.5552051849040736,
      "learning_rate": 1.9485242306605028e-05,
      "loss": 0.0368,
      "step": 470
    },
    {
      "epoch": 3.2094594594594597,
      "grad_norm": 0.48465471615450373,
      "learning_rate": 1.9017515163746058e-05,
      "loss": 0.0398,
      "step": 475
    },
    {
      "epoch": 3.2432432432432434,
      "grad_norm": 0.40973628373713084,
      "learning_rate": 1.855402283280836e-05,
      "loss": 0.0395,
      "step": 480
    },
    {
      "epoch": 3.277027027027027,
      "grad_norm": 0.5006185178881579,
      "learning_rate": 1.8094996708587958e-05,
      "loss": 0.0334,
      "step": 485
    },
    {
      "epoch": 3.310810810810811,
      "grad_norm": 0.5022161933000103,
      "learning_rate": 1.7640665956163306e-05,
      "loss": 0.0331,
      "step": 490
    },
    {
      "epoch": 3.3445945945945947,
      "grad_norm": 0.45753568314378656,
      "learning_rate": 1.719125739648648e-05,
      "loss": 0.0317,
      "step": 495
    },
    {
      "epoch": 3.3783783783783785,
      "grad_norm": 0.448289241118566,
      "learning_rate": 1.6746995393144668e-05,
      "loss": 0.0315,
      "step": 500
    },
    {
      "epoch": 3.4121621621621623,
      "grad_norm": 0.8295893160466691,
      "learning_rate": 1.6308101740348433e-05,
      "loss": 0.0386,
      "step": 505
    },
    {
      "epoch": 3.445945945945946,
      "grad_norm": 0.5295192301457442,
      "learning_rate": 1.5874795552202773e-05,
      "loss": 0.0388,
      "step": 510
    },
    {
      "epoch": 3.47972972972973,
      "grad_norm": 0.5769268856553119,
      "learning_rate": 1.5447293153316163e-05,
      "loss": 0.0318,
      "step": 515
    },
    {
      "epoch": 3.5135135135135136,
      "grad_norm": 1.7958739927680984,
      "learning_rate": 1.5025807970802252e-05,
      "loss": 0.0335,
      "step": 520
    },
    {
      "epoch": 3.5472972972972974,
      "grad_norm": 0.5185543173255863,
      "learning_rate": 1.4610550427728103e-05,
      "loss": 0.0323,
      "step": 525
    },
    {
      "epoch": 3.581081081081081,
      "grad_norm": 0.6094438977040642,
      "learning_rate": 1.4201727838062181e-05,
      "loss": 0.0382,
      "step": 530
    },
    {
      "epoch": 3.614864864864865,
      "grad_norm": 0.9983111528521604,
      "learning_rate": 1.3799544303174514e-05,
      "loss": 0.043,
      "step": 535
    },
    {
      "epoch": 3.6486486486486487,
      "grad_norm": 0.5800713261881625,
      "learning_rate": 1.3404200609940754e-05,
      "loss": 0.0357,
      "step": 540
    },
    {
      "epoch": 3.6824324324324325,
      "grad_norm": 0.3822107351668231,
      "learning_rate": 1.3015894130500977e-05,
      "loss": 0.033,
      "step": 545
    },
    {
      "epoch": 3.7162162162162162,
      "grad_norm": 0.5208532366640735,
      "learning_rate": 1.2634818723723174e-05,
      "loss": 0.0336,
      "step": 550
    },
    {
      "epoch": 3.75,
      "grad_norm": 0.6114967345181951,
      "learning_rate": 1.2261164638420832e-05,
      "loss": 0.0311,
      "step": 555
    },
    {
      "epoch": 3.7837837837837838,
      "grad_norm": 0.5437365394783563,
      "learning_rate": 1.1895118418372734e-05,
      "loss": 0.0354,
      "step": 560
    },
    {
      "epoch": 3.8175675675675675,
      "grad_norm": 0.3780362895703916,
      "learning_rate": 1.1536862809192518e-05,
      "loss": 0.0346,
      "step": 565
    },
    {
      "epoch": 3.8513513513513513,
      "grad_norm": 3.518297058582475,
      "learning_rate": 1.1186576667094342e-05,
      "loss": 0.0285,
      "step": 570
    },
    {
      "epoch": 3.885135135135135,
      "grad_norm": 0.3660586029820935,
      "learning_rate": 1.0844434869600428e-05,
      "loss": 0.0312,
      "step": 575
    },
    {
      "epoch": 3.918918918918919,
      "grad_norm": 0.4588190949336144,
      "learning_rate": 1.0510608228234848e-05,
      "loss": 0.0327,
      "step": 580
    },
    {
      "epoch": 3.9527027027027026,
      "grad_norm": 0.46633602361920473,
      "learning_rate": 1.0185263403247256e-05,
      "loss": 0.0344,
      "step": 585
    },
    {
      "epoch": 3.9864864864864864,
      "grad_norm": 0.48024481445354344,
      "learning_rate": 9.868562820409103e-06,
      "loss": 0.0312,
      "step": 590
    },
    {
      "epoch": 4.02027027027027,
      "grad_norm": 0.3049063524706929,
      "learning_rate": 9.560664589923895e-06,
      "loss": 0.0236,
      "step": 595
    },
    {
      "epoch": 4.054054054054054,
      "grad_norm": 0.36714803546431,
      "learning_rate": 9.261722427491953e-06,
      "loss": 0.0149,
      "step": 600
    },
    {
      "epoch": 4.087837837837838,
      "grad_norm": 0.2893986636191623,
      "learning_rate": 8.971885577569058e-06,
      "loss": 0.0125,
      "step": 605
    },
    {
      "epoch": 4.121621621621622,
      "grad_norm": 0.2225194619419826,
      "learning_rate": 8.691298738857432e-06,
      "loss": 0.0105,
      "step": 610
    },
    {
      "epoch": 4.155405405405405,
      "grad_norm": 0.271632458626091,
      "learning_rate": 8.420101992066028e-06,
      "loss": 0.0132,
      "step": 615
    },
    {
      "epoch": 4.1891891891891895,
      "grad_norm": 0.38853806983200195,
      "learning_rate": 8.158430729976372e-06,
      "loss": 0.0117,
      "step": 620
    },
    {
      "epoch": 4.222972972972973,
      "grad_norm": 0.28121579810924724,
      "learning_rate": 7.906415589848834e-06,
      "loss": 0.0119,
      "step": 625
    },
    {
      "epoch": 4.256756756756757,
      "grad_norm": 0.36950427724679225,
      "learning_rate": 7.664182388203037e-06,
      "loss": 0.0122,
      "step": 630
    },
    {
      "epoch": 4.29054054054054,
      "grad_norm": 0.40982200614975045,
      "learning_rate": 7.4318520580049444e-06,
      "loss": 0.0144,
      "step": 635
    },
    {
      "epoch": 4.324324324324325,
      "grad_norm": 0.30667616284588944,
      "learning_rate": 7.209540588292083e-06,
      "loss": 0.0149,
      "step": 640
    },
    {
      "epoch": 4.358108108108108,
      "grad_norm": 0.27670270061009083,
      "learning_rate": 6.9973589662669455e-06,
      "loss": 0.0097,
      "step": 645
    },
    {
      "epoch": 4.391891891891892,
      "grad_norm": 1.156032111625232,
      "learning_rate": 6.7954131218875404e-06,
      "loss": 0.0141,
      "step": 650
    },
    {
      "epoch": 4.425675675675675,
      "grad_norm": 0.43940200076280117,
      "learning_rate": 6.603803874982687e-06,
      "loss": 0.0187,
      "step": 655
    },
    {
      "epoch": 4.45945945945946,
      "grad_norm": 0.5834566768734173,
      "learning_rate": 6.422626884918559e-06,
      "loss": 0.0117,
      "step": 660
    },
    {
      "epoch": 4.493243243243243,
      "grad_norm": 0.38705922881740046,
      "learning_rate": 6.2519726028415145e-06,
      "loss": 0.0138,
      "step": 665
    },
    {
      "epoch": 4.527027027027027,
      "grad_norm": 0.28730538588311555,
      "learning_rate": 6.091926226521089e-06,
      "loss": 0.0127,
      "step": 670
    },
    {
      "epoch": 4.5608108108108105,
      "grad_norm": 0.3749162518326673,
      "learning_rate": 5.942567657815696e-06,
      "loss": 0.012,
      "step": 675
    },
    {
      "epoch": 4.594594594594595,
      "grad_norm": 0.2566159254010731,
      "learning_rate": 5.8039714627822754e-06,
      "loss": 0.0089,
      "step": 680
    },
    {
      "epoch": 4.628378378378378,
      "grad_norm": 0.4339033109119829,
      "learning_rate": 5.676206834449797e-06,
      "loss": 0.0113,
      "step": 685
    },
    {
      "epoch": 4.662162162162162,
      "grad_norm": 0.3802169399499887,
      "learning_rate": 5.55933755827518e-06,
      "loss": 0.0098,
      "step": 690
    },
    {
      "epoch": 4.695945945945946,
      "grad_norm": 0.4733511291475961,
      "learning_rate": 5.453421980298957e-06,
      "loss": 0.0105,
      "step": 695
    },
    {
      "epoch": 4.72972972972973,
      "grad_norm": 0.3236015566148624,
      "learning_rate": 5.358512978016445e-06,
      "loss": 0.0096,
      "step": 700
    },
    {
      "epoch": 4.763513513513513,
      "grad_norm": 0.47392413673934136,
      "learning_rate": 5.27465793397911e-06,
      "loss": 0.0125,
      "step": 705
    },
    {
      "epoch": 4.797297297297297,
      "grad_norm": 0.24957782619954136,
      "learning_rate": 5.201898712139201e-06,
      "loss": 0.01,
      "step": 710
    },
    {
      "epoch": 4.831081081081081,
      "grad_norm": 0.23348067557635,
      "learning_rate": 5.1402716369495194e-06,
      "loss": 0.0089,
      "step": 715
    },
    {
      "epoch": 4.864864864864865,
      "grad_norm": 0.34646806534712726,
      "learning_rate": 5.089807475228711e-06,
      "loss": 0.0149,
      "step": 720
    },
    {
      "epoch": 4.898648648648649,
      "grad_norm": 0.27931985147442506,
      "learning_rate": 5.050531420801205e-06,
      "loss": 0.0098,
      "step": 725
    },
    {
      "epoch": 4.9324324324324325,
      "grad_norm": 0.38114606449587424,
      "learning_rate": 5.022463081919386e-06,
      "loss": 0.0141,
      "step": 730
    },
    {
      "epoch": 4.966216216216216,
      "grad_norm": 0.3191715681389194,
      "learning_rate": 5.005616471474332e-06,
      "loss": 0.0106,
      "step": 735
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.29441671421186116,
      "learning_rate": 5e-06,
      "loss": 0.0107,
      "step": 740
    },
    {
      "epoch": 5.0,
      "step": 740,
      "total_flos": 435868890169344.0,
      "train_loss": 0.11167457006066232,
      "train_runtime": 12339.9122,
      "train_samples_per_second": 3.833,
      "train_steps_per_second": 0.06
    }
  ],
  "logging_steps": 5,
  "max_steps": 740,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 435868890169344.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}