{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.984251968503937,
  "eval_steps": 500,
  "global_step": 1266,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03937007874015748,
      "grad_norm": 6.157460689544678,
      "learning_rate": 3.125e-05,
      "loss": 1.4374,
      "step": 10
    },
    {
      "epoch": 0.07874015748031496,
      "grad_norm": 8.225975036621094,
      "learning_rate": 6.25e-05,
      "loss": 0.5746,
      "step": 20
    },
    {
      "epoch": 0.11811023622047244,
      "grad_norm": 2.298436403274536,
      "learning_rate": 9.375e-05,
      "loss": 0.3466,
      "step": 30
    },
    {
      "epoch": 0.15748031496062992,
      "grad_norm": 1.926019549369812,
      "learning_rate": 0.000125,
      "loss": 0.3016,
      "step": 40
    },
    {
      "epoch": 0.1968503937007874,
      "grad_norm": 1.7295136451721191,
      "learning_rate": 0.00015625,
      "loss": 0.2241,
      "step": 50
    },
    {
      "epoch": 0.23622047244094488,
      "grad_norm": 1.6235859394073486,
      "learning_rate": 0.0001875,
      "loss": 0.2474,
      "step": 60
    },
    {
      "epoch": 0.2755905511811024,
      "grad_norm": 2.1525580883026123,
      "learning_rate": 0.00019998770426725326,
      "loss": 0.1819,
      "step": 70
    },
    {
      "epoch": 0.31496062992125984,
      "grad_norm": 1.2614878416061401,
      "learning_rate": 0.00019991257462799398,
      "loss": 0.1753,
      "step": 80
    },
    {
      "epoch": 0.3543307086614173,
      "grad_norm": 1.5198956727981567,
      "learning_rate": 0.00019976919756880076,
      "loss": 0.1734,
      "step": 90
    },
    {
      "epoch": 0.3937007874015748,
      "grad_norm": 1.3017441034317017,
      "learning_rate": 0.00019955767102643884,
      "loss": 0.1558,
      "step": 100
    },
    {
      "epoch": 0.4330708661417323,
      "grad_norm": 1.3634755611419678,
      "learning_rate": 0.00019927813948863483,
      "loss": 0.1396,
      "step": 110
    },
    {
      "epoch": 0.47244094488188976,
      "grad_norm": 1.0837377309799194,
      "learning_rate": 0.00019893079389538115,
      "loss": 0.151,
      "step": 120
    },
    {
      "epoch": 0.5118110236220472,
      "grad_norm": 0.7570620179176331,
      "learning_rate": 0.0001985158715085105,
      "loss": 0.1345,
      "step": 130
    },
    {
      "epoch": 0.5511811023622047,
      "grad_norm": 1.1616774797439575,
      "learning_rate": 0.00019803365574962904,
      "loss": 0.1193,
      "step": 140
    },
    {
      "epoch": 0.5905511811023622,
      "grad_norm": 1.0171620845794678,
      "learning_rate": 0.00019748447600651922,
      "loss": 0.1168,
      "step": 150
    },
    {
      "epoch": 0.6299212598425197,
      "grad_norm": 1.0031143426895142,
      "learning_rate": 0.00019686870740814438,
      "loss": 0.1325,
      "step": 160
    },
    {
      "epoch": 0.6692913385826772,
      "grad_norm": 0.6090546250343323,
      "learning_rate": 0.00019618677056840896,
      "loss": 0.1154,
      "step": 170
    },
    {
      "epoch": 0.7086614173228346,
      "grad_norm": 1.0556284189224243,
      "learning_rate": 0.00019543913129884918,
      "loss": 0.1006,
      "step": 180
    },
    {
      "epoch": 0.7480314960629921,
      "grad_norm": 0.7367269396781921,
      "learning_rate": 0.00019462630029045035,
      "loss": 0.1182,
      "step": 190
    },
    {
      "epoch": 0.7874015748031497,
      "grad_norm": 17.055007934570312,
      "learning_rate": 0.00019374883276480865,
      "loss": 0.1464,
      "step": 200
    },
    {
      "epoch": 0.8267716535433071,
      "grad_norm": 8.562920570373535,
      "learning_rate": 0.00019280732809487521,
      "loss": 0.2103,
      "step": 210
    },
    {
      "epoch": 0.8661417322834646,
      "grad_norm": 1.041045069694519,
      "learning_rate": 0.00019180242939554137,
      "loss": 0.1697,
      "step": 220
    },
    {
      "epoch": 0.905511811023622,
      "grad_norm": 0.8244247436523438,
      "learning_rate": 0.00019073482308434544,
      "loss": 0.1478,
      "step": 230
    },
    {
      "epoch": 0.9448818897637795,
      "grad_norm": 0.9097272753715515,
      "learning_rate": 0.00018960523841260053,
      "loss": 0.1712,
      "step": 240
    },
    {
      "epoch": 0.984251968503937,
      "grad_norm": 1.7566819190979004,
      "learning_rate": 0.00018841444696726388,
      "loss": 0.1788,
      "step": 250
    },
    {
      "epoch": 1.0236220472440944,
      "grad_norm": 0.7728438973426819,
      "learning_rate": 0.00018716326214388777,
      "loss": 0.1519,
      "step": 260
    },
    {
      "epoch": 1.0629921259842519,
      "grad_norm": 1.4151078462600708,
      "learning_rate": 0.00018585253859101247,
      "loss": 0.178,
      "step": 270
    },
    {
      "epoch": 1.1023622047244095,
      "grad_norm": 0.7322055101394653,
      "learning_rate": 0.0001844831716263801,
      "loss": 0.1493,
      "step": 280
    },
    {
      "epoch": 1.141732283464567,
      "grad_norm": 1.238358974456787,
      "learning_rate": 0.00018305609662536888,
      "loss": 0.1366,
      "step": 290
    },
    {
      "epoch": 1.1811023622047245,
      "grad_norm": 1.0000638961791992,
      "learning_rate": 0.0001815722883820649,
      "loss": 0.1483,
      "step": 300
    },
    {
      "epoch": 1.220472440944882,
      "grad_norm": 0.9049971699714661,
      "learning_rate": 0.00018003276044340814,
      "loss": 0.1455,
      "step": 310
    },
    {
      "epoch": 1.2598425196850394,
      "grad_norm": 1.2286065816879272,
      "learning_rate": 0.00017843856441686763,
      "loss": 0.1519,
      "step": 320
    },
    {
      "epoch": 1.2992125984251968,
      "grad_norm": 0.9089654088020325,
      "learning_rate": 0.00017679078925211851,
      "loss": 0.1487,
      "step": 330
    },
    {
      "epoch": 1.3385826771653544,
      "grad_norm": 1.4565386772155762,
      "learning_rate": 0.0001750905604972116,
      "loss": 0.1299,
      "step": 340
    },
    {
      "epoch": 1.3779527559055118,
      "grad_norm": 1.1536667346954346,
      "learning_rate": 0.00017333903952974374,
      "loss": 0.1333,
      "step": 350
    },
    {
      "epoch": 1.4173228346456692,
      "grad_norm": 1.241052508354187,
      "learning_rate": 0.0001715374227635541,
      "loss": 0.1422,
      "step": 360
    },
    {
      "epoch": 1.4566929133858268,
      "grad_norm": 0.700495719909668,
      "learning_rate": 0.00016968694083148805,
      "loss": 0.1241,
      "step": 370
    },
    {
      "epoch": 1.4960629921259843,
      "grad_norm": 0.8857589960098267,
      "learning_rate": 0.00016778885774478713,
      "loss": 0.1312,
      "step": 380
    },
    {
      "epoch": 1.5354330708661417,
      "grad_norm": 0.8474507927894592,
      "learning_rate": 0.00016584447002967903,
      "loss": 0.1459,
      "step": 390
    },
    {
      "epoch": 1.574803149606299,
      "grad_norm": 1.0673331022262573,
      "learning_rate": 0.00016385510584175785,
      "loss": 0.1255,
      "step": 400
    },
    {
      "epoch": 1.6141732283464567,
      "grad_norm": 1.2963683605194092,
      "learning_rate": 0.0001618221240587588,
      "loss": 0.1443,
      "step": 410
    },
    {
      "epoch": 1.6535433070866141,
      "grad_norm": 0.6176678538322449,
      "learning_rate": 0.0001597469133523479,
      "loss": 0.1328,
      "step": 420
    },
    {
      "epoch": 1.6929133858267718,
      "grad_norm": 0.8610302805900574,
      "learning_rate": 0.00015763089123956004,
      "loss": 0.1315,
      "step": 430
    },
    {
      "epoch": 1.7322834645669292,
      "grad_norm": 0.8733415603637695,
      "learning_rate": 0.0001554755031145336,
      "loss": 0.1354,
      "step": 440
    },
    {
      "epoch": 1.7716535433070866,
      "grad_norm": 0.6525992155075073,
      "learning_rate": 0.00015328222126120315,
      "loss": 0.1306,
      "step": 450
    },
    {
      "epoch": 1.811023622047244,
      "grad_norm": 0.6734092831611633,
      "learning_rate": 0.00015105254384762453,
      "loss": 0.1272,
      "step": 460
    },
    {
      "epoch": 1.8503937007874016,
      "grad_norm": 0.9078968167304993,
      "learning_rate": 0.0001487879939026189,
      "loss": 0.1084,
      "step": 470
    },
    {
      "epoch": 1.889763779527559,
      "grad_norm": 0.6580955982208252,
      "learning_rate": 0.0001464901182754356,
      "loss": 0.1116,
      "step": 480
    },
    {
      "epoch": 1.9291338582677167,
      "grad_norm": 0.8749574422836304,
      "learning_rate": 0.00014416048657914352,
      "loss": 0.1345,
      "step": 490
    },
    {
      "epoch": 1.968503937007874,
      "grad_norm": 0.8025640845298767,
      "learning_rate": 0.00014180069011847347,
      "loss": 0.096,
      "step": 500
    },
    {
      "epoch": 2.0078740157480315,
      "grad_norm": 0.5700274705886841,
      "learning_rate": 0.00013941234080284328,
      "loss": 0.1201,
      "step": 510
    },
    {
      "epoch": 2.047244094488189,
      "grad_norm": 1.1415060758590698,
      "learning_rate": 0.00013699707004530883,
      "loss": 0.1206,
      "step": 520
    },
    {
      "epoch": 2.0866141732283463,
      "grad_norm": 0.8418262600898743,
      "learning_rate": 0.0001345565276481924,
      "loss": 0.1257,
      "step": 530
    },
    {
      "epoch": 2.1259842519685037,
      "grad_norm": 0.8720746040344238,
      "learning_rate": 0.00013209238067614989,
      "loss": 0.1169,
      "step": 540
    },
    {
      "epoch": 2.1653543307086616,
      "grad_norm": 1.166407585144043,
      "learning_rate": 0.0001296063123174467,
      "loss": 0.1327,
      "step": 550
    },
    {
      "epoch": 2.204724409448819,
      "grad_norm": 0.8546102046966553,
      "learning_rate": 0.00012710002073421995,
      "loss": 0.1287,
      "step": 560
    },
    {
      "epoch": 2.2440944881889764,
      "grad_norm": 0.6846320033073425,
      "learning_rate": 0.00012457521790251272,
      "loss": 0.1143,
      "step": 570
    },
    {
      "epoch": 2.283464566929134,
      "grad_norm": 1.0204697847366333,
      "learning_rate": 0.00012203362844287196,
      "loss": 0.1261,
      "step": 580
    },
    {
      "epoch": 2.322834645669291,
      "grad_norm": 0.7173842191696167,
      "learning_rate": 0.00011947698844230972,
      "loss": 0.1002,
      "step": 590
    },
    {
      "epoch": 2.362204724409449,
      "grad_norm": 0.7630149722099304,
      "learning_rate": 0.0001169070442684318,
      "loss": 0.1127,
      "step": 600
    },
    {
      "epoch": 2.4015748031496065,
      "grad_norm": 0.819685697555542,
      "learning_rate": 0.00011432555137654386,
      "loss": 0.1149,
      "step": 610
    },
    {
      "epoch": 2.440944881889764,
      "grad_norm": 0.5875028371810913,
      "learning_rate": 0.00011173427311055041,
      "loss": 0.1099,
      "step": 620
    },
    {
      "epoch": 2.4803149606299213,
      "grad_norm": 0.5276161432266235,
      "learning_rate": 0.00010913497949846483,
      "loss": 0.0935,
      "step": 630
    },
    {
      "epoch": 2.5196850393700787,
      "grad_norm": 0.8867918848991394,
      "learning_rate": 0.00010652944604335413,
      "loss": 0.1024,
      "step": 640
    },
    {
      "epoch": 2.559055118110236,
      "grad_norm": 0.7481799125671387,
      "learning_rate": 0.00010391945251054331,
      "loss": 0.1001,
      "step": 650
    },
    {
      "epoch": 2.5984251968503935,
      "grad_norm": 1.034694790840149,
      "learning_rate": 0.00010130678171190893,
      "loss": 0.1066,
      "step": 660
    },
    {
      "epoch": 2.637795275590551,
      "grad_norm": 0.744408369064331,
      "learning_rate": 9.869321828809109e-05,
      "loss": 0.0911,
      "step": 670
    },
    {
      "epoch": 2.677165354330709,
      "grad_norm": 0.6600897908210754,
      "learning_rate": 9.60805474894567e-05,
      "loss": 0.0957,
      "step": 680
    },
    {
      "epoch": 2.716535433070866,
      "grad_norm": 0.8174748420715332,
      "learning_rate": 9.347055395664589e-05,
      "loss": 0.1009,
      "step": 690
    },
    {
      "epoch": 2.7559055118110236,
      "grad_norm": 1.1754448413848877,
      "learning_rate": 9.08650205015352e-05,
      "loss": 0.0994,
      "step": 700
    },
    {
      "epoch": 2.795275590551181,
      "grad_norm": 0.6818444728851318,
      "learning_rate": 8.826572688944966e-05,
      "loss": 0.0978,
      "step": 710
    },
    {
      "epoch": 2.8346456692913384,
      "grad_norm": 0.7322255969047546,
      "learning_rate": 8.56744486234562e-05,
      "loss": 0.0917,
      "step": 720
    },
    {
      "epoch": 2.8740157480314963,
      "grad_norm": 0.4844493567943573,
      "learning_rate": 8.309295573156825e-05,
      "loss": 0.1054,
      "step": 730
    },
    {
      "epoch": 2.9133858267716537,
      "grad_norm": 0.4901849031448364,
      "learning_rate": 8.052301155769029e-05,
      "loss": 0.0933,
      "step": 740
    },
    {
      "epoch": 2.952755905511811,
      "grad_norm": 0.6136536002159119,
      "learning_rate": 7.796637155712802e-05,
      "loss": 0.0898,
      "step": 750
    },
    {
      "epoch": 2.9921259842519685,
      "grad_norm": 0.5571744441986084,
      "learning_rate": 7.542478209748727e-05,
      "loss": 0.0932,
      "step": 760
    },
    {
      "epoch": 3.031496062992126,
      "grad_norm": 0.6652244925498962,
      "learning_rate": 7.289997926578003e-05,
      "loss": 0.0894,
      "step": 770
    },
    {
      "epoch": 3.0708661417322833,
      "grad_norm": 0.4437718391418457,
      "learning_rate": 7.039368768255335e-05,
      "loss": 0.0862,
      "step": 780
    },
    {
      "epoch": 3.1102362204724407,
      "grad_norm": 0.6460803747177124,
      "learning_rate": 6.790761932385014e-05,
      "loss": 0.0909,
      "step": 790
    },
    {
      "epoch": 3.1496062992125986,
      "grad_norm": 0.7623637914657593,
      "learning_rate": 6.544347235180761e-05,
      "loss": 0.1056,
      "step": 800
    },
    {
      "epoch": 3.188976377952756,
      "grad_norm": 0.45258432626724243,
      "learning_rate": 6.300292995469119e-05,
      "loss": 0.0828,
      "step": 810
    },
    {
      "epoch": 3.2283464566929134,
      "grad_norm": 0.8195613026618958,
      "learning_rate": 6.058765919715674e-05,
      "loss": 0.1034,
      "step": 820
    },
    {
      "epoch": 3.267716535433071,
      "grad_norm": 0.5008795857429504,
      "learning_rate": 5.819930988152658e-05,
      "loss": 0.0997,
      "step": 830
    },
    {
      "epoch": 3.3070866141732282,
      "grad_norm": 0.593920886516571,
      "learning_rate": 5.5839513420856494e-05,
      "loss": 0.085,
      "step": 840
    },
    {
      "epoch": 3.3464566929133857,
      "grad_norm": 0.5025109052658081,
      "learning_rate": 5.3509881724564426e-05,
      "loss": 0.0909,
      "step": 850
    },
    {
      "epoch": 3.3858267716535435,
      "grad_norm": 0.4997495412826538,
      "learning_rate": 5.121200609738113e-05,
      "loss": 0.0968,
      "step": 860
    },
    {
      "epoch": 3.425196850393701,
      "grad_norm": 1.036964774131775,
      "learning_rate": 4.894745615237551e-05,
      "loss": 0.1025,
      "step": 870
    },
    {
      "epoch": 3.4645669291338583,
      "grad_norm": 0.4552832841873169,
      "learning_rate": 4.6717778738796883e-05,
      "loss": 0.0839,
      "step": 880
    },
    {
      "epoch": 3.5039370078740157,
      "grad_norm": 0.5048990845680237,
      "learning_rate": 4.4524496885466436e-05,
      "loss": 0.0919,
      "step": 890
    },
    {
      "epoch": 3.543307086614173,
      "grad_norm": 0.4812853932380676,
      "learning_rate": 4.236910876043999e-05,
      "loss": 0.0945,
      "step": 900
    },
    {
      "epoch": 3.5826771653543306,
      "grad_norm": 0.6418349146842957,
      "learning_rate": 4.02530866476521e-05,
      "loss": 0.0893,
      "step": 910
    },
    {
      "epoch": 3.622047244094488,
      "grad_norm": 0.9326638579368591,
      "learning_rate": 3.817787594124122e-05,
      "loss": 0.1007,
      "step": 920
    },
    {
      "epoch": 3.661417322834646,
      "grad_norm": 0.9185812473297119,
      "learning_rate": 3.6144894158242184e-05,
      "loss": 0.0912,
      "step": 930
    },
    {
      "epoch": 3.7007874015748032,
      "grad_norm": 0.6465328335762024,
      "learning_rate": 3.4155529970320986e-05,
      "loss": 0.078,
      "step": 940
    },
    {
      "epoch": 3.7401574803149606,
      "grad_norm": 0.5492585897445679,
      "learning_rate": 3.22111422552129e-05,
      "loss": 0.078,
      "step": 950
    },
    {
      "epoch": 3.779527559055118,
      "grad_norm": 0.638911783695221,
      "learning_rate": 3.031305916851196e-05,
      "loss": 0.083,
      "step": 960
    },
    {
      "epoch": 3.8188976377952755,
      "grad_norm": 0.47551003098487854,
      "learning_rate": 2.846257723644592e-05,
      "loss": 0.0847,
      "step": 970
    },
    {
      "epoch": 3.8582677165354333,
      "grad_norm": 0.8244979381561279,
      "learning_rate": 2.666096047025628e-05,
      "loss": 0.0871,
      "step": 980
    },
    {
      "epoch": 3.8976377952755907,
      "grad_norm": 0.2906242609024048,
      "learning_rate": 2.4909439502788423e-05,
      "loss": 0.0757,
      "step": 990
    },
    {
      "epoch": 3.937007874015748,
      "grad_norm": 0.6135310530662537,
      "learning_rate": 2.3209210747881515e-05,
      "loss": 0.0812,
      "step": 1000
    },
    {
      "epoch": 3.9763779527559056,
      "grad_norm": 0.5390735864639282,
      "learning_rate": 2.1561435583132395e-05,
      "loss": 0.0852,
      "step": 1010
    },
    {
      "epoch": 4.015748031496063,
      "grad_norm": 0.3508215546607971,
      "learning_rate": 1.996723955659189e-05,
      "loss": 0.0962,
      "step": 1020
    },
    {
      "epoch": 4.05511811023622,
      "grad_norm": 0.5338996648788452,
      "learning_rate": 1.842771161793513e-05,
      "loss": 0.0863,
      "step": 1030
    },
    {
      "epoch": 4.094488188976378,
      "grad_norm": 0.45414066314697266,
      "learning_rate": 1.6943903374631143e-05,
      "loss": 0.0822,
      "step": 1040
    },
    {
      "epoch": 4.133858267716535,
      "grad_norm": 0.8013415336608887,
      "learning_rate": 1.5516828373619918e-05,
      "loss": 0.0717,
      "step": 1050
    },
    {
      "epoch": 4.173228346456693,
      "grad_norm": 0.8064348697662354,
      "learning_rate": 1.4147461408987561e-05,
      "loss": 0.0945,
      "step": 1060
    },
    {
      "epoch": 4.21259842519685,
      "grad_norm": 0.4887343645095825,
      "learning_rate": 1.2836737856112246e-05,
      "loss": 0.0851,
      "step": 1070
    },
    {
      "epoch": 4.251968503937007,
      "grad_norm": 0.39578166604042053,
      "learning_rate": 1.158555303273614e-05,
      "loss": 0.0715,
      "step": 1080
    },
    {
      "epoch": 4.291338582677166,
      "grad_norm": 0.5196364521980286,
      "learning_rate": 1.0394761587399493e-05,
      "loss": 0.079,
      "step": 1090
    },
    {
      "epoch": 4.330708661417323,
      "grad_norm": 0.3454650044441223,
      "learning_rate": 9.265176915654595e-06,
      "loss": 0.0778,
      "step": 1100
    },
    {
      "epoch": 4.3700787401574805,
      "grad_norm": 0.44942712783813477,
      "learning_rate": 8.19757060445865e-06,
      "loss": 0.0725,
      "step": 1110
    },
    {
      "epoch": 4.409448818897638,
      "grad_norm": 0.5285565257072449,
      "learning_rate": 7.192671905124792e-06,
      "loss": 0.0685,
      "step": 1120
    },
    {
      "epoch": 4.448818897637795,
      "grad_norm": 0.5806540250778198,
      "learning_rate": 6.251167235191336e-06,
      "loss": 0.0759,
      "step": 1130
    },
    {
      "epoch": 4.488188976377953,
      "grad_norm": 0.592225968837738,
      "learning_rate": 5.373699709549673e-06,
      "loss": 0.0806,
      "step": 1140
    },
    {
      "epoch": 4.52755905511811,
      "grad_norm": 0.7170082330703735,
      "learning_rate": 4.560868701150844e-06,
      "loss": 0.0735,
      "step": 1150
    },
    {
      "epoch": 4.566929133858268,
      "grad_norm": 0.4995642304420471,
      "learning_rate": 3.8132294315910256e-06,
      "loss": 0.073,
      "step": 1160
    },
    {
      "epoch": 4.606299212598425,
      "grad_norm": 0.6979544162750244,
      "learning_rate": 3.131292591855628e-06,
      "loss": 0.0685,
      "step": 1170
    },
    {
      "epoch": 4.645669291338582,
      "grad_norm": 0.8452284932136536,
      "learning_rate": 2.515523993480806e-06,
      "loss": 0.0827,
      "step": 1180
    },
    {
      "epoch": 4.68503937007874,
      "grad_norm": 0.5433554649353027,
      "learning_rate": 1.966344250370977e-06,
      "loss": 0.0839,
      "step": 1190
    },
    {
      "epoch": 4.724409448818898,
      "grad_norm": 0.6554785370826721,
      "learning_rate": 1.4841284914895116e-06,
      "loss": 0.0906,
      "step": 1200
    },
    {
      "epoch": 4.7637795275590555,
      "grad_norm": 0.7909722924232483,
      "learning_rate": 1.0692061046188696e-06,
      "loss": 0.0926,
      "step": 1210
    },
    {
      "epoch": 4.803149606299213,
      "grad_norm": 0.6115571856498718,
      "learning_rate": 7.2186051136518e-07,
      "loss": 0.0949,
      "step": 1220
    },
    {
      "epoch": 4.84251968503937,
      "grad_norm": 0.39121922850608826,
      "learning_rate": 4.4232897356116795e-07,
      "loss": 0.0866,
      "step": 1230
    },
    {
      "epoch": 4.881889763779528,
      "grad_norm": 0.506991446018219,
      "learning_rate": 2.308024311992618e-07,
      "loss": 0.0713,
      "step": 1240
    },
    {
      "epoch": 4.921259842519685,
      "grad_norm": 0.5368971228599548,
      "learning_rate": 8.742537200603362e-08,
      "loss": 0.0811,
      "step": 1250
    },
    {
      "epoch": 4.960629921259843,
      "grad_norm": 0.7474204897880554,
      "learning_rate": 1.2295732746747668e-08,
      "loss": 0.0754,
      "step": 1260
    },
    {
      "epoch": 4.984251968503937,
      "step": 1266,
      "total_flos": 4.27570483010112e+16,
      "train_loss": 0.12990388043495527,
      "train_runtime": 571.8261,
      "train_samples_per_second": 35.423,
      "train_steps_per_second": 2.214
    }
  ],
  "logging_steps": 10,
  "max_steps": 1266,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4.27570483010112e+16,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}