{
  "best_metric": 1.0741040706634521,
  "best_model_checkpoint": "miner_id_24/checkpoint-150",
  "epoch": 0.04244181933932235,
  "eval_steps": 50,
  "global_step": 150,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00028294546226214896,
      "grad_norm": 0.18001686036586761,
      "learning_rate": 6e-06,
      "loss": 1.2994,
      "step": 1
    },
    {
      "epoch": 0.00028294546226214896,
      "eval_loss": 1.2716970443725586,
      "eval_runtime": 846.7814,
      "eval_samples_per_second": 7.03,
      "eval_steps_per_second": 1.758,
      "step": 1
    },
    {
      "epoch": 0.0005658909245242979,
      "grad_norm": 0.1731778383255005,
      "learning_rate": 1.2e-05,
      "loss": 1.3714,
      "step": 2
    },
    {
      "epoch": 0.0008488363867864469,
      "grad_norm": 0.16550840437412262,
      "learning_rate": 1.8e-05,
      "loss": 1.2408,
      "step": 3
    },
    {
      "epoch": 0.0011317818490485959,
      "grad_norm": 0.16617779433727264,
      "learning_rate": 2.4e-05,
      "loss": 1.398,
      "step": 4
    },
    {
      "epoch": 0.001414727311310745,
      "grad_norm": 0.15455280244350433,
      "learning_rate": 3e-05,
      "loss": 1.2861,
      "step": 5
    },
    {
      "epoch": 0.0016976727735728938,
      "grad_norm": 0.15682223439216614,
      "learning_rate": 3.6e-05,
      "loss": 1.293,
      "step": 6
    },
    {
      "epoch": 0.001980618235835043,
      "grad_norm": 0.14510343968868256,
      "learning_rate": 4.2e-05,
      "loss": 1.2524,
      "step": 7
    },
    {
      "epoch": 0.0022635636980971917,
      "grad_norm": 0.14818604290485382,
      "learning_rate": 4.8e-05,
      "loss": 1.2562,
      "step": 8
    },
    {
      "epoch": 0.0025465091603593406,
      "grad_norm": 0.14594592154026031,
      "learning_rate": 5.4000000000000005e-05,
      "loss": 1.3486,
      "step": 9
    },
    {
      "epoch": 0.00282945462262149,
      "grad_norm": 0.14386072754859924,
      "learning_rate": 6e-05,
      "loss": 1.2519,
      "step": 10
    },
    {
      "epoch": 0.0031124000848836387,
      "grad_norm": 0.1916978359222412,
      "learning_rate": 5.999589914977407e-05,
      "loss": 1.1521,
      "step": 11
    },
    {
      "epoch": 0.0033953455471457876,
      "grad_norm": 0.16554145514965057,
      "learning_rate": 5.998359772022778e-05,
      "loss": 1.248,
      "step": 12
    },
    {
      "epoch": 0.0036782910094079364,
      "grad_norm": 0.1682235598564148,
      "learning_rate": 5.996309907444915e-05,
      "loss": 1.2722,
      "step": 13
    },
    {
      "epoch": 0.003961236471670086,
      "grad_norm": 0.15518620610237122,
      "learning_rate": 5.9934408816563236e-05,
      "loss": 1.1769,
      "step": 14
    },
    {
      "epoch": 0.004244181933932234,
      "grad_norm": 0.15147370100021362,
      "learning_rate": 5.98975347902001e-05,
      "loss": 1.3129,
      "step": 15
    },
    {
      "epoch": 0.0045271273961943834,
      "grad_norm": 0.15708550810813904,
      "learning_rate": 5.9852487076350345e-05,
      "loss": 1.2437,
      "step": 16
    },
    {
      "epoch": 0.004810072858456533,
      "grad_norm": 0.16699694097042084,
      "learning_rate": 5.979927799060915e-05,
      "loss": 1.2656,
      "step": 17
    },
    {
      "epoch": 0.005093018320718681,
      "grad_norm": 0.15669851005077362,
      "learning_rate": 5.9737922079809257e-05,
      "loss": 1.2197,
      "step": 18
    },
    {
      "epoch": 0.0053759637829808304,
      "grad_norm": 0.15826715528964996,
      "learning_rate": 5.9668436118044054e-05,
      "loss": 1.2848,
      "step": 19
    },
    {
      "epoch": 0.00565890924524298,
      "grad_norm": 0.15940728783607483,
      "learning_rate": 5.959083910208167e-05,
      "loss": 1.2357,
      "step": 20
    },
    {
      "epoch": 0.005941854707505128,
      "grad_norm": 0.14362244307994843,
      "learning_rate": 5.9505152246171474e-05,
      "loss": 1.1756,
      "step": 21
    },
    {
      "epoch": 0.0062248001697672774,
      "grad_norm": 0.1448201835155487,
      "learning_rate": 5.941139897624428e-05,
      "loss": 1.1705,
      "step": 22
    },
    {
      "epoch": 0.006507745632029427,
      "grad_norm": 0.14286337792873383,
      "learning_rate": 5.9309604923507984e-05,
      "loss": 1.256,
      "step": 23
    },
    {
      "epoch": 0.006790691094291575,
      "grad_norm": 0.14091116189956665,
      "learning_rate": 5.9199797917440176e-05,
      "loss": 1.2419,
      "step": 24
    },
    {
      "epoch": 0.0070736365565537244,
      "grad_norm": 0.1461900919675827,
      "learning_rate": 5.908200797817991e-05,
      "loss": 1.3056,
      "step": 25
    },
    {
      "epoch": 0.007356582018815873,
      "grad_norm": 0.1513061672449112,
      "learning_rate": 5.895626730832046e-05,
      "loss": 1.2286,
      "step": 26
    },
    {
      "epoch": 0.007639527481078022,
      "grad_norm": 0.14222566783428192,
      "learning_rate": 5.882261028410545e-05,
      "loss": 1.3284,
      "step": 27
    },
    {
      "epoch": 0.007922472943340171,
      "grad_norm": 0.14666450023651123,
      "learning_rate": 5.8681073446030734e-05,
      "loss": 1.1703,
      "step": 28
    },
    {
      "epoch": 0.00820541840560232,
      "grad_norm": 0.1654808074235916,
      "learning_rate": 5.853169548885461e-05,
      "loss": 1.1861,
      "step": 29
    },
    {
      "epoch": 0.008488363867864468,
      "grad_norm": 0.1394944190979004,
      "learning_rate": 5.8374517251019035e-05,
      "loss": 1.2514,
      "step": 30
    },
    {
      "epoch": 0.008771309330126618,
      "grad_norm": 0.16221028566360474,
      "learning_rate": 5.820958170348484e-05,
      "loss": 1.2141,
      "step": 31
    },
    {
      "epoch": 0.009054254792388767,
      "grad_norm": 0.17233219742774963,
      "learning_rate": 5.8036933937983825e-05,
      "loss": 1.1461,
      "step": 32
    },
    {
      "epoch": 0.009337200254650915,
      "grad_norm": 0.16100017726421356,
      "learning_rate": 5.7856621154691217e-05,
      "loss": 1.1342,
      "step": 33
    },
    {
      "epoch": 0.009620145716913065,
      "grad_norm": 0.16232934594154358,
      "learning_rate": 5.766869264932154e-05,
      "loss": 1.0513,
      "step": 34
    },
    {
      "epoch": 0.009903091179175214,
      "grad_norm": 0.169218510389328,
      "learning_rate": 5.747319979965172e-05,
      "loss": 1.176,
      "step": 35
    },
    {
      "epoch": 0.010186036641437362,
      "grad_norm": 0.18459808826446533,
      "learning_rate": 5.727019605147488e-05,
      "loss": 1.1991,
      "step": 36
    },
    {
      "epoch": 0.010468982103699512,
      "grad_norm": 0.21113476157188416,
      "learning_rate": 5.7059736903988775e-05,
      "loss": 1.1908,
      "step": 37
    },
    {
      "epoch": 0.010751927565961661,
      "grad_norm": 0.255100816488266,
      "learning_rate": 5.684187989462291e-05,
      "loss": 1.0502,
      "step": 38
    },
    {
      "epoch": 0.01103487302822381,
      "grad_norm": 0.2532814145088196,
      "learning_rate": 5.661668458330836e-05,
      "loss": 1.1723,
      "step": 39
    },
    {
      "epoch": 0.01131781849048596,
      "grad_norm": 0.24460916221141815,
      "learning_rate": 5.638421253619467e-05,
      "loss": 1.092,
      "step": 40
    },
    {
      "epoch": 0.011600763952748108,
      "grad_norm": 0.33700716495513916,
      "learning_rate": 5.614452730881832e-05,
      "loss": 1.0077,
      "step": 41
    },
    {
      "epoch": 0.011883709415010256,
      "grad_norm": 0.3086978793144226,
      "learning_rate": 5.589769442872722e-05,
      "loss": 0.984,
      "step": 42
    },
    {
      "epoch": 0.012166654877272406,
      "grad_norm": 0.34127745032310486,
      "learning_rate": 5.5643781377566175e-05,
      "loss": 0.9426,
      "step": 43
    },
    {
      "epoch": 0.012449600339534555,
      "grad_norm": 0.3650180697441101,
      "learning_rate": 5.538285757262806e-05,
      "loss": 0.9358,
      "step": 44
    },
    {
      "epoch": 0.012732545801796703,
      "grad_norm": 0.3661767542362213,
      "learning_rate": 5.5114994347875856e-05,
      "loss": 0.9236,
      "step": 45
    },
    {
      "epoch": 0.013015491264058853,
      "grad_norm": 0.35510310530662537,
      "learning_rate": 5.48402649344406e-05,
      "loss": 0.9134,
      "step": 46
    },
    {
      "epoch": 0.013298436726321002,
      "grad_norm": 0.3974863588809967,
      "learning_rate": 5.455874444060078e-05,
      "loss": 1.1377,
      "step": 47
    },
    {
      "epoch": 0.01358138218858315,
      "grad_norm": 0.3589693009853363,
      "learning_rate": 5.427050983124843e-05,
      "loss": 1.2237,
      "step": 48
    },
    {
      "epoch": 0.013864327650845299,
      "grad_norm": 0.36883828043937683,
      "learning_rate": 5.397563990684774e-05,
      "loss": 1.1407,
      "step": 49
    },
    {
      "epoch": 0.014147273113107449,
      "grad_norm": 0.3884880840778351,
      "learning_rate": 5.367421528189181e-05,
      "loss": 1.1999,
      "step": 50
    },
    {
      "epoch": 0.014147273113107449,
      "eval_loss": 1.1299543380737305,
      "eval_runtime": 853.0261,
      "eval_samples_per_second": 6.979,
      "eval_steps_per_second": 1.746,
      "step": 50
    },
    {
      "epoch": 0.014430218575369597,
      "grad_norm": 0.2056826651096344,
      "learning_rate": 5.336631836286338e-05,
      "loss": 1.2011,
      "step": 51
    },
    {
      "epoch": 0.014713164037631746,
      "grad_norm": 0.19640228152275085,
      "learning_rate": 5.3052033325705774e-05,
      "loss": 1.1475,
      "step": 52
    },
    {
      "epoch": 0.014996109499893896,
      "grad_norm": 0.19956982135772705,
      "learning_rate": 5.2731446092810044e-05,
      "loss": 1.1226,
      "step": 53
    },
    {
      "epoch": 0.015279054962156044,
      "grad_norm": 0.15548309683799744,
      "learning_rate": 5.240464430952462e-05,
      "loss": 1.1501,
      "step": 54
    },
    {
      "epoch": 0.015562000424418193,
      "grad_norm": 0.1360340118408203,
      "learning_rate": 5.207171732019395e-05,
      "loss": 1.1462,
      "step": 55
    },
    {
      "epoch": 0.015844945886680343,
      "grad_norm": 0.14059096574783325,
      "learning_rate": 5.1732756143732675e-05,
      "loss": 1.1009,
      "step": 56
    },
    {
      "epoch": 0.01612789134894249,
      "grad_norm": 0.13691364228725433,
      "learning_rate": 5.1387853448741916e-05,
      "loss": 1.1818,
      "step": 57
    },
    {
      "epoch": 0.01641083681120464,
      "grad_norm": 0.13487009704113007,
      "learning_rate": 5.103710352817465e-05,
      "loss": 1.1531,
      "step": 58
    },
    {
      "epoch": 0.016693782273466788,
      "grad_norm": 0.15809845924377441,
      "learning_rate": 5.068060227355698e-05,
      "loss": 1.1472,
      "step": 59
    },
    {
      "epoch": 0.016976727735728937,
      "grad_norm": 0.15477801859378815,
      "learning_rate": 5.0318447148772234e-05,
      "loss": 1.1809,
      "step": 60
    },
    {
      "epoch": 0.01725967319799109,
      "grad_norm": 0.1484566628932953,
      "learning_rate": 4.995073716341545e-05,
      "loss": 1.2081,
      "step": 61
    },
    {
      "epoch": 0.017542618660253237,
      "grad_norm": 0.1421346217393875,
      "learning_rate": 4.957757284572506e-05,
      "loss": 1.224,
      "step": 62
    },
    {
      "epoch": 0.017825564122515385,
      "grad_norm": 0.1333300918340683,
      "learning_rate": 4.91990562150995e-05,
      "loss": 1.1276,
      "step": 63
    },
    {
      "epoch": 0.018108509584777534,
      "grad_norm": 0.12150599807500839,
      "learning_rate": 4.881529075420611e-05,
      "loss": 1.2021,
      "step": 64
    },
    {
      "epoch": 0.018391455047039682,
      "grad_norm": 0.12204714864492416,
      "learning_rate": 4.8426381380690036e-05,
      "loss": 1.114,
      "step": 65
    },
    {
      "epoch": 0.01867440050930183,
      "grad_norm": 0.12727297842502594,
      "learning_rate": 4.8032434418490753e-05,
      "loss": 1.1536,
      "step": 66
    },
    {
      "epoch": 0.018957345971563982,
      "grad_norm": 0.11255544424057007,
      "learning_rate": 4.7633557568774194e-05,
      "loss": 1.1206,
      "step": 67
    },
    {
      "epoch": 0.01924029143382613,
      "grad_norm": 0.1191386803984642,
      "learning_rate": 4.722985988048831e-05,
      "loss": 1.1546,
      "step": 68
    },
    {
      "epoch": 0.01952323689608828,
      "grad_norm": 0.13010723888874054,
      "learning_rate": 4.6821451720550184e-05,
      "loss": 1.1694,
      "step": 69
    },
    {
      "epoch": 0.019806182358350428,
      "grad_norm": 0.12365347146987915,
      "learning_rate": 4.640844474367282e-05,
      "loss": 1.136,
      "step": 70
    },
    {
      "epoch": 0.020089127820612576,
      "grad_norm": 0.14611800014972687,
      "learning_rate": 4.5990951861839815e-05,
      "loss": 1.1683,
      "step": 71
    },
    {
      "epoch": 0.020372073282874725,
      "grad_norm": 0.1299329400062561,
      "learning_rate": 4.5569087213436455e-05,
      "loss": 1.2127,
      "step": 72
    },
    {
      "epoch": 0.020655018745136876,
      "grad_norm": 0.1349126100540161,
      "learning_rate": 4.514296613204532e-05,
      "loss": 1.1545,
      "step": 73
    },
    {
      "epoch": 0.020937964207399025,
      "grad_norm": 0.12816999852657318,
      "learning_rate": 4.471270511491525e-05,
      "loss": 1.198,
      "step": 74
    },
    {
      "epoch": 0.021220909669661173,
      "grad_norm": 0.14442527294158936,
      "learning_rate": 4.427842179111221e-05,
      "loss": 1.1331,
      "step": 75
    },
    {
      "epoch": 0.021503855131923322,
      "grad_norm": 0.1318608820438385,
      "learning_rate": 4.3840234889360634e-05,
      "loss": 1.1442,
      "step": 76
    },
    {
      "epoch": 0.02178680059418547,
      "grad_norm": 0.13773271441459656,
      "learning_rate": 4.33982642055842e-05,
      "loss": 1.1503,
      "step": 77
    },
    {
      "epoch": 0.02206974605644762,
      "grad_norm": 0.12418694794178009,
      "learning_rate": 4.2952630570154785e-05,
      "loss": 1.0393,
      "step": 78
    },
    {
      "epoch": 0.022352691518709767,
      "grad_norm": 0.14450491964817047,
      "learning_rate": 4.250345581485871e-05,
      "loss": 1.1194,
      "step": 79
    },
    {
      "epoch": 0.02263563698097192,
      "grad_norm": 0.15314346551895142,
      "learning_rate": 4.205086273958909e-05,
      "loss": 1.0763,
      "step": 80
    },
    {
      "epoch": 0.022918582443234067,
      "grad_norm": 0.1480676680803299,
      "learning_rate": 4.1594975078773565e-05,
      "loss": 1.2132,
      "step": 81
    },
    {
      "epoch": 0.023201527905496216,
      "grad_norm": 0.14896009862422943,
      "learning_rate": 4.113591746754662e-05,
      "loss": 1.1389,
      "step": 82
    },
    {
      "epoch": 0.023484473367758364,
      "grad_norm": 0.17366276681423187,
      "learning_rate": 4.06738154076755e-05,
      "loss": 1.2691,
      "step": 83
    },
    {
      "epoch": 0.023767418830020513,
      "grad_norm": 0.16257454454898834,
      "learning_rate": 4.020879523324929e-05,
      "loss": 1.0568,
      "step": 84
    },
    {
      "epoch": 0.02405036429228266,
      "grad_norm": 0.16570693254470825,
      "learning_rate": 3.974098407614051e-05,
      "loss": 1.1422,
      "step": 85
    },
    {
      "epoch": 0.024333309754544813,
      "grad_norm": 0.1792537122964859,
      "learning_rate": 3.927050983124842e-05,
      "loss": 1.1421,
      "step": 86
    },
    {
      "epoch": 0.02461625521680696,
      "grad_norm": 0.18916811048984528,
      "learning_rate": 3.8797501121533946e-05,
      "loss": 1.1065,
      "step": 87
    },
    {
      "epoch": 0.02489920067906911,
      "grad_norm": 0.2118011862039566,
      "learning_rate": 3.832208726285534e-05,
      "loss": 1.0761,
      "step": 88
    },
    {
      "epoch": 0.025182146141331258,
      "grad_norm": 0.21735292673110962,
      "learning_rate": 3.784439822861459e-05,
      "loss": 0.8525,
      "step": 89
    },
    {
      "epoch": 0.025465091603593407,
      "grad_norm": 0.23854799568653107,
      "learning_rate": 3.7364564614223976e-05,
      "loss": 0.9597,
      "step": 90
    },
    {
      "epoch": 0.025748037065855555,
      "grad_norm": 0.23937970399856567,
      "learning_rate": 3.688271760140255e-05,
      "loss": 0.8657,
      "step": 91
    },
    {
      "epoch": 0.026030982528117707,
      "grad_norm": 0.26307588815689087,
      "learning_rate": 3.6398988922312406e-05,
      "loss": 0.953,
      "step": 92
    },
    {
      "epoch": 0.026313927990379855,
      "grad_norm": 0.27083978056907654,
      "learning_rate": 3.591351082354441e-05,
      "loss": 0.7971,
      "step": 93
    },
    {
      "epoch": 0.026596873452642004,
      "grad_norm": 0.2805502116680145,
      "learning_rate": 3.54264160299633e-05,
      "loss": 0.9744,
      "step": 94
    },
    {
      "epoch": 0.026879818914904152,
      "grad_norm": 0.35504117608070374,
      "learning_rate": 3.493783770842202e-05,
      "loss": 0.9262,
      "step": 95
    },
    {
      "epoch": 0.0271627643771663,
      "grad_norm": 0.33214688301086426,
      "learning_rate": 3.444790943135526e-05,
      "loss": 1.0821,
      "step": 96
    },
    {
      "epoch": 0.02744570983942845,
      "grad_norm": 0.3750684857368469,
      "learning_rate": 3.3956765140262074e-05,
      "loss": 0.8112,
      "step": 97
    },
    {
      "epoch": 0.027728655301690597,
      "grad_norm": 0.3210698962211609,
      "learning_rate": 3.346453910908759e-05,
      "loss": 1.004,
      "step": 98
    },
    {
      "epoch": 0.02801160076395275,
      "grad_norm": 0.3541051745414734,
      "learning_rate": 3.297136590751389e-05,
      "loss": 1.16,
      "step": 99
    },
    {
      "epoch": 0.028294546226214898,
      "grad_norm": 0.36050865054130554,
      "learning_rate": 3.247738036416998e-05,
      "loss": 1.1555,
      "step": 100
    },
    {
      "epoch": 0.028294546226214898,
      "eval_loss": 1.0897471904754639,
      "eval_runtime": 853.3729,
      "eval_samples_per_second": 6.976,
      "eval_steps_per_second": 1.745,
      "step": 100
    },
    {
      "epoch": 0.028577491688477046,
      "grad_norm": 0.1543198525905609,
      "learning_rate": 3.1982717529770985e-05,
      "loss": 1.2425,
      "step": 101
    },
    {
      "epoch": 0.028860437150739195,
      "grad_norm": 0.19346995651721954,
      "learning_rate": 3.148751264019667e-05,
      "loss": 1.349,
      "step": 102
    },
    {
      "epoch": 0.029143382613001343,
      "grad_norm": 0.1380472481250763,
      "learning_rate": 3.099190107951924e-05,
      "loss": 1.1654,
      "step": 103
    },
    {
      "epoch": 0.02942632807526349,
      "grad_norm": 0.1306401491165161,
      "learning_rate": 3.049601834299076e-05,
      "loss": 1.1968,
      "step": 104
    },
    {
      "epoch": 0.029709273537525643,
      "grad_norm": 0.14598701894283295,
      "learning_rate": 3e-05,
      "loss": 1.1663,
      "step": 105
    },
    {
      "epoch": 0.029992218999787792,
      "grad_norm": 0.14115272462368011,
      "learning_rate": 2.9503981657009246e-05,
      "loss": 1.212,
      "step": 106
    },
    {
      "epoch": 0.03027516446204994,
      "grad_norm": 0.13328498601913452,
      "learning_rate": 2.9008098920480752e-05,
      "loss": 1.1183,
      "step": 107
    },
    {
      "epoch": 0.03055810992431209,
      "grad_norm": 0.12777693569660187,
      "learning_rate": 2.851248735980333e-05,
      "loss": 1.113,
      "step": 108
    },
    {
      "epoch": 0.030841055386574237,
      "grad_norm": 0.1404535323381424,
      "learning_rate": 2.801728247022902e-05,
      "loss": 1.128,
      "step": 109
    },
    {
      "epoch": 0.031124000848836385,
      "grad_norm": 0.11834380775690079,
      "learning_rate": 2.7522619635830034e-05,
      "loss": 1.166,
      "step": 110
    },
    {
      "epoch": 0.031406946311098534,
      "grad_norm": 0.14574697613716125,
      "learning_rate": 2.702863409248612e-05,
      "loss": 1.1504,
      "step": 111
    },
    {
      "epoch": 0.031689891773360686,
      "grad_norm": 0.13284307718276978,
      "learning_rate": 2.6535460890912416e-05,
      "loss": 1.1715,
      "step": 112
    },
    {
      "epoch": 0.03197283723562283,
      "grad_norm": 0.128324955701828,
      "learning_rate": 2.604323485973793e-05,
      "loss": 1.0557,
      "step": 113
    },
    {
      "epoch": 0.03225578269788498,
      "grad_norm": 0.13001534342765808,
      "learning_rate": 2.555209056864474e-05,
      "loss": 1.2398,
      "step": 114
    },
    {
      "epoch": 0.032538728160147135,
      "grad_norm": 0.13345256447792053,
      "learning_rate": 2.5062162291577978e-05,
      "loss": 1.2067,
      "step": 115
    },
    {
      "epoch": 0.03282167362240928,
      "grad_norm": 0.12472221255302429,
      "learning_rate": 2.4573583970036712e-05,
      "loss": 1.1886,
      "step": 116
    },
    {
      "epoch": 0.03310461908467143,
      "grad_norm": 0.14320768415927887,
      "learning_rate": 2.4086489176455595e-05,
      "loss": 1.2183,
      "step": 117
    },
    {
      "epoch": 0.033387564546933576,
      "grad_norm": 0.1415589302778244,
      "learning_rate": 2.36010110776876e-05,
      "loss": 1.2193,
      "step": 118
    },
    {
      "epoch": 0.03367051000919573,
      "grad_norm": 0.14643894135951996,
      "learning_rate": 2.3117282398597456e-05,
      "loss": 1.2352,
      "step": 119
    },
    {
      "epoch": 0.03395345547145787,
      "grad_norm": 0.1440449059009552,
      "learning_rate": 2.263543538577603e-05,
      "loss": 1.1977,
      "step": 120
    },
    {
      "epoch": 0.034236400933720025,
      "grad_norm": 0.14856146275997162,
      "learning_rate": 2.215560177138541e-05,
      "loss": 1.1351,
      "step": 121
    },
    {
      "epoch": 0.03451934639598218,
      "grad_norm": 0.13947530090808868,
      "learning_rate": 2.167791273714467e-05,
      "loss": 1.169,
      "step": 122
    },
    {
      "epoch": 0.03480229185824432,
      "grad_norm": 0.13417986035346985,
      "learning_rate": 2.1202498878466062e-05,
      "loss": 1.1483,
      "step": 123
    },
    {
      "epoch": 0.035085237320506474,
      "grad_norm": 0.13690292835235596,
      "learning_rate": 2.072949016875158e-05,
      "loss": 1.0759,
      "step": 124
    },
    {
      "epoch": 0.03536818278276862,
      "grad_norm": 0.1381928026676178,
      "learning_rate": 2.0259015923859498e-05,
      "loss": 1.2043,
      "step": 125
    },
    {
      "epoch": 0.03565112824503077,
      "grad_norm": 0.14060895144939423,
      "learning_rate": 1.979120476675071e-05,
      "loss": 1.1919,
      "step": 126
    },
    {
      "epoch": 0.03593407370729292,
      "grad_norm": 0.12901578843593597,
      "learning_rate": 1.9326184592324503e-05,
      "loss": 1.0793,
      "step": 127
    },
    {
      "epoch": 0.03621701916955507,
      "grad_norm": 0.1504661738872528,
      "learning_rate": 1.8864082532453373e-05,
      "loss": 1.2077,
      "step": 128
    },
    {
      "epoch": 0.03649996463181722,
      "grad_norm": 0.16385625302791595,
      "learning_rate": 1.840502492122644e-05,
      "loss": 1.1345,
      "step": 129
    },
    {
      "epoch": 0.036782910094079364,
      "grad_norm": 0.16052238643169403,
      "learning_rate": 1.7949137260410924e-05,
      "loss": 1.1935,
      "step": 130
    },
    {
      "epoch": 0.037065855556341516,
      "grad_norm": 0.16583813726902008,
      "learning_rate": 1.7496544185141295e-05,
      "loss": 1.0112,
      "step": 131
    },
    {
      "epoch": 0.03734880101860366,
      "grad_norm": 0.17477497458457947,
      "learning_rate": 1.7047369429845216e-05,
      "loss": 1.0563,
      "step": 132
    },
    {
      "epoch": 0.03763174648086581,
      "grad_norm": 0.17088453471660614,
      "learning_rate": 1.6601735794415806e-05,
      "loss": 1.0716,
      "step": 133
    },
    {
      "epoch": 0.037914691943127965,
      "grad_norm": 0.18345321714878082,
      "learning_rate": 1.615976511063937e-05,
      "loss": 0.9948,
      "step": 134
    },
    {
      "epoch": 0.03819763740539011,
      "grad_norm": 0.20441484451293945,
      "learning_rate": 1.5721578208887793e-05,
      "loss": 0.9666,
      "step": 135
    },
    {
      "epoch": 0.03848058286765226,
      "grad_norm": 0.1876918077468872,
      "learning_rate": 1.5287294885084766e-05,
      "loss": 0.9862,
      "step": 136
    },
    {
      "epoch": 0.03876352832991441,
      "grad_norm": 0.22819191217422485,
      "learning_rate": 1.4857033867954697e-05,
      "loss": 1.1309,
      "step": 137
    },
    {
      "epoch": 0.03904647379217656,
      "grad_norm": 0.2145112156867981,
      "learning_rate": 1.4430912786563554e-05,
      "loss": 0.9526,
      "step": 138
    },
    {
      "epoch": 0.039329419254438704,
      "grad_norm": 0.21892738342285156,
      "learning_rate": 1.4009048138160195e-05,
      "loss": 1.0422,
      "step": 139
    },
    {
      "epoch": 0.039612364716700856,
      "grad_norm": 0.23647260665893555,
      "learning_rate": 1.3591555256327199e-05,
      "loss": 0.9467,
      "step": 140
    },
    {
      "epoch": 0.03989531017896301,
      "grad_norm": 0.25753793120384216,
      "learning_rate": 1.3178548279449822e-05,
      "loss": 0.8659,
      "step": 141
    },
    {
      "epoch": 0.04017825564122515,
      "grad_norm": 0.24101592600345612,
      "learning_rate": 1.2770140119511693e-05,
      "loss": 0.8691,
      "step": 142
    },
    {
      "epoch": 0.040461201103487304,
      "grad_norm": 0.2642737329006195,
      "learning_rate": 1.2366442431225809e-05,
      "loss": 1.0043,
      "step": 143
    },
    {
      "epoch": 0.04074414656574945,
      "grad_norm": 0.27606478333473206,
      "learning_rate": 1.1967565581509248e-05,
      "loss": 0.8247,
      "step": 144
    },
    {
      "epoch": 0.0410270920280116,
      "grad_norm": 0.27429643273353577,
      "learning_rate": 1.1573618619309965e-05,
      "loss": 0.8922,
      "step": 145
    },
    {
      "epoch": 0.04131003749027375,
      "grad_norm": 0.29994338750839233,
      "learning_rate": 1.1184709245793889e-05,
      "loss": 1.0195,
      "step": 146
    },
    {
      "epoch": 0.0415929829525359,
      "grad_norm": 0.3414655029773712,
      "learning_rate": 1.0800943784900502e-05,
      "loss": 0.8346,
      "step": 147
    },
    {
      "epoch": 0.04187592841479805,
      "grad_norm": 0.30294448137283325,
      "learning_rate": 1.042242715427494e-05,
      "loss": 1.1227,
      "step": 148
    },
    {
      "epoch": 0.042158873877060195,
      "grad_norm": 0.2909342646598816,
      "learning_rate": 1.004926283658455e-05,
      "loss": 0.8504,
      "step": 149
    },
    {
      "epoch": 0.04244181933932235,
      "grad_norm": 0.35827234387397766,
      "learning_rate": 9.681552851227774e-06,
      "loss": 1.1022,
      "step": 150
    },
    {
      "epoch": 0.04244181933932235,
      "eval_loss": 1.0741040706634521,
      "eval_runtime": 853.591,
      "eval_samples_per_second": 6.974,
      "eval_steps_per_second": 1.744,
      "step": 150
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 4,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 3.95489868899156e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}