{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.5450618144809724,
  "eval_steps": 500,
  "global_step": 4000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.003406636340506077,
      "grad_norm": 0.8387678265571594,
      "learning_rate": 5.4421768707483e-06,
      "loss": 0.8629,
      "step": 25
    },
    {
      "epoch": 0.006813272681012154,
      "grad_norm": 0.5580975413322449,
      "learning_rate": 1.1111111111111112e-05,
      "loss": 0.8351,
      "step": 50
    },
    {
      "epoch": 0.010219909021518231,
      "grad_norm": 0.4770251214504242,
      "learning_rate": 1.6780045351473924e-05,
      "loss": 0.8023,
      "step": 75
    },
    {
      "epoch": 0.013626545362024308,
      "grad_norm": 0.35437077283859253,
      "learning_rate": 2.2448979591836737e-05,
      "loss": 0.7808,
      "step": 100
    },
    {
      "epoch": 0.017033181702530386,
      "grad_norm": 0.37850669026374817,
      "learning_rate": 2.811791383219955e-05,
      "loss": 0.7554,
      "step": 125
    },
    {
      "epoch": 0.020439818043036462,
      "grad_norm": 0.40117064118385315,
      "learning_rate": 3.378684807256236e-05,
      "loss": 0.7419,
      "step": 150
    },
    {
      "epoch": 0.02384645438354254,
      "grad_norm": 0.4868236780166626,
      "learning_rate": 3.945578231292517e-05,
      "loss": 0.751,
      "step": 175
    },
    {
      "epoch": 0.027253090724048617,
      "grad_norm": 0.3966948390007019,
      "learning_rate": 4.512471655328798e-05,
      "loss": 0.7251,
      "step": 200
    },
    {
      "epoch": 0.030659727064554693,
      "grad_norm": 0.3908109664916992,
      "learning_rate": 5.0793650793650794e-05,
      "loss": 0.7088,
      "step": 225
    },
    {
      "epoch": 0.03406636340506077,
      "grad_norm": 0.3687989115715027,
      "learning_rate": 5.646258503401361e-05,
      "loss": 0.7115,
      "step": 250
    },
    {
      "epoch": 0.03747299974556685,
      "grad_norm": 0.3919059634208679,
      "learning_rate": 6.213151927437642e-05,
      "loss": 0.7026,
      "step": 275
    },
    {
      "epoch": 0.040879636086072924,
      "grad_norm": 0.42019009590148926,
      "learning_rate": 6.780045351473924e-05,
      "loss": 0.6967,
      "step": 300
    },
    {
      "epoch": 0.044286272426579,
      "grad_norm": 0.4229620695114136,
      "learning_rate": 7.346938775510205e-05,
      "loss": 0.7148,
      "step": 325
    },
    {
      "epoch": 0.04769290876708508,
      "grad_norm": 0.39575278759002686,
      "learning_rate": 7.913832199546486e-05,
      "loss": 0.744,
      "step": 350
    },
    {
      "epoch": 0.051099545107591154,
      "grad_norm": 0.45732468366622925,
      "learning_rate": 8.480725623582767e-05,
      "loss": 0.7216,
      "step": 375
    },
    {
      "epoch": 0.054506181448097234,
      "grad_norm": 0.3956912159919739,
      "learning_rate": 9.047619047619048e-05,
      "loss": 0.6953,
      "step": 400
    },
    {
      "epoch": 0.05791281778860331,
      "grad_norm": 0.3948104679584503,
      "learning_rate": 9.61451247165533e-05,
      "loss": 0.7235,
      "step": 425
    },
    {
      "epoch": 0.061319454129109385,
      "grad_norm": 0.3913336396217346,
      "learning_rate": 9.99438004917457e-05,
      "loss": 0.6676,
      "step": 450
    },
    {
      "epoch": 0.06472609046961547,
      "grad_norm": 0.3908584713935852,
      "learning_rate": 9.9768177028451e-05,
      "loss": 0.7158,
      "step": 475
    },
    {
      "epoch": 0.06813272681012154,
      "grad_norm": 0.4225063920021057,
      "learning_rate": 9.959255356515631e-05,
      "loss": 0.7129,
      "step": 500
    },
    {
      "epoch": 0.07153936315062762,
      "grad_norm": 2.2388832569122314,
      "learning_rate": 9.941693010186162e-05,
      "loss": 0.7199,
      "step": 525
    },
    {
      "epoch": 0.0749459994911337,
      "grad_norm": 0.39503997564315796,
      "learning_rate": 9.924130663856692e-05,
      "loss": 0.7298,
      "step": 550
    },
    {
      "epoch": 0.07835263583163977,
      "grad_norm": 2.1647109985351562,
      "learning_rate": 9.906568317527221e-05,
      "loss": 0.7499,
      "step": 575
    },
    {
      "epoch": 0.08175927217214585,
      "grad_norm": 0.36966434121131897,
      "learning_rate": 9.889005971197752e-05,
      "loss": 0.7212,
      "step": 600
    },
    {
      "epoch": 0.08516590851265193,
      "grad_norm": 0.36990946531295776,
      "learning_rate": 9.871443624868283e-05,
      "loss": 0.7214,
      "step": 625
    },
    {
      "epoch": 0.088572544853158,
      "grad_norm": 0.4158572852611542,
      "learning_rate": 9.853881278538813e-05,
      "loss": 0.6942,
      "step": 650
    },
    {
      "epoch": 0.09197918119366408,
      "grad_norm": 0.3846476972103119,
      "learning_rate": 9.836318932209344e-05,
      "loss": 0.7218,
      "step": 675
    },
    {
      "epoch": 0.09538581753417016,
      "grad_norm": 0.33537471294403076,
      "learning_rate": 9.818756585879874e-05,
      "loss": 0.7115,
      "step": 700
    },
    {
      "epoch": 0.09879245387467624,
      "grad_norm": 0.3672342896461487,
      "learning_rate": 9.801194239550405e-05,
      "loss": 0.7225,
      "step": 725
    },
    {
      "epoch": 0.10219909021518231,
      "grad_norm": 0.3498263955116272,
      "learning_rate": 9.783631893220935e-05,
      "loss": 0.7124,
      "step": 750
    },
    {
      "epoch": 0.1056057265556884,
      "grad_norm": 0.3860284388065338,
      "learning_rate": 9.766069546891466e-05,
      "loss": 0.7018,
      "step": 775
    },
    {
      "epoch": 0.10901236289619447,
      "grad_norm": 0.33633533120155334,
      "learning_rate": 9.748507200561996e-05,
      "loss": 0.6962,
      "step": 800
    },
    {
      "epoch": 0.11241899923670054,
      "grad_norm": 0.3424709439277649,
      "learning_rate": 9.730944854232526e-05,
      "loss": 0.7068,
      "step": 825
    },
    {
      "epoch": 0.11582563557720663,
      "grad_norm": 0.3627208173274994,
      "learning_rate": 9.713382507903056e-05,
      "loss": 0.6738,
      "step": 850
    },
    {
      "epoch": 0.1192322719177127,
      "grad_norm": 0.3304712176322937,
      "learning_rate": 9.695820161573587e-05,
      "loss": 0.7004,
      "step": 875
    },
    {
      "epoch": 0.12263890825821877,
      "grad_norm": 0.37575623393058777,
      "learning_rate": 9.678257815244117e-05,
      "loss": 0.711,
      "step": 900
    },
    {
      "epoch": 0.12604554459872486,
      "grad_norm": 0.37238940596580505,
      "learning_rate": 9.660695468914648e-05,
      "loss": 0.7172,
      "step": 925
    },
    {
      "epoch": 0.12945218093923094,
      "grad_norm": 0.39354655146598816,
      "learning_rate": 9.643133122585177e-05,
      "loss": 0.6949,
      "step": 950
    },
    {
      "epoch": 0.132858817279737,
      "grad_norm": 0.34536346793174744,
      "learning_rate": 9.625570776255708e-05,
      "loss": 0.6871,
      "step": 975
    },
    {
      "epoch": 0.1362654536202431,
      "grad_norm": 0.3518439829349518,
      "learning_rate": 9.608008429926238e-05,
      "loss": 0.7168,
      "step": 1000
    },
    {
      "epoch": 0.13967208996074917,
      "grad_norm": 0.3994406759738922,
      "learning_rate": 9.590446083596769e-05,
      "loss": 0.722,
      "step": 1025
    },
    {
      "epoch": 0.14307872630125523,
      "grad_norm": 0.38314729928970337,
      "learning_rate": 9.5728837372673e-05,
      "loss": 0.6858,
      "step": 1050
    },
    {
      "epoch": 0.14648536264176132,
      "grad_norm": 0.3678247630596161,
      "learning_rate": 9.55532139093783e-05,
      "loss": 0.6895,
      "step": 1075
    },
    {
      "epoch": 0.1498919989822674,
      "grad_norm": 0.37229785323143005,
      "learning_rate": 9.537759044608359e-05,
      "loss": 0.6858,
      "step": 1100
    },
    {
      "epoch": 0.15329863532277346,
      "grad_norm": 0.34786805510520935,
      "learning_rate": 9.52019669827889e-05,
      "loss": 0.7055,
      "step": 1125
    },
    {
      "epoch": 0.15670527166327955,
      "grad_norm": 0.34139758348464966,
      "learning_rate": 9.50263435194942e-05,
      "loss": 0.6959,
      "step": 1150
    },
    {
      "epoch": 0.16011190800378564,
      "grad_norm": 0.33563101291656494,
      "learning_rate": 9.485072005619952e-05,
      "loss": 0.6696,
      "step": 1175
    },
    {
      "epoch": 0.1635185443442917,
      "grad_norm": 0.4068642556667328,
      "learning_rate": 9.467509659290483e-05,
      "loss": 0.7035,
      "step": 1200
    },
    {
      "epoch": 0.16692518068479778,
      "grad_norm": 0.3011302053928375,
      "learning_rate": 9.449947312961012e-05,
      "loss": 0.6759,
      "step": 1225
    },
    {
      "epoch": 0.17033181702530387,
      "grad_norm": 0.3068943917751312,
      "learning_rate": 9.432384966631543e-05,
      "loss": 0.6874,
      "step": 1250
    },
    {
      "epoch": 0.17373845336580993,
      "grad_norm": 0.3331654369831085,
      "learning_rate": 9.414822620302073e-05,
      "loss": 0.6886,
      "step": 1275
    },
    {
      "epoch": 0.177145089706316,
      "grad_norm": 0.3533156216144562,
      "learning_rate": 9.397260273972604e-05,
      "loss": 0.7059,
      "step": 1300
    },
    {
      "epoch": 0.1805517260468221,
      "grad_norm": 0.3277696669101715,
      "learning_rate": 9.379697927643134e-05,
      "loss": 0.6793,
      "step": 1325
    },
    {
      "epoch": 0.18395836238732816,
      "grad_norm": 0.6513714790344238,
      "learning_rate": 9.362135581313663e-05,
      "loss": 0.6741,
      "step": 1350
    },
    {
      "epoch": 0.18736499872783424,
      "grad_norm": 0.3339422047138214,
      "learning_rate": 9.344573234984194e-05,
      "loss": 0.7229,
      "step": 1375
    },
    {
      "epoch": 0.19077163506834033,
      "grad_norm": 0.27368980646133423,
      "learning_rate": 9.327010888654725e-05,
      "loss": 0.6889,
      "step": 1400
    },
    {
      "epoch": 0.1941782714088464,
      "grad_norm": 0.3697713017463684,
      "learning_rate": 9.309448542325255e-05,
      "loss": 0.7091,
      "step": 1425
    },
    {
      "epoch": 0.19758490774935247,
      "grad_norm": 0.30846071243286133,
      "learning_rate": 9.291886195995786e-05,
      "loss": 0.6929,
      "step": 1450
    },
    {
      "epoch": 0.20099154408985856,
      "grad_norm": 0.32923388481140137,
      "learning_rate": 9.274323849666315e-05,
      "loss": 0.6828,
      "step": 1475
    },
    {
      "epoch": 0.20439818043036462,
      "grad_norm": 0.34551820158958435,
      "learning_rate": 9.256761503336846e-05,
      "loss": 0.7007,
      "step": 1500
    },
    {
      "epoch": 0.2078048167708707,
      "grad_norm": 0.33527928590774536,
      "learning_rate": 9.239199157007376e-05,
      "loss": 0.7209,
      "step": 1525
    },
    {
      "epoch": 0.2112114531113768,
      "grad_norm": 0.30302199721336365,
      "learning_rate": 9.221636810677907e-05,
      "loss": 0.6802,
      "step": 1550
    },
    {
      "epoch": 0.21461808945188285,
      "grad_norm": 0.2833121120929718,
      "learning_rate": 9.204074464348437e-05,
      "loss": 0.6841,
      "step": 1575
    },
    {
      "epoch": 0.21802472579238893,
      "grad_norm": 0.3293180465698242,
      "learning_rate": 9.186512118018968e-05,
      "loss": 0.6902,
      "step": 1600
    },
    {
      "epoch": 0.22143136213289502,
      "grad_norm": 0.3296971023082733,
      "learning_rate": 9.168949771689498e-05,
      "loss": 0.6796,
      "step": 1625
    },
    {
      "epoch": 0.22483799847340108,
      "grad_norm": 0.29069727659225464,
      "learning_rate": 9.151387425360029e-05,
      "loss": 0.7244,
      "step": 1650
    },
    {
      "epoch": 0.22824463481390717,
      "grad_norm": 0.3884179890155792,
      "learning_rate": 9.13382507903056e-05,
      "loss": 0.6951,
      "step": 1675
    },
    {
      "epoch": 0.23165127115441325,
      "grad_norm": 0.30107781291007996,
      "learning_rate": 9.11626273270109e-05,
      "loss": 0.6873,
      "step": 1700
    },
    {
      "epoch": 0.2350579074949193,
      "grad_norm": 0.501139760017395,
      "learning_rate": 9.098700386371619e-05,
      "loss": 0.7114,
      "step": 1725
    },
    {
      "epoch": 0.2384645438354254,
      "grad_norm": 0.3498416841030121,
      "learning_rate": 9.08113804004215e-05,
      "loss": 0.6801,
      "step": 1750
    },
    {
      "epoch": 0.24187118017593148,
      "grad_norm": 0.34361162781715393,
      "learning_rate": 9.06357569371268e-05,
      "loss": 0.6702,
      "step": 1775
    },
    {
      "epoch": 0.24527781651643754,
      "grad_norm": 0.39064115285873413,
      "learning_rate": 9.046013347383211e-05,
      "loss": 0.6805,
      "step": 1800
    },
    {
      "epoch": 0.24868445285694363,
      "grad_norm": 0.29779884219169617,
      "learning_rate": 9.028451001053742e-05,
      "loss": 0.6929,
      "step": 1825
    },
    {
      "epoch": 0.2520910891974497,
      "grad_norm": 0.3189874291419983,
      "learning_rate": 9.010888654724272e-05,
      "loss": 0.6877,
      "step": 1850
    },
    {
      "epoch": 0.2554977255379558,
      "grad_norm": 0.33130019903182983,
      "learning_rate": 8.993326308394801e-05,
      "loss": 0.7176,
      "step": 1875
    },
    {
      "epoch": 0.2589043618784619,
      "grad_norm": 0.3092060983181,
      "learning_rate": 8.975763962065332e-05,
      "loss": 0.6872,
      "step": 1900
    },
    {
      "epoch": 0.2623109982189679,
      "grad_norm": 0.2977277338504791,
      "learning_rate": 8.958201615735862e-05,
      "loss": 0.6854,
      "step": 1925
    },
    {
      "epoch": 0.265717634559474,
      "grad_norm": 0.3292854130268097,
      "learning_rate": 8.940639269406393e-05,
      "loss": 0.6726,
      "step": 1950
    },
    {
      "epoch": 0.2691242708999801,
      "grad_norm": 0.30629754066467285,
      "learning_rate": 8.923076923076924e-05,
      "loss": 0.6928,
      "step": 1975
    },
    {
      "epoch": 0.2725309072404862,
      "grad_norm": 0.38389158248901367,
      "learning_rate": 8.905514576747454e-05,
      "loss": 0.6804,
      "step": 2000
    },
    {
      "epoch": 0.27593754358099226,
      "grad_norm": 0.29301708936691284,
      "learning_rate": 8.887952230417985e-05,
      "loss": 0.6815,
      "step": 2025
    },
    {
      "epoch": 0.27934417992149835,
      "grad_norm": 0.3136097490787506,
      "learning_rate": 8.870389884088515e-05,
      "loss": 0.7038,
      "step": 2050
    },
    {
      "epoch": 0.2827508162620044,
      "grad_norm": 0.4756239950656891,
      "learning_rate": 8.852827537759046e-05,
      "loss": 0.6772,
      "step": 2075
    },
    {
      "epoch": 0.28615745260251046,
      "grad_norm": 0.3121085464954376,
      "learning_rate": 8.835265191429576e-05,
      "loss": 0.6604,
      "step": 2100
    },
    {
      "epoch": 0.28956408894301655,
      "grad_norm": 0.3641608655452728,
      "learning_rate": 8.817702845100106e-05,
      "loss": 0.6645,
      "step": 2125
    },
    {
      "epoch": 0.29297072528352264,
      "grad_norm": 0.34608688950538635,
      "learning_rate": 8.800140498770636e-05,
      "loss": 0.6702,
      "step": 2150
    },
    {
      "epoch": 0.2963773616240287,
      "grad_norm": 0.3482177257537842,
      "learning_rate": 8.782578152441167e-05,
      "loss": 0.7293,
      "step": 2175
    },
    {
      "epoch": 0.2997839979645348,
      "grad_norm": 0.34053704142570496,
      "learning_rate": 8.765015806111697e-05,
      "loss": 0.6724,
      "step": 2200
    },
    {
      "epoch": 0.30319063430504084,
      "grad_norm": 0.32497864961624146,
      "learning_rate": 8.747453459782228e-05,
      "loss": 0.6511,
      "step": 2225
    },
    {
      "epoch": 0.3065972706455469,
      "grad_norm": 0.3069411814212799,
      "learning_rate": 8.729891113452757e-05,
      "loss": 0.6849,
      "step": 2250
    },
    {
      "epoch": 0.310003906986053,
      "grad_norm": 0.28222939372062683,
      "learning_rate": 8.712328767123288e-05,
      "loss": 0.66,
      "step": 2275
    },
    {
      "epoch": 0.3134105433265591,
      "grad_norm": 0.3223375082015991,
      "learning_rate": 8.694766420793818e-05,
      "loss": 0.6819,
      "step": 2300
    },
    {
      "epoch": 0.3168171796670652,
      "grad_norm": 0.2859184443950653,
      "learning_rate": 8.677204074464349e-05,
      "loss": 0.7061,
      "step": 2325
    },
    {
      "epoch": 0.32022381600757127,
      "grad_norm": 0.30723094940185547,
      "learning_rate": 8.659641728134879e-05,
      "loss": 0.678,
      "step": 2350
    },
    {
      "epoch": 0.3236304523480773,
      "grad_norm": 0.3178076446056366,
      "learning_rate": 8.642079381805409e-05,
      "loss": 0.6797,
      "step": 2375
    },
    {
      "epoch": 0.3270370886885834,
      "grad_norm": 0.3042934536933899,
      "learning_rate": 8.624517035475939e-05,
      "loss": 0.6654,
      "step": 2400
    },
    {
      "epoch": 0.3304437250290895,
      "grad_norm": 0.3517778813838959,
      "learning_rate": 8.60695468914647e-05,
      "loss": 0.6727,
      "step": 2425
    },
    {
      "epoch": 0.33385036136959556,
      "grad_norm": 0.2900087833404541,
      "learning_rate": 8.589392342817002e-05,
      "loss": 0.6886,
      "step": 2450
    },
    {
      "epoch": 0.33725699771010165,
      "grad_norm": 0.3086068630218506,
      "learning_rate": 8.571829996487532e-05,
      "loss": 0.6885,
      "step": 2475
    },
    {
      "epoch": 0.34066363405060773,
      "grad_norm": 0.3216247856616974,
      "learning_rate": 8.554267650158061e-05,
      "loss": 0.6753,
      "step": 2500
    },
    {
      "epoch": 0.34407027039111376,
      "grad_norm": 0.3239280581474304,
      "learning_rate": 8.536705303828592e-05,
      "loss": 0.7092,
      "step": 2525
    },
    {
      "epoch": 0.34747690673161985,
      "grad_norm": 0.2760443091392517,
      "learning_rate": 8.519142957499122e-05,
      "loss": 0.7039,
      "step": 2550
    },
    {
      "epoch": 0.35088354307212594,
      "grad_norm": 0.3144066631793976,
      "learning_rate": 8.501580611169653e-05,
      "loss": 0.6988,
      "step": 2575
    },
    {
      "epoch": 0.354290179412632,
      "grad_norm": 0.32952046394348145,
      "learning_rate": 8.484018264840184e-05,
      "loss": 0.6911,
      "step": 2600
    },
    {
      "epoch": 0.3576968157531381,
      "grad_norm": 2.4748454093933105,
      "learning_rate": 8.466455918510714e-05,
      "loss": 0.6748,
      "step": 2625
    },
    {
      "epoch": 0.3611034520936442,
      "grad_norm": 0.32749420404434204,
      "learning_rate": 8.448893572181243e-05,
      "loss": 0.6775,
      "step": 2650
    },
    {
      "epoch": 0.3645100884341502,
      "grad_norm": 0.28332722187042236,
      "learning_rate": 8.431331225851774e-05,
      "loss": 0.6788,
      "step": 2675
    },
    {
      "epoch": 0.3679167247746563,
      "grad_norm": 0.29811403155326843,
      "learning_rate": 8.413768879522305e-05,
      "loss": 0.6954,
      "step": 2700
    },
    {
      "epoch": 0.3713233611151624,
      "grad_norm": 0.3304058313369751,
      "learning_rate": 8.396206533192835e-05,
      "loss": 0.6759,
      "step": 2725
    },
    {
      "epoch": 0.3747299974556685,
      "grad_norm": 0.32398712635040283,
      "learning_rate": 8.378644186863366e-05,
      "loss": 0.6886,
      "step": 2750
    },
    {
      "epoch": 0.37813663379617457,
      "grad_norm": 0.2734803259372711,
      "learning_rate": 8.361081840533895e-05,
      "loss": 0.6839,
      "step": 2775
    },
    {
      "epoch": 0.38154327013668066,
      "grad_norm": 0.2840772271156311,
      "learning_rate": 8.343519494204425e-05,
      "loss": 0.6828,
      "step": 2800
    },
    {
      "epoch": 0.3849499064771867,
      "grad_norm": 0.39391207695007324,
      "learning_rate": 8.325957147874956e-05,
      "loss": 0.6935,
      "step": 2825
    },
    {
      "epoch": 0.3883565428176928,
      "grad_norm": 0.27879825234413147,
      "learning_rate": 8.308394801545487e-05,
      "loss": 0.7002,
      "step": 2850
    },
    {
      "epoch": 0.39176317915819886,
      "grad_norm": 0.30057233572006226,
      "learning_rate": 8.290832455216017e-05,
      "loss": 0.6784,
      "step": 2875
    },
    {
      "epoch": 0.39516981549870495,
      "grad_norm": 0.31198152899742126,
      "learning_rate": 8.273270108886548e-05,
      "loss": 0.6896,
      "step": 2900
    },
    {
      "epoch": 0.39857645183921103,
      "grad_norm": 0.2815667390823364,
      "learning_rate": 8.255707762557078e-05,
      "loss": 0.6649,
      "step": 2925
    },
    {
      "epoch": 0.4019830881797171,
      "grad_norm": 0.35836973786354065,
      "learning_rate": 8.238145416227609e-05,
      "loss": 0.6601,
      "step": 2950
    },
    {
      "epoch": 0.4053897245202232,
      "grad_norm": 0.289037823677063,
      "learning_rate": 8.22058306989814e-05,
      "loss": 0.6842,
      "step": 2975
    },
    {
      "epoch": 0.40879636086072924,
      "grad_norm": 0.32901930809020996,
      "learning_rate": 8.20302072356867e-05,
      "loss": 0.6677,
      "step": 3000
    },
    {
      "epoch": 0.4122029972012353,
      "grad_norm": 0.34060442447662354,
      "learning_rate": 8.185458377239199e-05,
      "loss": 0.6728,
      "step": 3025
    },
    {
      "epoch": 0.4156096335417414,
      "grad_norm": 0.3380946218967438,
      "learning_rate": 8.16789603090973e-05,
      "loss": 0.7131,
      "step": 3050
    },
    {
      "epoch": 0.4190162698822475,
      "grad_norm": 0.3136932849884033,
      "learning_rate": 8.15033368458026e-05,
      "loss": 0.6829,
      "step": 3075
    },
    {
      "epoch": 0.4224229062227536,
      "grad_norm": 0.2733888328075409,
      "learning_rate": 8.132771338250791e-05,
      "loss": 0.7026,
      "step": 3100
    },
    {
      "epoch": 0.42582954256325967,
      "grad_norm": 0.3108291029930115,
      "learning_rate": 8.115208991921321e-05,
      "loss": 0.689,
      "step": 3125
    },
    {
      "epoch": 0.4292361789037657,
      "grad_norm": 0.3084828853607178,
      "learning_rate": 8.09764664559185e-05,
      "loss": 0.6723,
      "step": 3150
    },
    {
      "epoch": 0.4326428152442718,
      "grad_norm": 0.2837083041667938,
      "learning_rate": 8.080084299262381e-05,
      "loss": 0.6699,
      "step": 3175
    },
    {
      "epoch": 0.43604945158477787,
      "grad_norm": 0.3026272654533386,
      "learning_rate": 8.062521952932912e-05,
      "loss": 0.6771,
      "step": 3200
    },
    {
      "epoch": 0.43945608792528396,
      "grad_norm": 0.29208138585090637,
      "learning_rate": 8.044959606603442e-05,
      "loss": 0.6636,
      "step": 3225
    },
    {
      "epoch": 0.44286272426579004,
      "grad_norm": 0.29942700266838074,
      "learning_rate": 8.027397260273973e-05,
      "loss": 0.6788,
      "step": 3250
    },
    {
      "epoch": 0.44626936060629613,
      "grad_norm": 0.262577086687088,
      "learning_rate": 8.009834913944503e-05,
      "loss": 0.65,
      "step": 3275
    },
    {
      "epoch": 0.44967599694680216,
      "grad_norm": 0.29627498984336853,
      "learning_rate": 7.992272567615034e-05,
      "loss": 0.651,
      "step": 3300
    },
    {
      "epoch": 0.45308263328730825,
      "grad_norm": 0.298234224319458,
      "learning_rate": 7.974710221285565e-05,
      "loss": 0.6883,
      "step": 3325
    },
    {
      "epoch": 0.45648926962781433,
      "grad_norm": 0.2771220803260803,
      "learning_rate": 7.957147874956095e-05,
      "loss": 0.6786,
      "step": 3350
    },
    {
      "epoch": 0.4598959059683204,
      "grad_norm": 0.29765036702156067,
      "learning_rate": 7.939585528626626e-05,
      "loss": 0.6835,
      "step": 3375
    },
    {
      "epoch": 0.4633025423088265,
      "grad_norm": 0.31564295291900635,
      "learning_rate": 7.922023182297156e-05,
      "loss": 0.6546,
      "step": 3400
    },
    {
      "epoch": 0.4667091786493326,
      "grad_norm": 0.2750963270664215,
      "learning_rate": 7.904460835967685e-05,
      "loss": 0.6765,
      "step": 3425
    },
    {
      "epoch": 0.4701158149898386,
      "grad_norm": 0.28891250491142273,
      "learning_rate": 7.886898489638216e-05,
      "loss": 0.6719,
      "step": 3450
    },
    {
      "epoch": 0.4735224513303447,
      "grad_norm": 0.3090450167655945,
      "learning_rate": 7.869336143308747e-05,
      "loss": 0.6629,
      "step": 3475
    },
    {
      "epoch": 0.4769290876708508,
      "grad_norm": 0.2829868495464325,
      "learning_rate": 7.851773796979277e-05,
      "loss": 0.676,
      "step": 3500
    },
    {
      "epoch": 0.4803357240113569,
      "grad_norm": 0.2897719144821167,
      "learning_rate": 7.834211450649808e-05,
      "loss": 0.6853,
      "step": 3525
    },
    {
      "epoch": 0.48374236035186297,
      "grad_norm": 0.31082597374916077,
      "learning_rate": 7.816649104320337e-05,
      "loss": 0.6818,
      "step": 3550
    },
    {
      "epoch": 0.48714899669236905,
      "grad_norm": 0.29339250922203064,
      "learning_rate": 7.799086757990867e-05,
      "loss": 0.673,
      "step": 3575
    },
    {
      "epoch": 0.4905556330328751,
      "grad_norm": 0.29037582874298096,
      "learning_rate": 7.781524411661398e-05,
      "loss": 0.6726,
      "step": 3600
    },
    {
      "epoch": 0.49396226937338117,
      "grad_norm": 0.2868305742740631,
      "learning_rate": 7.763962065331929e-05,
      "loss": 0.6894,
      "step": 3625
    },
    {
      "epoch": 0.49736890571388725,
      "grad_norm": 0.28504157066345215,
      "learning_rate": 7.746399719002459e-05,
      "loss": 0.6479,
      "step": 3650
    },
    {
      "epoch": 0.5007755420543933,
      "grad_norm": 0.28001776337623596,
      "learning_rate": 7.728837372672988e-05,
      "loss": 0.6571,
      "step": 3675
    },
    {
      "epoch": 0.5041821783948994,
      "grad_norm": 0.3145054280757904,
      "learning_rate": 7.711275026343519e-05,
      "loss": 0.6638,
      "step": 3700
    },
    {
      "epoch": 0.5075888147354055,
      "grad_norm": 0.3753933012485504,
      "learning_rate": 7.693712680014051e-05,
      "loss": 0.6617,
      "step": 3725
    },
    {
      "epoch": 0.5109954510759116,
      "grad_norm": 0.2966824173927307,
      "learning_rate": 7.676150333684581e-05,
      "loss": 0.6666,
      "step": 3750
    },
    {
      "epoch": 0.5144020874164177,
      "grad_norm": 0.26844897866249084,
      "learning_rate": 7.658587987355112e-05,
      "loss": 0.6646,
      "step": 3775
    },
    {
      "epoch": 0.5178087237569238,
      "grad_norm": 0.3304543197154999,
      "learning_rate": 7.641025641025641e-05,
      "loss": 0.6704,
      "step": 3800
    },
    {
      "epoch": 0.5212153600974297,
      "grad_norm": 0.31017178297042847,
      "learning_rate": 7.623463294696172e-05,
      "loss": 0.6846,
      "step": 3825
    },
    {
      "epoch": 0.5246219964379358,
      "grad_norm": 0.3583391308784485,
      "learning_rate": 7.605900948366702e-05,
      "loss": 0.6759,
      "step": 3850
    },
    {
      "epoch": 0.5280286327784419,
      "grad_norm": 0.28191253542900085,
      "learning_rate": 7.588338602037233e-05,
      "loss": 0.6369,
      "step": 3875
    },
    {
      "epoch": 0.531435269118948,
      "grad_norm": 0.27828291058540344,
      "learning_rate": 7.570776255707763e-05,
      "loss": 0.6818,
      "step": 3900
    },
    {
      "epoch": 0.5348419054594541,
      "grad_norm": 0.3020898997783661,
      "learning_rate": 7.553213909378293e-05,
      "loss": 0.6578,
      "step": 3925
    },
    {
      "epoch": 0.5382485417999602,
      "grad_norm": 0.266043484210968,
      "learning_rate": 7.535651563048823e-05,
      "loss": 0.6652,
      "step": 3950
    },
    {
      "epoch": 0.5416551781404663,
      "grad_norm": 0.2994140684604645,
      "learning_rate": 7.518089216719354e-05,
      "loss": 0.6639,
      "step": 3975
    },
    {
      "epoch": 0.5450618144809724,
      "grad_norm": 0.29508110880851746,
      "learning_rate": 7.500526870389884e-05,
      "loss": 0.7025,
      "step": 4000
    }
  ],
  "logging_steps": 25,
  "max_steps": 14676,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.7015593282841567e+19,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}