|
{ |
|
"best_global_step": null, |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.49886782570371, |
|
"eval_steps": 500, |
|
"global_step": 11000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.003406636340506077, |
|
"grad_norm": 0.8387678265571594, |
|
"learning_rate": 5.4421768707483e-06, |
|
"loss": 0.8629, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.006813272681012154, |
|
"grad_norm": 0.5580975413322449, |
|
"learning_rate": 1.1111111111111112e-05, |
|
"loss": 0.8351, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.010219909021518231, |
|
"grad_norm": 0.4770251214504242, |
|
"learning_rate": 1.6780045351473924e-05, |
|
"loss": 0.8023, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.013626545362024308, |
|
"grad_norm": 0.35437077283859253, |
|
"learning_rate": 2.2448979591836737e-05, |
|
"loss": 0.7808, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.017033181702530386, |
|
"grad_norm": 0.37850669026374817, |
|
"learning_rate": 2.811791383219955e-05, |
|
"loss": 0.7554, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.020439818043036462, |
|
"grad_norm": 0.40117064118385315, |
|
"learning_rate": 3.378684807256236e-05, |
|
"loss": 0.7419, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.02384645438354254, |
|
"grad_norm": 0.4868236780166626, |
|
"learning_rate": 3.945578231292517e-05, |
|
"loss": 0.751, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.027253090724048617, |
|
"grad_norm": 0.3966948390007019, |
|
"learning_rate": 4.512471655328798e-05, |
|
"loss": 0.7251, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.030659727064554693, |
|
"grad_norm": 0.3908109664916992, |
|
"learning_rate": 5.0793650793650794e-05, |
|
"loss": 0.7088, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.03406636340506077, |
|
"grad_norm": 0.3687989115715027, |
|
"learning_rate": 5.646258503401361e-05, |
|
"loss": 0.7115, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.03747299974556685, |
|
"grad_norm": 0.3919059634208679, |
|
"learning_rate": 6.213151927437642e-05, |
|
"loss": 0.7026, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.040879636086072924, |
|
"grad_norm": 0.42019009590148926, |
|
"learning_rate": 6.780045351473924e-05, |
|
"loss": 0.6967, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.044286272426579, |
|
"grad_norm": 0.4229620695114136, |
|
"learning_rate": 7.346938775510205e-05, |
|
"loss": 0.7148, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.04769290876708508, |
|
"grad_norm": 0.39575278759002686, |
|
"learning_rate": 7.913832199546486e-05, |
|
"loss": 0.744, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.051099545107591154, |
|
"grad_norm": 0.45732468366622925, |
|
"learning_rate": 8.480725623582767e-05, |
|
"loss": 0.7216, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.054506181448097234, |
|
"grad_norm": 0.3956912159919739, |
|
"learning_rate": 9.047619047619048e-05, |
|
"loss": 0.6953, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.05791281778860331, |
|
"grad_norm": 0.3948104679584503, |
|
"learning_rate": 9.61451247165533e-05, |
|
"loss": 0.7235, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.061319454129109385, |
|
"grad_norm": 0.3913336396217346, |
|
"learning_rate": 9.99438004917457e-05, |
|
"loss": 0.6676, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.06472609046961547, |
|
"grad_norm": 0.3908584713935852, |
|
"learning_rate": 9.9768177028451e-05, |
|
"loss": 0.7158, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.06813272681012154, |
|
"grad_norm": 0.4225063920021057, |
|
"learning_rate": 9.959255356515631e-05, |
|
"loss": 0.7129, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.07153936315062762, |
|
"grad_norm": 2.2388832569122314, |
|
"learning_rate": 9.941693010186162e-05, |
|
"loss": 0.7199, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.0749459994911337, |
|
"grad_norm": 0.39503997564315796, |
|
"learning_rate": 9.924130663856692e-05, |
|
"loss": 0.7298, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.07835263583163977, |
|
"grad_norm": 2.1647109985351562, |
|
"learning_rate": 9.906568317527221e-05, |
|
"loss": 0.7499, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.08175927217214585, |
|
"grad_norm": 0.36966434121131897, |
|
"learning_rate": 9.889005971197752e-05, |
|
"loss": 0.7212, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.08516590851265193, |
|
"grad_norm": 0.36990946531295776, |
|
"learning_rate": 9.871443624868283e-05, |
|
"loss": 0.7214, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.088572544853158, |
|
"grad_norm": 0.4158572852611542, |
|
"learning_rate": 9.853881278538813e-05, |
|
"loss": 0.6942, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.09197918119366408, |
|
"grad_norm": 0.3846476972103119, |
|
"learning_rate": 9.836318932209344e-05, |
|
"loss": 0.7218, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.09538581753417016, |
|
"grad_norm": 0.33537471294403076, |
|
"learning_rate": 9.818756585879874e-05, |
|
"loss": 0.7115, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.09879245387467624, |
|
"grad_norm": 0.3672342896461487, |
|
"learning_rate": 9.801194239550405e-05, |
|
"loss": 0.7225, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.10219909021518231, |
|
"grad_norm": 0.3498263955116272, |
|
"learning_rate": 9.783631893220935e-05, |
|
"loss": 0.7124, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.1056057265556884, |
|
"grad_norm": 0.3860284388065338, |
|
"learning_rate": 9.766069546891466e-05, |
|
"loss": 0.7018, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.10901236289619447, |
|
"grad_norm": 0.33633533120155334, |
|
"learning_rate": 9.748507200561996e-05, |
|
"loss": 0.6962, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.11241899923670054, |
|
"grad_norm": 0.3424709439277649, |
|
"learning_rate": 9.730944854232526e-05, |
|
"loss": 0.7068, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.11582563557720663, |
|
"grad_norm": 0.3627208173274994, |
|
"learning_rate": 9.713382507903056e-05, |
|
"loss": 0.6738, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.1192322719177127, |
|
"grad_norm": 0.3304712176322937, |
|
"learning_rate": 9.695820161573587e-05, |
|
"loss": 0.7004, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.12263890825821877, |
|
"grad_norm": 0.37575623393058777, |
|
"learning_rate": 9.678257815244117e-05, |
|
"loss": 0.711, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.12604554459872486, |
|
"grad_norm": 0.37238940596580505, |
|
"learning_rate": 9.660695468914648e-05, |
|
"loss": 0.7172, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.12945218093923094, |
|
"grad_norm": 0.39354655146598816, |
|
"learning_rate": 9.643133122585177e-05, |
|
"loss": 0.6949, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.132858817279737, |
|
"grad_norm": 0.34536346793174744, |
|
"learning_rate": 9.625570776255708e-05, |
|
"loss": 0.6871, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.1362654536202431, |
|
"grad_norm": 0.3518439829349518, |
|
"learning_rate": 9.608008429926238e-05, |
|
"loss": 0.7168, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.13967208996074917, |
|
"grad_norm": 0.3994406759738922, |
|
"learning_rate": 9.590446083596769e-05, |
|
"loss": 0.722, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.14307872630125523, |
|
"grad_norm": 0.38314729928970337, |
|
"learning_rate": 9.5728837372673e-05, |
|
"loss": 0.6858, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.14648536264176132, |
|
"grad_norm": 0.3678247630596161, |
|
"learning_rate": 9.55532139093783e-05, |
|
"loss": 0.6895, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.1498919989822674, |
|
"grad_norm": 0.37229785323143005, |
|
"learning_rate": 9.537759044608359e-05, |
|
"loss": 0.6858, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.15329863532277346, |
|
"grad_norm": 0.34786805510520935, |
|
"learning_rate": 9.52019669827889e-05, |
|
"loss": 0.7055, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.15670527166327955, |
|
"grad_norm": 0.34139758348464966, |
|
"learning_rate": 9.50263435194942e-05, |
|
"loss": 0.6959, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.16011190800378564, |
|
"grad_norm": 0.33563101291656494, |
|
"learning_rate": 9.485072005619952e-05, |
|
"loss": 0.6696, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.1635185443442917, |
|
"grad_norm": 0.4068642556667328, |
|
"learning_rate": 9.467509659290483e-05, |
|
"loss": 0.7035, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.16692518068479778, |
|
"grad_norm": 0.3011302053928375, |
|
"learning_rate": 9.449947312961012e-05, |
|
"loss": 0.6759, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.17033181702530387, |
|
"grad_norm": 0.3068943917751312, |
|
"learning_rate": 9.432384966631543e-05, |
|
"loss": 0.6874, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.17373845336580993, |
|
"grad_norm": 0.3331654369831085, |
|
"learning_rate": 9.414822620302073e-05, |
|
"loss": 0.6886, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.177145089706316, |
|
"grad_norm": 0.3533156216144562, |
|
"learning_rate": 9.397260273972604e-05, |
|
"loss": 0.7059, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.1805517260468221, |
|
"grad_norm": 0.3277696669101715, |
|
"learning_rate": 9.379697927643134e-05, |
|
"loss": 0.6793, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.18395836238732816, |
|
"grad_norm": 0.6513714790344238, |
|
"learning_rate": 9.362135581313663e-05, |
|
"loss": 0.6741, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.18736499872783424, |
|
"grad_norm": 0.3339422047138214, |
|
"learning_rate": 9.344573234984194e-05, |
|
"loss": 0.7229, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.19077163506834033, |
|
"grad_norm": 0.27368980646133423, |
|
"learning_rate": 9.327010888654725e-05, |
|
"loss": 0.6889, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.1941782714088464, |
|
"grad_norm": 0.3697713017463684, |
|
"learning_rate": 9.309448542325255e-05, |
|
"loss": 0.7091, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.19758490774935247, |
|
"grad_norm": 0.30846071243286133, |
|
"learning_rate": 9.291886195995786e-05, |
|
"loss": 0.6929, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.20099154408985856, |
|
"grad_norm": 0.32923388481140137, |
|
"learning_rate": 9.274323849666315e-05, |
|
"loss": 0.6828, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.20439818043036462, |
|
"grad_norm": 0.34551820158958435, |
|
"learning_rate": 9.256761503336846e-05, |
|
"loss": 0.7007, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.2078048167708707, |
|
"grad_norm": 0.33527928590774536, |
|
"learning_rate": 9.239199157007376e-05, |
|
"loss": 0.7209, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.2112114531113768, |
|
"grad_norm": 0.30302199721336365, |
|
"learning_rate": 9.221636810677907e-05, |
|
"loss": 0.6802, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.21461808945188285, |
|
"grad_norm": 0.2833121120929718, |
|
"learning_rate": 9.204074464348437e-05, |
|
"loss": 0.6841, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.21802472579238893, |
|
"grad_norm": 0.3293180465698242, |
|
"learning_rate": 9.186512118018968e-05, |
|
"loss": 0.6902, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.22143136213289502, |
|
"grad_norm": 0.3296971023082733, |
|
"learning_rate": 9.168949771689498e-05, |
|
"loss": 0.6796, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.22483799847340108, |
|
"grad_norm": 0.29069727659225464, |
|
"learning_rate": 9.151387425360029e-05, |
|
"loss": 0.7244, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.22824463481390717, |
|
"grad_norm": 0.3884179890155792, |
|
"learning_rate": 9.13382507903056e-05, |
|
"loss": 0.6951, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.23165127115441325, |
|
"grad_norm": 0.30107781291007996, |
|
"learning_rate": 9.11626273270109e-05, |
|
"loss": 0.6873, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.2350579074949193, |
|
"grad_norm": 0.501139760017395, |
|
"learning_rate": 9.098700386371619e-05, |
|
"loss": 0.7114, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.2384645438354254, |
|
"grad_norm": 0.3498416841030121, |
|
"learning_rate": 9.08113804004215e-05, |
|
"loss": 0.6801, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.24187118017593148, |
|
"grad_norm": 0.34361162781715393, |
|
"learning_rate": 9.06357569371268e-05, |
|
"loss": 0.6702, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.24527781651643754, |
|
"grad_norm": 0.39064115285873413, |
|
"learning_rate": 9.046013347383211e-05, |
|
"loss": 0.6805, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.24868445285694363, |
|
"grad_norm": 0.29779884219169617, |
|
"learning_rate": 9.028451001053742e-05, |
|
"loss": 0.6929, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.2520910891974497, |
|
"grad_norm": 0.3189874291419983, |
|
"learning_rate": 9.010888654724272e-05, |
|
"loss": 0.6877, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.2554977255379558, |
|
"grad_norm": 0.33130019903182983, |
|
"learning_rate": 8.993326308394801e-05, |
|
"loss": 0.7176, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.2589043618784619, |
|
"grad_norm": 0.3092060983181, |
|
"learning_rate": 8.975763962065332e-05, |
|
"loss": 0.6872, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.2623109982189679, |
|
"grad_norm": 0.2977277338504791, |
|
"learning_rate": 8.958201615735862e-05, |
|
"loss": 0.6854, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.265717634559474, |
|
"grad_norm": 0.3292854130268097, |
|
"learning_rate": 8.940639269406393e-05, |
|
"loss": 0.6726, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.2691242708999801, |
|
"grad_norm": 0.30629754066467285, |
|
"learning_rate": 8.923076923076924e-05, |
|
"loss": 0.6928, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 0.2725309072404862, |
|
"grad_norm": 0.38389158248901367, |
|
"learning_rate": 8.905514576747454e-05, |
|
"loss": 0.6804, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.27593754358099226, |
|
"grad_norm": 0.29301708936691284, |
|
"learning_rate": 8.887952230417985e-05, |
|
"loss": 0.6815, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 0.27934417992149835, |
|
"grad_norm": 0.3136097490787506, |
|
"learning_rate": 8.870389884088515e-05, |
|
"loss": 0.7038, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.2827508162620044, |
|
"grad_norm": 0.4756239950656891, |
|
"learning_rate": 8.852827537759046e-05, |
|
"loss": 0.6772, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.28615745260251046, |
|
"grad_norm": 0.3121085464954376, |
|
"learning_rate": 8.835265191429576e-05, |
|
"loss": 0.6604, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.28956408894301655, |
|
"grad_norm": 0.3641608655452728, |
|
"learning_rate": 8.817702845100106e-05, |
|
"loss": 0.6645, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.29297072528352264, |
|
"grad_norm": 0.34608688950538635, |
|
"learning_rate": 8.800140498770636e-05, |
|
"loss": 0.6702, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.2963773616240287, |
|
"grad_norm": 0.3482177257537842, |
|
"learning_rate": 8.782578152441167e-05, |
|
"loss": 0.7293, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 0.2997839979645348, |
|
"grad_norm": 0.34053704142570496, |
|
"learning_rate": 8.765015806111697e-05, |
|
"loss": 0.6724, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.30319063430504084, |
|
"grad_norm": 0.32497864961624146, |
|
"learning_rate": 8.747453459782228e-05, |
|
"loss": 0.6511, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 0.3065972706455469, |
|
"grad_norm": 0.3069411814212799, |
|
"learning_rate": 8.729891113452757e-05, |
|
"loss": 0.6849, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.310003906986053, |
|
"grad_norm": 0.28222939372062683, |
|
"learning_rate": 8.712328767123288e-05, |
|
"loss": 0.66, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 0.3134105433265591, |
|
"grad_norm": 0.3223375082015991, |
|
"learning_rate": 8.694766420793818e-05, |
|
"loss": 0.6819, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.3168171796670652, |
|
"grad_norm": 0.2859184443950653, |
|
"learning_rate": 8.677204074464349e-05, |
|
"loss": 0.7061, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.32022381600757127, |
|
"grad_norm": 0.30723094940185547, |
|
"learning_rate": 8.659641728134879e-05, |
|
"loss": 0.678, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.3236304523480773, |
|
"grad_norm": 0.3178076446056366, |
|
"learning_rate": 8.642079381805409e-05, |
|
"loss": 0.6797, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 0.3270370886885834, |
|
"grad_norm": 0.3042934536933899, |
|
"learning_rate": 8.624517035475939e-05, |
|
"loss": 0.6654, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.3304437250290895, |
|
"grad_norm": 0.3517778813838959, |
|
"learning_rate": 8.60695468914647e-05, |
|
"loss": 0.6727, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 0.33385036136959556, |
|
"grad_norm": 0.2900087833404541, |
|
"learning_rate": 8.589392342817002e-05, |
|
"loss": 0.6886, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.33725699771010165, |
|
"grad_norm": 0.3086068630218506, |
|
"learning_rate": 8.571829996487532e-05, |
|
"loss": 0.6885, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 0.34066363405060773, |
|
"grad_norm": 0.3216247856616974, |
|
"learning_rate": 8.554267650158061e-05, |
|
"loss": 0.6753, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.34407027039111376, |
|
"grad_norm": 0.3239280581474304, |
|
"learning_rate": 8.536705303828592e-05, |
|
"loss": 0.7092, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 0.34747690673161985, |
|
"grad_norm": 0.2760443091392517, |
|
"learning_rate": 8.519142957499122e-05, |
|
"loss": 0.7039, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.35088354307212594, |
|
"grad_norm": 0.3144066631793976, |
|
"learning_rate": 8.501580611169653e-05, |
|
"loss": 0.6988, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 0.354290179412632, |
|
"grad_norm": 0.32952046394348145, |
|
"learning_rate": 8.484018264840184e-05, |
|
"loss": 0.6911, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.3576968157531381, |
|
"grad_norm": 2.4748454093933105, |
|
"learning_rate": 8.466455918510714e-05, |
|
"loss": 0.6748, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 0.3611034520936442, |
|
"grad_norm": 0.32749420404434204, |
|
"learning_rate": 8.448893572181243e-05, |
|
"loss": 0.6775, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.3645100884341502, |
|
"grad_norm": 0.28332722187042236, |
|
"learning_rate": 8.431331225851774e-05, |
|
"loss": 0.6788, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 0.3679167247746563, |
|
"grad_norm": 0.29811403155326843, |
|
"learning_rate": 8.413768879522305e-05, |
|
"loss": 0.6954, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.3713233611151624, |
|
"grad_norm": 0.3304058313369751, |
|
"learning_rate": 8.396206533192835e-05, |
|
"loss": 0.6759, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 0.3747299974556685, |
|
"grad_norm": 0.32398712635040283, |
|
"learning_rate": 8.378644186863366e-05, |
|
"loss": 0.6886, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.37813663379617457, |
|
"grad_norm": 0.2734803259372711, |
|
"learning_rate": 8.361081840533895e-05, |
|
"loss": 0.6839, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 0.38154327013668066, |
|
"grad_norm": 0.2840772271156311, |
|
"learning_rate": 8.343519494204425e-05, |
|
"loss": 0.6828, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.3849499064771867, |
|
"grad_norm": 0.39391207695007324, |
|
"learning_rate": 8.325957147874956e-05, |
|
"loss": 0.6935, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 0.3883565428176928, |
|
"grad_norm": 0.27879825234413147, |
|
"learning_rate": 8.308394801545487e-05, |
|
"loss": 0.7002, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.39176317915819886, |
|
"grad_norm": 0.30057233572006226, |
|
"learning_rate": 8.290832455216017e-05, |
|
"loss": 0.6784, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 0.39516981549870495, |
|
"grad_norm": 0.31198152899742126, |
|
"learning_rate": 8.273270108886548e-05, |
|
"loss": 0.6896, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.39857645183921103, |
|
"grad_norm": 0.2815667390823364, |
|
"learning_rate": 8.255707762557078e-05, |
|
"loss": 0.6649, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 0.4019830881797171, |
|
"grad_norm": 0.35836973786354065, |
|
"learning_rate": 8.238145416227609e-05, |
|
"loss": 0.6601, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.4053897245202232, |
|
"grad_norm": 0.289037823677063, |
|
"learning_rate": 8.22058306989814e-05, |
|
"loss": 0.6842, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 0.40879636086072924, |
|
"grad_norm": 0.32901930809020996, |
|
"learning_rate": 8.20302072356867e-05, |
|
"loss": 0.6677, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.4122029972012353, |
|
"grad_norm": 0.34060442447662354, |
|
"learning_rate": 8.185458377239199e-05, |
|
"loss": 0.6728, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 0.4156096335417414, |
|
"grad_norm": 0.3380946218967438, |
|
"learning_rate": 8.16789603090973e-05, |
|
"loss": 0.7131, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.4190162698822475, |
|
"grad_norm": 0.3136932849884033, |
|
"learning_rate": 8.15033368458026e-05, |
|
"loss": 0.6829, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 0.4224229062227536, |
|
"grad_norm": 0.2733888328075409, |
|
"learning_rate": 8.132771338250791e-05, |
|
"loss": 0.7026, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.42582954256325967, |
|
"grad_norm": 0.3108291029930115, |
|
"learning_rate": 8.115208991921321e-05, |
|
"loss": 0.689, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 0.4292361789037657, |
|
"grad_norm": 0.3084828853607178, |
|
"learning_rate": 8.09764664559185e-05, |
|
"loss": 0.6723, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.4326428152442718, |
|
"grad_norm": 0.2837083041667938, |
|
"learning_rate": 8.080084299262381e-05, |
|
"loss": 0.6699, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 0.43604945158477787, |
|
"grad_norm": 0.3026272654533386, |
|
"learning_rate": 8.062521952932912e-05, |
|
"loss": 0.6771, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.43945608792528396, |
|
"grad_norm": 0.29208138585090637, |
|
"learning_rate": 8.044959606603442e-05, |
|
"loss": 0.6636, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 0.44286272426579004, |
|
"grad_norm": 0.29942700266838074, |
|
"learning_rate": 8.027397260273973e-05, |
|
"loss": 0.6788, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.44626936060629613, |
|
"grad_norm": 0.262577086687088, |
|
"learning_rate": 8.009834913944503e-05, |
|
"loss": 0.65, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 0.44967599694680216, |
|
"grad_norm": 0.29627498984336853, |
|
"learning_rate": 7.992272567615034e-05, |
|
"loss": 0.651, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.45308263328730825, |
|
"grad_norm": 0.298234224319458, |
|
"learning_rate": 7.974710221285565e-05, |
|
"loss": 0.6883, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 0.45648926962781433, |
|
"grad_norm": 0.2771220803260803, |
|
"learning_rate": 7.957147874956095e-05, |
|
"loss": 0.6786, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.4598959059683204, |
|
"grad_norm": 0.29765036702156067, |
|
"learning_rate": 7.939585528626626e-05, |
|
"loss": 0.6835, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 0.4633025423088265, |
|
"grad_norm": 0.31564295291900635, |
|
"learning_rate": 7.922023182297156e-05, |
|
"loss": 0.6546, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.4667091786493326, |
|
"grad_norm": 0.2750963270664215, |
|
"learning_rate": 7.904460835967685e-05, |
|
"loss": 0.6765, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 0.4701158149898386, |
|
"grad_norm": 0.28891250491142273, |
|
"learning_rate": 7.886898489638216e-05, |
|
"loss": 0.6719, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.4735224513303447, |
|
"grad_norm": 0.3090450167655945, |
|
"learning_rate": 7.869336143308747e-05, |
|
"loss": 0.6629, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 0.4769290876708508, |
|
"grad_norm": 0.2829868495464325, |
|
"learning_rate": 7.851773796979277e-05, |
|
"loss": 0.676, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.4803357240113569, |
|
"grad_norm": 0.2897719144821167, |
|
"learning_rate": 7.834211450649808e-05, |
|
"loss": 0.6853, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 0.48374236035186297, |
|
"grad_norm": 0.31082597374916077, |
|
"learning_rate": 7.816649104320337e-05, |
|
"loss": 0.6818, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.48714899669236905, |
|
"grad_norm": 0.29339250922203064, |
|
"learning_rate": 7.799086757990867e-05, |
|
"loss": 0.673, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 0.4905556330328751, |
|
"grad_norm": 0.29037582874298096, |
|
"learning_rate": 7.781524411661398e-05, |
|
"loss": 0.6726, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.49396226937338117, |
|
"grad_norm": 0.2868305742740631, |
|
"learning_rate": 7.763962065331929e-05, |
|
"loss": 0.6894, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 0.49736890571388725, |
|
"grad_norm": 0.28504157066345215, |
|
"learning_rate": 7.746399719002459e-05, |
|
"loss": 0.6479, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.5007755420543933, |
|
"grad_norm": 0.28001776337623596, |
|
"learning_rate": 7.728837372672988e-05, |
|
"loss": 0.6571, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 0.5041821783948994, |
|
"grad_norm": 0.3145054280757904, |
|
"learning_rate": 7.711275026343519e-05, |
|
"loss": 0.6638, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.5075888147354055, |
|
"grad_norm": 0.3753933012485504, |
|
"learning_rate": 7.693712680014051e-05, |
|
"loss": 0.6617, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 0.5109954510759116, |
|
"grad_norm": 0.2966824173927307, |
|
"learning_rate": 7.676150333684581e-05, |
|
"loss": 0.6666, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.5144020874164177, |
|
"grad_norm": 0.26844897866249084, |
|
"learning_rate": 7.658587987355112e-05, |
|
"loss": 0.6646, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 0.5178087237569238, |
|
"grad_norm": 0.3304543197154999, |
|
"learning_rate": 7.641025641025641e-05, |
|
"loss": 0.6704, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.5212153600974297, |
|
"grad_norm": 0.31017178297042847, |
|
"learning_rate": 7.623463294696172e-05, |
|
"loss": 0.6846, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 0.5246219964379358, |
|
"grad_norm": 0.3583391308784485, |
|
"learning_rate": 7.605900948366702e-05, |
|
"loss": 0.6759, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.5280286327784419, |
|
"grad_norm": 0.28191253542900085, |
|
"learning_rate": 7.588338602037233e-05, |
|
"loss": 0.6369, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 0.531435269118948, |
|
"grad_norm": 0.27828291058540344, |
|
"learning_rate": 7.570776255707763e-05, |
|
"loss": 0.6818, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.5348419054594541, |
|
"grad_norm": 0.3020898997783661, |
|
"learning_rate": 7.553213909378293e-05, |
|
"loss": 0.6578, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 0.5382485417999602, |
|
"grad_norm": 0.266043484210968, |
|
"learning_rate": 7.535651563048823e-05, |
|
"loss": 0.6652, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.5416551781404663, |
|
"grad_norm": 0.2994140684604645, |
|
"learning_rate": 7.518089216719354e-05, |
|
"loss": 0.6639, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 0.5450618144809724, |
|
"grad_norm": 0.29508110880851746, |
|
"learning_rate": 7.500526870389884e-05, |
|
"loss": 0.7025, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.5484684508214784, |
|
"grad_norm": 0.29506492614746094, |
|
"learning_rate": 7.482964524060415e-05, |
|
"loss": 0.6565, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 0.5518750871619845, |
|
"grad_norm": 0.31011542677879333, |
|
"learning_rate": 7.465402177730946e-05, |
|
"loss": 0.6851, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.5552817235024906, |
|
"grad_norm": 0.3215409219264984, |
|
"learning_rate": 7.447839831401475e-05, |
|
"loss": 0.6559, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 0.5586883598429967, |
|
"grad_norm": 0.29974356293678284, |
|
"learning_rate": 7.430277485072005e-05, |
|
"loss": 0.6784, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.5620949961835027, |
|
"grad_norm": 0.29619455337524414, |
|
"learning_rate": 7.412715138742536e-05, |
|
"loss": 0.6836, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 0.5655016325240088, |
|
"grad_norm": 0.3017350733280182, |
|
"learning_rate": 7.395152792413066e-05, |
|
"loss": 0.6533, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.5689082688645148, |
|
"grad_norm": 0.2944609224796295, |
|
"learning_rate": 7.377590446083597e-05, |
|
"loss": 0.6568, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 0.5723149052050209, |
|
"grad_norm": 0.2667727470397949, |
|
"learning_rate": 7.360028099754128e-05, |
|
"loss": 0.6546, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.575721541545527, |
|
"grad_norm": 0.2934742569923401, |
|
"learning_rate": 7.342465753424658e-05, |
|
"loss": 0.6747, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 0.5791281778860331, |
|
"grad_norm": 0.3042696416378021, |
|
"learning_rate": 7.324903407095189e-05, |
|
"loss": 0.6939, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.5825348142265392, |
|
"grad_norm": 0.2748386859893799, |
|
"learning_rate": 7.307341060765719e-05, |
|
"loss": 0.6538, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 0.5859414505670453, |
|
"grad_norm": 0.2834206223487854, |
|
"learning_rate": 7.28977871443625e-05, |
|
"loss": 0.6296, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.5893480869075514, |
|
"grad_norm": 0.31131449341773987, |
|
"learning_rate": 7.272216368106779e-05, |
|
"loss": 0.6773, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 0.5927547232480574, |
|
"grad_norm": 0.28676480054855347, |
|
"learning_rate": 7.25465402177731e-05, |
|
"loss": 0.6908, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.5961613595885635, |
|
"grad_norm": 0.25846219062805176, |
|
"learning_rate": 7.23709167544784e-05, |
|
"loss": 0.6919, |
|
"step": 4375 |
|
}, |
|
{ |
|
"epoch": 0.5995679959290696, |
|
"grad_norm": 0.3187685012817383, |
|
"learning_rate": 7.219529329118371e-05, |
|
"loss": 0.6816, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.6029746322695757, |
|
"grad_norm": 0.26819366216659546, |
|
"learning_rate": 7.201966982788901e-05, |
|
"loss": 0.6653, |
|
"step": 4425 |
|
}, |
|
{ |
|
"epoch": 0.6063812686100817, |
|
"grad_norm": 0.3077665865421295, |
|
"learning_rate": 7.18440463645943e-05, |
|
"loss": 0.648, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 0.6097879049505878, |
|
"grad_norm": 0.4746949374675751, |
|
"learning_rate": 7.166842290129961e-05, |
|
"loss": 0.7012, |
|
"step": 4475 |
|
}, |
|
{ |
|
"epoch": 0.6131945412910939, |
|
"grad_norm": 0.2993331849575043, |
|
"learning_rate": 7.149279943800492e-05, |
|
"loss": 0.6779, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.6166011776315999, |
|
"grad_norm": 0.26777181029319763, |
|
"learning_rate": 7.131717597471022e-05, |
|
"loss": 0.6706, |
|
"step": 4525 |
|
}, |
|
{ |
|
"epoch": 0.620007813972106, |
|
"grad_norm": 0.31024643778800964, |
|
"learning_rate": 7.114155251141553e-05, |
|
"loss": 0.6748, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 0.6234144503126121, |
|
"grad_norm": 0.26060259342193604, |
|
"learning_rate": 7.096592904812083e-05, |
|
"loss": 0.6871, |
|
"step": 4575 |
|
}, |
|
{ |
|
"epoch": 0.6268210866531182, |
|
"grad_norm": 0.2986789643764496, |
|
"learning_rate": 7.079030558482614e-05, |
|
"loss": 0.6638, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 0.6302277229936243, |
|
"grad_norm": 0.28097572922706604, |
|
"learning_rate": 7.061468212153144e-05, |
|
"loss": 0.661, |
|
"step": 4625 |
|
}, |
|
{ |
|
"epoch": 0.6336343593341304, |
|
"grad_norm": 0.3189026415348053, |
|
"learning_rate": 7.043905865823675e-05, |
|
"loss": 0.6525, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.6370409956746365, |
|
"grad_norm": 0.29876482486724854, |
|
"learning_rate": 7.026343519494206e-05, |
|
"loss": 0.6655, |
|
"step": 4675 |
|
}, |
|
{ |
|
"epoch": 0.6404476320151425, |
|
"grad_norm": 0.30885156989097595, |
|
"learning_rate": 7.008781173164735e-05, |
|
"loss": 0.6797, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 0.6438542683556486, |
|
"grad_norm": 0.3274490237236023, |
|
"learning_rate": 6.991218826835265e-05, |
|
"loss": 0.6723, |
|
"step": 4725 |
|
}, |
|
{ |
|
"epoch": 0.6472609046961546, |
|
"grad_norm": 0.30561405420303345, |
|
"learning_rate": 6.973656480505796e-05, |
|
"loss": 0.6414, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 0.6506675410366607, |
|
"grad_norm": 0.3118368089199066, |
|
"learning_rate": 6.956094134176326e-05, |
|
"loss": 0.6786, |
|
"step": 4775 |
|
}, |
|
{ |
|
"epoch": 0.6540741773771668, |
|
"grad_norm": 0.2799939811229706, |
|
"learning_rate": 6.938531787846857e-05, |
|
"loss": 0.6556, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.6574808137176729, |
|
"grad_norm": 0.31232938170433044, |
|
"learning_rate": 6.920969441517388e-05, |
|
"loss": 0.6527, |
|
"step": 4825 |
|
}, |
|
{ |
|
"epoch": 0.660887450058179, |
|
"grad_norm": 0.37503504753112793, |
|
"learning_rate": 6.903407095187917e-05, |
|
"loss": 0.6765, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 0.664294086398685, |
|
"grad_norm": 0.2833762764930725, |
|
"learning_rate": 6.885844748858447e-05, |
|
"loss": 0.6558, |
|
"step": 4875 |
|
}, |
|
{ |
|
"epoch": 0.6677007227391911, |
|
"grad_norm": 0.27285921573638916, |
|
"learning_rate": 6.868282402528978e-05, |
|
"loss": 0.6717, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 0.6711073590796972, |
|
"grad_norm": 0.29982614517211914, |
|
"learning_rate": 6.850720056199509e-05, |
|
"loss": 0.6736, |
|
"step": 4925 |
|
}, |
|
{ |
|
"epoch": 0.6745139954202033, |
|
"grad_norm": 0.29421648383140564, |
|
"learning_rate": 6.833157709870039e-05, |
|
"loss": 0.6633, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 0.6779206317607094, |
|
"grad_norm": 0.30472108721733093, |
|
"learning_rate": 6.815595363540568e-05, |
|
"loss": 0.6418, |
|
"step": 4975 |
|
}, |
|
{ |
|
"epoch": 0.6813272681012155, |
|
"grad_norm": 0.2899680733680725, |
|
"learning_rate": 6.7980330172111e-05, |
|
"loss": 0.681, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.6847339044417216, |
|
"grad_norm": 0.2816316485404968, |
|
"learning_rate": 6.780470670881631e-05, |
|
"loss": 0.6661, |
|
"step": 5025 |
|
}, |
|
{ |
|
"epoch": 0.6881405407822275, |
|
"grad_norm": 0.2587604224681854, |
|
"learning_rate": 6.762908324552161e-05, |
|
"loss": 0.6474, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 0.6915471771227336, |
|
"grad_norm": 0.36257919669151306, |
|
"learning_rate": 6.745345978222692e-05, |
|
"loss": 0.6449, |
|
"step": 5075 |
|
}, |
|
{ |
|
"epoch": 0.6949538134632397, |
|
"grad_norm": 0.2923133373260498, |
|
"learning_rate": 6.727783631893221e-05, |
|
"loss": 0.6805, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 0.6983604498037458, |
|
"grad_norm": 0.2798687517642975, |
|
"learning_rate": 6.710221285563752e-05, |
|
"loss": 0.6635, |
|
"step": 5125 |
|
}, |
|
{ |
|
"epoch": 0.7017670861442519, |
|
"grad_norm": 0.2894415259361267, |
|
"learning_rate": 6.692658939234282e-05, |
|
"loss": 0.6748, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 0.705173722484758, |
|
"grad_norm": 0.31652697920799255, |
|
"learning_rate": 6.675096592904813e-05, |
|
"loss": 0.6434, |
|
"step": 5175 |
|
}, |
|
{ |
|
"epoch": 0.708580358825264, |
|
"grad_norm": 0.28104209899902344, |
|
"learning_rate": 6.657534246575343e-05, |
|
"loss": 0.6693, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 0.7119869951657701, |
|
"grad_norm": 0.29532864689826965, |
|
"learning_rate": 6.639971900245873e-05, |
|
"loss": 0.665, |
|
"step": 5225 |
|
}, |
|
{ |
|
"epoch": 0.7153936315062762, |
|
"grad_norm": 0.2964029312133789, |
|
"learning_rate": 6.622409553916403e-05, |
|
"loss": 0.6742, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 0.7188002678467823, |
|
"grad_norm": 0.3458377420902252, |
|
"learning_rate": 6.604847207586934e-05, |
|
"loss": 0.6836, |
|
"step": 5275 |
|
}, |
|
{ |
|
"epoch": 0.7222069041872884, |
|
"grad_norm": 0.30560600757598877, |
|
"learning_rate": 6.587284861257464e-05, |
|
"loss": 0.6883, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 0.7256135405277945, |
|
"grad_norm": 0.266996294260025, |
|
"learning_rate": 6.569722514927995e-05, |
|
"loss": 0.6674, |
|
"step": 5325 |
|
}, |
|
{ |
|
"epoch": 0.7290201768683005, |
|
"grad_norm": 0.29581552743911743, |
|
"learning_rate": 6.552160168598524e-05, |
|
"loss": 0.6776, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 0.7324268132088065, |
|
"grad_norm": 0.26997387409210205, |
|
"learning_rate": 6.534597822269055e-05, |
|
"loss": 0.6646, |
|
"step": 5375 |
|
}, |
|
{ |
|
"epoch": 0.7358334495493126, |
|
"grad_norm": 0.3831222355365753, |
|
"learning_rate": 6.517035475939585e-05, |
|
"loss": 0.6626, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 0.7392400858898187, |
|
"grad_norm": 0.3023781180381775, |
|
"learning_rate": 6.499473129610116e-05, |
|
"loss": 0.672, |
|
"step": 5425 |
|
}, |
|
{ |
|
"epoch": 0.7426467222303248, |
|
"grad_norm": 0.3060709536075592, |
|
"learning_rate": 6.481910783280646e-05, |
|
"loss": 0.68, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 0.7460533585708309, |
|
"grad_norm": 0.26548224687576294, |
|
"learning_rate": 6.464348436951177e-05, |
|
"loss": 0.6585, |
|
"step": 5475 |
|
}, |
|
{ |
|
"epoch": 0.749459994911337, |
|
"grad_norm": 0.3094407916069031, |
|
"learning_rate": 6.446786090621707e-05, |
|
"loss": 0.6809, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.752866631251843, |
|
"grad_norm": 0.2568422853946686, |
|
"learning_rate": 6.429223744292238e-05, |
|
"loss": 0.6889, |
|
"step": 5525 |
|
}, |
|
{ |
|
"epoch": 0.7562732675923491, |
|
"grad_norm": 0.2860637605190277, |
|
"learning_rate": 6.411661397962769e-05, |
|
"loss": 0.6782, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 0.7596799039328552, |
|
"grad_norm": 0.3119567036628723, |
|
"learning_rate": 6.394099051633299e-05, |
|
"loss": 0.6481, |
|
"step": 5575 |
|
}, |
|
{ |
|
"epoch": 0.7630865402733613, |
|
"grad_norm": 0.3033282458782196, |
|
"learning_rate": 6.37653670530383e-05, |
|
"loss": 0.6735, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 0.7664931766138674, |
|
"grad_norm": 0.303755521774292, |
|
"learning_rate": 6.358974358974359e-05, |
|
"loss": 0.6736, |
|
"step": 5625 |
|
}, |
|
{ |
|
"epoch": 0.7698998129543734, |
|
"grad_norm": 0.3597272038459778, |
|
"learning_rate": 6.34141201264489e-05, |
|
"loss": 0.6749, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 0.7733064492948795, |
|
"grad_norm": 0.26582157611846924, |
|
"learning_rate": 6.32384966631542e-05, |
|
"loss": 0.6646, |
|
"step": 5675 |
|
}, |
|
{ |
|
"epoch": 0.7767130856353855, |
|
"grad_norm": 0.2802767753601074, |
|
"learning_rate": 6.30628731998595e-05, |
|
"loss": 0.6675, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 0.7801197219758916, |
|
"grad_norm": 0.31844425201416016, |
|
"learning_rate": 6.288724973656481e-05, |
|
"loss": 0.6637, |
|
"step": 5725 |
|
}, |
|
{ |
|
"epoch": 0.7835263583163977, |
|
"grad_norm": 0.31489238142967224, |
|
"learning_rate": 6.27116262732701e-05, |
|
"loss": 0.6627, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 0.7869329946569038, |
|
"grad_norm": 0.3433346152305603, |
|
"learning_rate": 6.253600280997541e-05, |
|
"loss": 0.6796, |
|
"step": 5775 |
|
}, |
|
{ |
|
"epoch": 0.7903396309974099, |
|
"grad_norm": 0.3126988708972931, |
|
"learning_rate": 6.236037934668072e-05, |
|
"loss": 0.6768, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 0.793746267337916, |
|
"grad_norm": 0.2817250192165375, |
|
"learning_rate": 6.218475588338602e-05, |
|
"loss": 0.6521, |
|
"step": 5825 |
|
}, |
|
{ |
|
"epoch": 0.7971529036784221, |
|
"grad_norm": 0.30584487318992615, |
|
"learning_rate": 6.200913242009133e-05, |
|
"loss": 0.6357, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 0.8005595400189282, |
|
"grad_norm": 0.3048030138015747, |
|
"learning_rate": 6.183350895679663e-05, |
|
"loss": 0.6688, |
|
"step": 5875 |
|
}, |
|
{ |
|
"epoch": 0.8039661763594342, |
|
"grad_norm": 0.30793818831443787, |
|
"learning_rate": 6.165788549350194e-05, |
|
"loss": 0.6717, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 0.8073728126999403, |
|
"grad_norm": 0.2916577458381653, |
|
"learning_rate": 6.148226203020724e-05, |
|
"loss": 0.6432, |
|
"step": 5925 |
|
}, |
|
{ |
|
"epoch": 0.8107794490404464, |
|
"grad_norm": 0.3017030656337738, |
|
"learning_rate": 6.130663856691255e-05, |
|
"loss": 0.6237, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 0.8141860853809524, |
|
"grad_norm": 0.3230801820755005, |
|
"learning_rate": 6.113101510361785e-05, |
|
"loss": 0.6776, |
|
"step": 5975 |
|
}, |
|
{ |
|
"epoch": 0.8175927217214585, |
|
"grad_norm": 0.3148077130317688, |
|
"learning_rate": 6.095539164032315e-05, |
|
"loss": 0.6602, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.8209993580619646, |
|
"grad_norm": 0.29618728160858154, |
|
"learning_rate": 6.077976817702845e-05, |
|
"loss": 0.6399, |
|
"step": 6025 |
|
}, |
|
{ |
|
"epoch": 0.8244059944024706, |
|
"grad_norm": 0.2636460065841675, |
|
"learning_rate": 6.060414471373376e-05, |
|
"loss": 0.6703, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 0.8278126307429767, |
|
"grad_norm": 0.29034677147865295, |
|
"learning_rate": 6.0428521250439064e-05, |
|
"loss": 0.6406, |
|
"step": 6075 |
|
}, |
|
{ |
|
"epoch": 0.8312192670834828, |
|
"grad_norm": 0.29782792925834656, |
|
"learning_rate": 6.025289778714437e-05, |
|
"loss": 0.6721, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 0.8346259034239889, |
|
"grad_norm": 0.29536172747612, |
|
"learning_rate": 6.007727432384966e-05, |
|
"loss": 0.6538, |
|
"step": 6125 |
|
}, |
|
{ |
|
"epoch": 0.838032539764495, |
|
"grad_norm": 0.31480076909065247, |
|
"learning_rate": 5.990165086055497e-05, |
|
"loss": 0.6807, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 0.8414391761050011, |
|
"grad_norm": 0.28027939796447754, |
|
"learning_rate": 5.972602739726027e-05, |
|
"loss": 0.6602, |
|
"step": 6175 |
|
}, |
|
{ |
|
"epoch": 0.8448458124455072, |
|
"grad_norm": 0.3105228543281555, |
|
"learning_rate": 5.9550403933965585e-05, |
|
"loss": 0.6438, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 0.8482524487860132, |
|
"grad_norm": 0.3044467568397522, |
|
"learning_rate": 5.937478047067089e-05, |
|
"loss": 0.6474, |
|
"step": 6225 |
|
}, |
|
{ |
|
"epoch": 0.8516590851265193, |
|
"grad_norm": 0.2907201945781708, |
|
"learning_rate": 5.9199157007376196e-05, |
|
"loss": 0.6732, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 0.8550657214670253, |
|
"grad_norm": 0.3138374984264374, |
|
"learning_rate": 5.902353354408149e-05, |
|
"loss": 0.6629, |
|
"step": 6275 |
|
}, |
|
{ |
|
"epoch": 0.8584723578075314, |
|
"grad_norm": 0.2916734516620636, |
|
"learning_rate": 5.8847910080786794e-05, |
|
"loss": 0.6588, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 0.8618789941480375, |
|
"grad_norm": 0.2865552604198456, |
|
"learning_rate": 5.86722866174921e-05, |
|
"loss": 0.6541, |
|
"step": 6325 |
|
}, |
|
{ |
|
"epoch": 0.8652856304885436, |
|
"grad_norm": 0.31498757004737854, |
|
"learning_rate": 5.8496663154197405e-05, |
|
"loss": 0.6685, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 0.8686922668290497, |
|
"grad_norm": 0.30698952078819275, |
|
"learning_rate": 5.832103969090271e-05, |
|
"loss": 0.66, |
|
"step": 6375 |
|
}, |
|
{ |
|
"epoch": 0.8720989031695557, |
|
"grad_norm": 0.30786094069480896, |
|
"learning_rate": 5.814541622760801e-05, |
|
"loss": 0.6641, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 0.8755055395100618, |
|
"grad_norm": 0.30545827746391296, |
|
"learning_rate": 5.7969792764313316e-05, |
|
"loss": 0.6713, |
|
"step": 6425 |
|
}, |
|
{ |
|
"epoch": 0.8789121758505679, |
|
"grad_norm": 0.2694856524467468, |
|
"learning_rate": 5.779416930101862e-05, |
|
"loss": 0.6711, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 0.882318812191074, |
|
"grad_norm": 0.3137877881526947, |
|
"learning_rate": 5.761854583772393e-05, |
|
"loss": 0.6032, |
|
"step": 6475 |
|
}, |
|
{ |
|
"epoch": 0.8857254485315801, |
|
"grad_norm": 0.2708178460597992, |
|
"learning_rate": 5.744292237442923e-05, |
|
"loss": 0.6799, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.8891320848720862, |
|
"grad_norm": 0.3112589716911316, |
|
"learning_rate": 5.7267298911134525e-05, |
|
"loss": 0.6839, |
|
"step": 6525 |
|
}, |
|
{ |
|
"epoch": 0.8925387212125923, |
|
"grad_norm": 0.3140416741371155, |
|
"learning_rate": 5.709167544783983e-05, |
|
"loss": 0.6764, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 0.8959453575530982, |
|
"grad_norm": 0.26221442222595215, |
|
"learning_rate": 5.6916051984545136e-05, |
|
"loss": 0.6619, |
|
"step": 6575 |
|
}, |
|
{ |
|
"epoch": 0.8993519938936043, |
|
"grad_norm": 0.2906458079814911, |
|
"learning_rate": 5.674042852125044e-05, |
|
"loss": 0.6581, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 0.9027586302341104, |
|
"grad_norm": 0.3028678596019745, |
|
"learning_rate": 5.656480505795575e-05, |
|
"loss": 0.6633, |
|
"step": 6625 |
|
}, |
|
{ |
|
"epoch": 0.9061652665746165, |
|
"grad_norm": 0.31104981899261475, |
|
"learning_rate": 5.6389181594661046e-05, |
|
"loss": 0.6522, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 0.9095719029151226, |
|
"grad_norm": 0.2633511424064636, |
|
"learning_rate": 5.621355813136635e-05, |
|
"loss": 0.641, |
|
"step": 6675 |
|
}, |
|
{ |
|
"epoch": 0.9129785392556287, |
|
"grad_norm": 0.28382760286331177, |
|
"learning_rate": 5.603793466807166e-05, |
|
"loss": 0.6704, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 0.9163851755961347, |
|
"grad_norm": 0.26792773604393005, |
|
"learning_rate": 5.586231120477696e-05, |
|
"loss": 0.6321, |
|
"step": 6725 |
|
}, |
|
{ |
|
"epoch": 0.9197918119366408, |
|
"grad_norm": 0.3073452413082123, |
|
"learning_rate": 5.568668774148227e-05, |
|
"loss": 0.6548, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 0.9231984482771469, |
|
"grad_norm": 0.3464025855064392, |
|
"learning_rate": 5.551106427818756e-05, |
|
"loss": 0.6669, |
|
"step": 6775 |
|
}, |
|
{ |
|
"epoch": 0.926605084617653, |
|
"grad_norm": 0.3169041574001312, |
|
"learning_rate": 5.5335440814892866e-05, |
|
"loss": 0.6739, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 0.9300117209581591, |
|
"grad_norm": 0.2641547620296478, |
|
"learning_rate": 5.515981735159817e-05, |
|
"loss": 0.6561, |
|
"step": 6825 |
|
}, |
|
{ |
|
"epoch": 0.9334183572986652, |
|
"grad_norm": 0.2953278720378876, |
|
"learning_rate": 5.4984193888303484e-05, |
|
"loss": 0.661, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 0.9368249936391712, |
|
"grad_norm": 0.3245227336883545, |
|
"learning_rate": 5.480857042500879e-05, |
|
"loss": 0.6709, |
|
"step": 6875 |
|
}, |
|
{ |
|
"epoch": 0.9402316299796772, |
|
"grad_norm": 0.2790832221508026, |
|
"learning_rate": 5.463294696171408e-05, |
|
"loss": 0.6625, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 0.9436382663201833, |
|
"grad_norm": 0.28198742866516113, |
|
"learning_rate": 5.445732349841939e-05, |
|
"loss": 0.6696, |
|
"step": 6925 |
|
}, |
|
{ |
|
"epoch": 0.9470449026606894, |
|
"grad_norm": 0.27755188941955566, |
|
"learning_rate": 5.4281700035124694e-05, |
|
"loss": 0.6544, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 0.9504515390011955, |
|
"grad_norm": 0.28444674611091614, |
|
"learning_rate": 5.410607657183e-05, |
|
"loss": 0.6571, |
|
"step": 6975 |
|
}, |
|
{ |
|
"epoch": 0.9538581753417016, |
|
"grad_norm": 0.3238435983657837, |
|
"learning_rate": 5.3930453108535305e-05, |
|
"loss": 0.6391, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.9572648116822077, |
|
"grad_norm": 0.30982422828674316, |
|
"learning_rate": 5.375482964524061e-05, |
|
"loss": 0.6604, |
|
"step": 7025 |
|
}, |
|
{ |
|
"epoch": 0.9606714480227138, |
|
"grad_norm": 0.30013763904571533, |
|
"learning_rate": 5.357920618194591e-05, |
|
"loss": 0.6766, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 0.9640780843632198, |
|
"grad_norm": 0.33146125078201294, |
|
"learning_rate": 5.3403582718651215e-05, |
|
"loss": 0.6404, |
|
"step": 7075 |
|
}, |
|
{ |
|
"epoch": 0.9674847207037259, |
|
"grad_norm": 0.23431046307086945, |
|
"learning_rate": 5.322795925535652e-05, |
|
"loss": 0.6412, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 0.970891357044232, |
|
"grad_norm": 0.29649725556373596, |
|
"learning_rate": 5.3052335792061826e-05, |
|
"loss": 0.6462, |
|
"step": 7125 |
|
}, |
|
{ |
|
"epoch": 0.9742979933847381, |
|
"grad_norm": 0.31719136238098145, |
|
"learning_rate": 5.287671232876713e-05, |
|
"loss": 0.6877, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 0.9777046297252441, |
|
"grad_norm": 0.25989624857902527, |
|
"learning_rate": 5.2701088865472424e-05, |
|
"loss": 0.6416, |
|
"step": 7175 |
|
}, |
|
{ |
|
"epoch": 0.9811112660657502, |
|
"grad_norm": 0.32486599683761597, |
|
"learning_rate": 5.252546540217773e-05, |
|
"loss": 0.6307, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 0.9845179024062563, |
|
"grad_norm": 0.31052151322364807, |
|
"learning_rate": 5.2349841938883035e-05, |
|
"loss": 0.6686, |
|
"step": 7225 |
|
}, |
|
{ |
|
"epoch": 0.9879245387467623, |
|
"grad_norm": 0.2722817659378052, |
|
"learning_rate": 5.217421847558834e-05, |
|
"loss": 0.6554, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 0.9913311750872684, |
|
"grad_norm": 0.29433372616767883, |
|
"learning_rate": 5.199859501229365e-05, |
|
"loss": 0.6491, |
|
"step": 7275 |
|
}, |
|
{ |
|
"epoch": 0.9947378114277745, |
|
"grad_norm": 0.2956300973892212, |
|
"learning_rate": 5.1822971548998945e-05, |
|
"loss": 0.6433, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 0.9981444477682806, |
|
"grad_norm": 0.29263532161712646, |
|
"learning_rate": 5.164734808570425e-05, |
|
"loss": 0.6434, |
|
"step": 7325 |
|
}, |
|
{ |
|
"epoch": 1.0014989199898228, |
|
"grad_norm": 0.2794649600982666, |
|
"learning_rate": 5.147172462240956e-05, |
|
"loss": 0.6228, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 1.0049055563303289, |
|
"grad_norm": 0.2971467673778534, |
|
"learning_rate": 5.129610115911486e-05, |
|
"loss": 0.5837, |
|
"step": 7375 |
|
}, |
|
{ |
|
"epoch": 1.008312192670835, |
|
"grad_norm": 0.2719724178314209, |
|
"learning_rate": 5.112047769582017e-05, |
|
"loss": 0.5884, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 1.0117188290113408, |
|
"grad_norm": 0.3151598870754242, |
|
"learning_rate": 5.094485423252546e-05, |
|
"loss": 0.5755, |
|
"step": 7425 |
|
}, |
|
{ |
|
"epoch": 1.015125465351847, |
|
"grad_norm": 0.2950424551963806, |
|
"learning_rate": 5.0769230769230766e-05, |
|
"loss": 0.5849, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 1.018532101692353, |
|
"grad_norm": 0.2683977782726288, |
|
"learning_rate": 5.059360730593608e-05, |
|
"loss": 0.5864, |
|
"step": 7475 |
|
}, |
|
{ |
|
"epoch": 1.021938738032859, |
|
"grad_norm": 0.32205426692962646, |
|
"learning_rate": 5.0417983842641384e-05, |
|
"loss": 0.5712, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 1.0253453743733651, |
|
"grad_norm": 0.3280932903289795, |
|
"learning_rate": 5.024236037934669e-05, |
|
"loss": 0.5857, |
|
"step": 7525 |
|
}, |
|
{ |
|
"epoch": 1.0287520107138712, |
|
"grad_norm": 0.2890978753566742, |
|
"learning_rate": 5.006673691605198e-05, |
|
"loss": 0.5899, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 1.0321586470543773, |
|
"grad_norm": 0.2932690680027008, |
|
"learning_rate": 4.9891113452757294e-05, |
|
"loss": 0.5691, |
|
"step": 7575 |
|
}, |
|
{ |
|
"epoch": 1.0355652833948834, |
|
"grad_norm": 0.3369404673576355, |
|
"learning_rate": 4.971548998946259e-05, |
|
"loss": 0.5946, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 1.0389719197353895, |
|
"grad_norm": 0.29351669549942017, |
|
"learning_rate": 4.95398665261679e-05, |
|
"loss": 0.5962, |
|
"step": 7625 |
|
}, |
|
{ |
|
"epoch": 1.0423785560758956, |
|
"grad_norm": 0.28703489899635315, |
|
"learning_rate": 4.93642430628732e-05, |
|
"loss": 0.6125, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 1.0457851924164017, |
|
"grad_norm": 0.30198702216148376, |
|
"learning_rate": 4.91886195995785e-05, |
|
"loss": 0.5828, |
|
"step": 7675 |
|
}, |
|
{ |
|
"epoch": 1.0491918287569078, |
|
"grad_norm": 0.2725096046924591, |
|
"learning_rate": 4.901299613628381e-05, |
|
"loss": 0.5757, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 1.0525984650974138, |
|
"grad_norm": 0.30557242035865784, |
|
"learning_rate": 4.8837372672989114e-05, |
|
"loss": 0.6002, |
|
"step": 7725 |
|
}, |
|
{ |
|
"epoch": 1.05600510143792, |
|
"grad_norm": 0.30513855814933777, |
|
"learning_rate": 4.866174920969442e-05, |
|
"loss": 0.5882, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 1.059411737778426, |
|
"grad_norm": 0.28717830777168274, |
|
"learning_rate": 4.848612574639972e-05, |
|
"loss": 0.5846, |
|
"step": 7775 |
|
}, |
|
{ |
|
"epoch": 1.062818374118932, |
|
"grad_norm": 0.3230528235435486, |
|
"learning_rate": 4.8310502283105025e-05, |
|
"loss": 0.5949, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 1.0662250104594382, |
|
"grad_norm": 0.29600274562835693, |
|
"learning_rate": 4.813487881981033e-05, |
|
"loss": 0.5691, |
|
"step": 7825 |
|
}, |
|
{ |
|
"epoch": 1.0696316467999443, |
|
"grad_norm": 0.27012398838996887, |
|
"learning_rate": 4.795925535651563e-05, |
|
"loss": 0.6006, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 1.0730382831404504, |
|
"grad_norm": 0.36482536792755127, |
|
"learning_rate": 4.7783631893220935e-05, |
|
"loss": 0.5872, |
|
"step": 7875 |
|
}, |
|
{ |
|
"epoch": 1.0764449194809564, |
|
"grad_norm": 0.3450656235218048, |
|
"learning_rate": 4.760800842992624e-05, |
|
"loss": 0.5777, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 1.0798515558214625, |
|
"grad_norm": 0.30672258138656616, |
|
"learning_rate": 4.7432384966631546e-05, |
|
"loss": 0.582, |
|
"step": 7925 |
|
}, |
|
{ |
|
"epoch": 1.0832581921619686, |
|
"grad_norm": 0.3871435523033142, |
|
"learning_rate": 4.725676150333685e-05, |
|
"loss": 0.5877, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 1.0866648285024747, |
|
"grad_norm": 0.3294704258441925, |
|
"learning_rate": 4.708113804004215e-05, |
|
"loss": 0.5606, |
|
"step": 7975 |
|
}, |
|
{ |
|
"epoch": 1.0900714648429806, |
|
"grad_norm": 0.3116719722747803, |
|
"learning_rate": 4.6905514576747456e-05, |
|
"loss": 0.5552, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 1.0934781011834867, |
|
"grad_norm": 0.35774242877960205, |
|
"learning_rate": 4.672989111345276e-05, |
|
"loss": 0.61, |
|
"step": 8025 |
|
}, |
|
{ |
|
"epoch": 1.0968847375239927, |
|
"grad_norm": 0.3226807117462158, |
|
"learning_rate": 4.655426765015806e-05, |
|
"loss": 0.6, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 1.1002913738644988, |
|
"grad_norm": 0.2967264652252197, |
|
"learning_rate": 4.6378644186863366e-05, |
|
"loss": 0.5923, |
|
"step": 8075 |
|
}, |
|
{ |
|
"epoch": 1.103698010205005, |
|
"grad_norm": 0.27228492498397827, |
|
"learning_rate": 4.6203020723568665e-05, |
|
"loss": 0.5761, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 1.107104646545511, |
|
"grad_norm": 0.33212724328041077, |
|
"learning_rate": 4.602739726027398e-05, |
|
"loss": 0.6051, |
|
"step": 8125 |
|
}, |
|
{ |
|
"epoch": 1.110511282886017, |
|
"grad_norm": 0.28719720244407654, |
|
"learning_rate": 4.585177379697928e-05, |
|
"loss": 0.6039, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 1.1139179192265232, |
|
"grad_norm": 0.32448840141296387, |
|
"learning_rate": 4.567615033368458e-05, |
|
"loss": 0.5913, |
|
"step": 8175 |
|
}, |
|
{ |
|
"epoch": 1.1173245555670293, |
|
"grad_norm": 0.2699980139732361, |
|
"learning_rate": 4.550052687038989e-05, |
|
"loss": 0.606, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 1.1207311919075353, |
|
"grad_norm": 0.285967081785202, |
|
"learning_rate": 4.5324903407095193e-05, |
|
"loss": 0.5831, |
|
"step": 8225 |
|
}, |
|
{ |
|
"epoch": 1.1241378282480414, |
|
"grad_norm": 0.31957241892814636, |
|
"learning_rate": 4.514927994380049e-05, |
|
"loss": 0.6087, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 1.1275444645885475, |
|
"grad_norm": 0.3458569049835205, |
|
"learning_rate": 4.49736564805058e-05, |
|
"loss": 0.5916, |
|
"step": 8275 |
|
}, |
|
{ |
|
"epoch": 1.1309511009290536, |
|
"grad_norm": 0.3341424763202667, |
|
"learning_rate": 4.47980330172111e-05, |
|
"loss": 0.5924, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 1.1343577372695597, |
|
"grad_norm": 0.2719230055809021, |
|
"learning_rate": 4.46224095539164e-05, |
|
"loss": 0.5583, |
|
"step": 8325 |
|
}, |
|
{ |
|
"epoch": 1.1377643736100658, |
|
"grad_norm": 0.3184913098812103, |
|
"learning_rate": 4.444678609062171e-05, |
|
"loss": 0.576, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 1.1411710099505719, |
|
"grad_norm": 0.31068292260169983, |
|
"learning_rate": 4.4271162627327014e-05, |
|
"loss": 0.6214, |
|
"step": 8375 |
|
}, |
|
{ |
|
"epoch": 1.144577646291078, |
|
"grad_norm": 0.3055650293827057, |
|
"learning_rate": 4.409553916403232e-05, |
|
"loss": 0.5867, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 1.147984282631584, |
|
"grad_norm": 0.3341415524482727, |
|
"learning_rate": 4.391991570073762e-05, |
|
"loss": 0.573, |
|
"step": 8425 |
|
}, |
|
{ |
|
"epoch": 1.1513909189720901, |
|
"grad_norm": 0.30130231380462646, |
|
"learning_rate": 4.3744292237442924e-05, |
|
"loss": 0.5723, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 1.1547975553125962, |
|
"grad_norm": 0.3360777199268341, |
|
"learning_rate": 4.356866877414823e-05, |
|
"loss": 0.5857, |
|
"step": 8475 |
|
}, |
|
{ |
|
"epoch": 1.1582041916531023, |
|
"grad_norm": 0.28688421845436096, |
|
"learning_rate": 4.339304531085353e-05, |
|
"loss": 0.5814, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 1.1616108279936084, |
|
"grad_norm": 0.3305681049823761, |
|
"learning_rate": 4.3217421847558834e-05, |
|
"loss": 0.5829, |
|
"step": 8525 |
|
}, |
|
{ |
|
"epoch": 1.1650174643341145, |
|
"grad_norm": 0.28347936272621155, |
|
"learning_rate": 4.304179838426414e-05, |
|
"loss": 0.5644, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 1.1684241006746205, |
|
"grad_norm": 0.3450355529785156, |
|
"learning_rate": 4.2866174920969445e-05, |
|
"loss": 0.6092, |
|
"step": 8575 |
|
}, |
|
{ |
|
"epoch": 1.1718307370151266, |
|
"grad_norm": 0.3150685727596283, |
|
"learning_rate": 4.269055145767475e-05, |
|
"loss": 0.5804, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 1.1752373733556327, |
|
"grad_norm": 0.2739415764808655, |
|
"learning_rate": 4.251492799438005e-05, |
|
"loss": 0.5934, |
|
"step": 8625 |
|
}, |
|
{ |
|
"epoch": 1.1786440096961388, |
|
"grad_norm": 0.28803735971450806, |
|
"learning_rate": 4.2339304531085356e-05, |
|
"loss": 0.5766, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 1.1820506460366447, |
|
"grad_norm": 0.3043755888938904, |
|
"learning_rate": 4.216368106779066e-05, |
|
"loss": 0.5652, |
|
"step": 8675 |
|
}, |
|
{ |
|
"epoch": 1.1854572823771508, |
|
"grad_norm": 0.3253932297229767, |
|
"learning_rate": 4.198805760449596e-05, |
|
"loss": 0.587, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 1.1888639187176568, |
|
"grad_norm": 0.332832932472229, |
|
"learning_rate": 4.1812434141201266e-05, |
|
"loss": 0.5877, |
|
"step": 8725 |
|
}, |
|
{ |
|
"epoch": 1.192270555058163, |
|
"grad_norm": 0.30993393063545227, |
|
"learning_rate": 4.163681067790657e-05, |
|
"loss": 0.5862, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 1.195677191398669, |
|
"grad_norm": 0.3366856276988983, |
|
"learning_rate": 4.146118721461188e-05, |
|
"loss": 0.583, |
|
"step": 8775 |
|
}, |
|
{ |
|
"epoch": 1.199083827739175, |
|
"grad_norm": 0.3211153447628021, |
|
"learning_rate": 4.128556375131718e-05, |
|
"loss": 0.578, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 1.2024904640796812, |
|
"grad_norm": 0.29055503010749817, |
|
"learning_rate": 4.110994028802248e-05, |
|
"loss": 0.6299, |
|
"step": 8825 |
|
}, |
|
{ |
|
"epoch": 1.2058971004201873, |
|
"grad_norm": 0.32854345440864563, |
|
"learning_rate": 4.093431682472779e-05, |
|
"loss": 0.6044, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 1.2093037367606934, |
|
"grad_norm": 0.3426538407802582, |
|
"learning_rate": 4.0758693361433086e-05, |
|
"loss": 0.6021, |
|
"step": 8875 |
|
}, |
|
{ |
|
"epoch": 1.2127103731011994, |
|
"grad_norm": 0.32610806822776794, |
|
"learning_rate": 4.058306989813839e-05, |
|
"loss": 0.5936, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 1.2161170094417055, |
|
"grad_norm": 0.3073614537715912, |
|
"learning_rate": 4.04074464348437e-05, |
|
"loss": 0.5826, |
|
"step": 8925 |
|
}, |
|
{ |
|
"epoch": 1.2195236457822116, |
|
"grad_norm": 0.317548930644989, |
|
"learning_rate": 4.0231822971548996e-05, |
|
"loss": 0.5917, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 1.2229302821227177, |
|
"grad_norm": 0.3202793300151825, |
|
"learning_rate": 4.00561995082543e-05, |
|
"loss": 0.5757, |
|
"step": 8975 |
|
}, |
|
{ |
|
"epoch": 1.2263369184632238, |
|
"grad_norm": 0.2945701479911804, |
|
"learning_rate": 3.9880576044959614e-05, |
|
"loss": 0.5901, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 1.2297435548037299, |
|
"grad_norm": 0.3056606650352478, |
|
"learning_rate": 3.970495258166491e-05, |
|
"loss": 0.5979, |
|
"step": 9025 |
|
}, |
|
{ |
|
"epoch": 1.233150191144236, |
|
"grad_norm": 0.3158716559410095, |
|
"learning_rate": 3.952932911837022e-05, |
|
"loss": 0.5945, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 1.236556827484742, |
|
"grad_norm": 0.3323630094528198, |
|
"learning_rate": 3.935370565507552e-05, |
|
"loss": 0.6056, |
|
"step": 9075 |
|
}, |
|
{ |
|
"epoch": 1.2399634638252481, |
|
"grad_norm": 0.31543681025505066, |
|
"learning_rate": 3.9178082191780823e-05, |
|
"loss": 0.5642, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 1.2433701001657542, |
|
"grad_norm": 0.33846592903137207, |
|
"learning_rate": 3.900245872848613e-05, |
|
"loss": 0.585, |
|
"step": 9125 |
|
}, |
|
{ |
|
"epoch": 1.2467767365062603, |
|
"grad_norm": 0.2971675992012024, |
|
"learning_rate": 3.882683526519143e-05, |
|
"loss": 0.5953, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 1.2501833728467662, |
|
"grad_norm": 0.3218803405761719, |
|
"learning_rate": 3.8651211801896734e-05, |
|
"loss": 0.5914, |
|
"step": 9175 |
|
}, |
|
{ |
|
"epoch": 1.2535900091872723, |
|
"grad_norm": 0.3213994801044464, |
|
"learning_rate": 3.847558833860204e-05, |
|
"loss": 0.5822, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 1.2569966455277783, |
|
"grad_norm": 0.33393600583076477, |
|
"learning_rate": 3.8299964875307345e-05, |
|
"loss": 0.5937, |
|
"step": 9225 |
|
}, |
|
{ |
|
"epoch": 1.2604032818682844, |
|
"grad_norm": 0.27980881929397583, |
|
"learning_rate": 3.812434141201265e-05, |
|
"loss": 0.6056, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 1.2638099182087905, |
|
"grad_norm": 0.3020496070384979, |
|
"learning_rate": 3.794871794871795e-05, |
|
"loss": 0.6062, |
|
"step": 9275 |
|
}, |
|
{ |
|
"epoch": 1.2672165545492966, |
|
"grad_norm": 0.3256005048751831, |
|
"learning_rate": 3.7773094485423255e-05, |
|
"loss": 0.6021, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 1.2706231908898027, |
|
"grad_norm": 0.30306681990623474, |
|
"learning_rate": 3.759747102212856e-05, |
|
"loss": 0.608, |
|
"step": 9325 |
|
}, |
|
{ |
|
"epoch": 1.2740298272303088, |
|
"grad_norm": 0.2977856695652008, |
|
"learning_rate": 3.742184755883386e-05, |
|
"loss": 0.5785, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 1.2774364635708149, |
|
"grad_norm": 0.3653808832168579, |
|
"learning_rate": 3.7246224095539165e-05, |
|
"loss": 0.5907, |
|
"step": 9375 |
|
}, |
|
{ |
|
"epoch": 1.280843099911321, |
|
"grad_norm": 0.3345490097999573, |
|
"learning_rate": 3.707060063224447e-05, |
|
"loss": 0.587, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 1.284249736251827, |
|
"grad_norm": 0.2982236444950104, |
|
"learning_rate": 3.6894977168949777e-05, |
|
"loss": 0.5547, |
|
"step": 9425 |
|
}, |
|
{ |
|
"epoch": 1.2876563725923331, |
|
"grad_norm": 0.3151262402534485, |
|
"learning_rate": 3.671935370565508e-05, |
|
"loss": 0.5801, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 1.2910630089328392, |
|
"grad_norm": 0.3142940402030945, |
|
"learning_rate": 3.654373024236038e-05, |
|
"loss": 0.574, |
|
"step": 9475 |
|
}, |
|
{ |
|
"epoch": 1.2944696452733453, |
|
"grad_norm": 0.3357933461666107, |
|
"learning_rate": 3.636810677906569e-05, |
|
"loss": 0.598, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 1.2978762816138514, |
|
"grad_norm": 0.3080194294452667, |
|
"learning_rate": 3.6192483315770986e-05, |
|
"loss": 0.5895, |
|
"step": 9525 |
|
}, |
|
{ |
|
"epoch": 1.3012829179543575, |
|
"grad_norm": 0.29399389028549194, |
|
"learning_rate": 3.601685985247629e-05, |
|
"loss": 0.5606, |
|
"step": 9550 |
|
}, |
|
{ |
|
"epoch": 1.3046895542948636, |
|
"grad_norm": 0.3045051693916321, |
|
"learning_rate": 3.58412363891816e-05, |
|
"loss": 0.5781, |
|
"step": 9575 |
|
}, |
|
{ |
|
"epoch": 1.3080961906353696, |
|
"grad_norm": 0.3037458062171936, |
|
"learning_rate": 3.5665612925886896e-05, |
|
"loss": 0.5693, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 1.3115028269758757, |
|
"grad_norm": 0.3291562497615814, |
|
"learning_rate": 3.54899894625922e-05, |
|
"loss": 0.5733, |
|
"step": 9625 |
|
}, |
|
{ |
|
"epoch": 1.3149094633163818, |
|
"grad_norm": 0.33404019474983215, |
|
"learning_rate": 3.531436599929751e-05, |
|
"loss": 0.5834, |
|
"step": 9650 |
|
}, |
|
{ |
|
"epoch": 1.318316099656888, |
|
"grad_norm": 0.3017292320728302, |
|
"learning_rate": 3.513874253600281e-05, |
|
"loss": 0.5724, |
|
"step": 9675 |
|
}, |
|
{ |
|
"epoch": 1.321722735997394, |
|
"grad_norm": 0.31595638394355774, |
|
"learning_rate": 3.496311907270812e-05, |
|
"loss": 0.5901, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 1.3251293723379, |
|
"grad_norm": 0.29873600602149963, |
|
"learning_rate": 3.478749560941342e-05, |
|
"loss": 0.5758, |
|
"step": 9725 |
|
}, |
|
{ |
|
"epoch": 1.3285360086784062, |
|
"grad_norm": 0.3320747911930084, |
|
"learning_rate": 3.461187214611872e-05, |
|
"loss": 0.5882, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 1.3319426450189122, |
|
"grad_norm": 0.2872166037559509, |
|
"learning_rate": 3.443624868282403e-05, |
|
"loss": 0.5942, |
|
"step": 9775 |
|
}, |
|
{ |
|
"epoch": 1.3353492813594183, |
|
"grad_norm": 0.2684285342693329, |
|
"learning_rate": 3.426062521952933e-05, |
|
"loss": 0.6047, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 1.3387559176999244, |
|
"grad_norm": 0.3230988383293152, |
|
"learning_rate": 3.408500175623463e-05, |
|
"loss": 0.5786, |
|
"step": 9825 |
|
}, |
|
{ |
|
"epoch": 1.3421625540404305, |
|
"grad_norm": 0.33714237809181213, |
|
"learning_rate": 3.390937829293994e-05, |
|
"loss": 0.5895, |
|
"step": 9850 |
|
}, |
|
{ |
|
"epoch": 1.3455691903809366, |
|
"grad_norm": 0.29626786708831787, |
|
"learning_rate": 3.3733754829645244e-05, |
|
"loss": 0.5808, |
|
"step": 9875 |
|
}, |
|
{ |
|
"epoch": 1.3489758267214427, |
|
"grad_norm": 0.3248324990272522, |
|
"learning_rate": 3.355813136635055e-05, |
|
"loss": 0.5642, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 1.3523824630619485, |
|
"grad_norm": 0.32865700125694275, |
|
"learning_rate": 3.338250790305585e-05, |
|
"loss": 0.5807, |
|
"step": 9925 |
|
}, |
|
{ |
|
"epoch": 1.3557890994024546, |
|
"grad_norm": 0.280898779630661, |
|
"learning_rate": 3.3206884439761154e-05, |
|
"loss": 0.6035, |
|
"step": 9950 |
|
}, |
|
{ |
|
"epoch": 1.3591957357429607, |
|
"grad_norm": 0.3238600492477417, |
|
"learning_rate": 3.303126097646645e-05, |
|
"loss": 0.5941, |
|
"step": 9975 |
|
}, |
|
{ |
|
"epoch": 1.3626023720834668, |
|
"grad_norm": 0.3435409367084503, |
|
"learning_rate": 3.285563751317176e-05, |
|
"loss": 0.5831, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 1.3660090084239729, |
|
"grad_norm": 0.3290906250476837, |
|
"learning_rate": 3.2680014049877065e-05, |
|
"loss": 0.6023, |
|
"step": 10025 |
|
}, |
|
{ |
|
"epoch": 1.369415644764479, |
|
"grad_norm": 0.32303091883659363, |
|
"learning_rate": 3.250439058658237e-05, |
|
"loss": 0.5957, |
|
"step": 10050 |
|
}, |
|
{ |
|
"epoch": 1.372822281104985, |
|
"grad_norm": 0.30665114521980286, |
|
"learning_rate": 3.2328767123287676e-05, |
|
"loss": 0.586, |
|
"step": 10075 |
|
}, |
|
{ |
|
"epoch": 1.3762289174454911, |
|
"grad_norm": 0.32518666982650757, |
|
"learning_rate": 3.215314365999298e-05, |
|
"loss": 0.5829, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 1.3796355537859972, |
|
"grad_norm": 0.3334820568561554, |
|
"learning_rate": 3.197752019669828e-05, |
|
"loss": 0.5939, |
|
"step": 10125 |
|
}, |
|
{ |
|
"epoch": 1.3830421901265033, |
|
"grad_norm": 0.30552002787590027, |
|
"learning_rate": 3.1801896733403586e-05, |
|
"loss": 0.5826, |
|
"step": 10150 |
|
}, |
|
{ |
|
"epoch": 1.3864488264670094, |
|
"grad_norm": 0.35440927743911743, |
|
"learning_rate": 3.1626273270108885e-05, |
|
"loss": 0.607, |
|
"step": 10175 |
|
}, |
|
{ |
|
"epoch": 1.3898554628075155, |
|
"grad_norm": 0.31508344411849976, |
|
"learning_rate": 3.145064980681419e-05, |
|
"loss": 0.564, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 1.3932620991480216, |
|
"grad_norm": 0.2713547348976135, |
|
"learning_rate": 3.1275026343519496e-05, |
|
"loss": 0.5798, |
|
"step": 10225 |
|
}, |
|
{ |
|
"epoch": 1.3966687354885277, |
|
"grad_norm": 0.30810999870300293, |
|
"learning_rate": 3.1099402880224795e-05, |
|
"loss": 0.5887, |
|
"step": 10250 |
|
}, |
|
{ |
|
"epoch": 1.4000753718290337, |
|
"grad_norm": 0.29226067662239075, |
|
"learning_rate": 3.092377941693011e-05, |
|
"loss": 0.5992, |
|
"step": 10275 |
|
}, |
|
{ |
|
"epoch": 1.4034820081695398, |
|
"grad_norm": 0.35241231322288513, |
|
"learning_rate": 3.0748155953635406e-05, |
|
"loss": 0.5965, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 1.406888644510046, |
|
"grad_norm": 0.321756511926651, |
|
"learning_rate": 3.057253249034071e-05, |
|
"loss": 0.6126, |
|
"step": 10325 |
|
}, |
|
{ |
|
"epoch": 1.410295280850552, |
|
"grad_norm": 0.28978297114372253, |
|
"learning_rate": 3.0396909027046018e-05, |
|
"loss": 0.5835, |
|
"step": 10350 |
|
}, |
|
{ |
|
"epoch": 1.413701917191058, |
|
"grad_norm": 0.3767097592353821, |
|
"learning_rate": 3.0221285563751317e-05, |
|
"loss": 0.6188, |
|
"step": 10375 |
|
}, |
|
{ |
|
"epoch": 1.417108553531564, |
|
"grad_norm": 0.2829996347427368, |
|
"learning_rate": 3.0045662100456622e-05, |
|
"loss": 0.6087, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 1.42051518987207, |
|
"grad_norm": 0.3308793008327484, |
|
"learning_rate": 2.9870038637161928e-05, |
|
"loss": 0.5818, |
|
"step": 10425 |
|
}, |
|
{ |
|
"epoch": 1.4239218262125761, |
|
"grad_norm": 0.3255029618740082, |
|
"learning_rate": 2.969441517386723e-05, |
|
"loss": 0.5788, |
|
"step": 10450 |
|
}, |
|
{ |
|
"epoch": 1.4273284625530822, |
|
"grad_norm": 0.328983873128891, |
|
"learning_rate": 2.9518791710572536e-05, |
|
"loss": 0.6092, |
|
"step": 10475 |
|
}, |
|
{ |
|
"epoch": 1.4307350988935883, |
|
"grad_norm": 0.3405100703239441, |
|
"learning_rate": 2.9343168247277835e-05, |
|
"loss": 0.5924, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 1.4341417352340944, |
|
"grad_norm": 0.30565881729125977, |
|
"learning_rate": 2.916754478398314e-05, |
|
"loss": 0.5827, |
|
"step": 10525 |
|
}, |
|
{ |
|
"epoch": 1.4375483715746005, |
|
"grad_norm": 0.2973788380622864, |
|
"learning_rate": 2.899192132068845e-05, |
|
"loss": 0.5797, |
|
"step": 10550 |
|
}, |
|
{ |
|
"epoch": 1.4409550079151066, |
|
"grad_norm": 0.316902756690979, |
|
"learning_rate": 2.8816297857393748e-05, |
|
"loss": 0.5763, |
|
"step": 10575 |
|
}, |
|
{ |
|
"epoch": 1.4443616442556126, |
|
"grad_norm": 0.30036553740501404, |
|
"learning_rate": 2.8640674394099054e-05, |
|
"loss": 0.59, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 1.4477682805961187, |
|
"grad_norm": 0.3541527986526489, |
|
"learning_rate": 2.8465050930804356e-05, |
|
"loss": 0.6028, |
|
"step": 10625 |
|
}, |
|
{ |
|
"epoch": 1.4511749169366248, |
|
"grad_norm": 0.3401188850402832, |
|
"learning_rate": 2.8289427467509662e-05, |
|
"loss": 0.5771, |
|
"step": 10650 |
|
}, |
|
{ |
|
"epoch": 1.454581553277131, |
|
"grad_norm": 0.3095962405204773, |
|
"learning_rate": 2.8113804004214967e-05, |
|
"loss": 0.5766, |
|
"step": 10675 |
|
}, |
|
{ |
|
"epoch": 1.457988189617637, |
|
"grad_norm": 0.3148902952671051, |
|
"learning_rate": 2.7938180540920266e-05, |
|
"loss": 0.5878, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 1.461394825958143, |
|
"grad_norm": 0.3642762303352356, |
|
"learning_rate": 2.7762557077625572e-05, |
|
"loss": 0.5876, |
|
"step": 10725 |
|
}, |
|
{ |
|
"epoch": 1.4648014622986492, |
|
"grad_norm": 0.3234134018421173, |
|
"learning_rate": 2.7586933614330874e-05, |
|
"loss": 0.5963, |
|
"step": 10750 |
|
}, |
|
{ |
|
"epoch": 1.4682080986391552, |
|
"grad_norm": 0.3760201632976532, |
|
"learning_rate": 2.741131015103618e-05, |
|
"loss": 0.5982, |
|
"step": 10775 |
|
}, |
|
{ |
|
"epoch": 1.4716147349796613, |
|
"grad_norm": 0.3245134651660919, |
|
"learning_rate": 2.7235686687741486e-05, |
|
"loss": 0.5676, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 1.4750213713201674, |
|
"grad_norm": 0.32639381289482117, |
|
"learning_rate": 2.7060063224446784e-05, |
|
"loss": 0.5614, |
|
"step": 10825 |
|
}, |
|
{ |
|
"epoch": 1.4784280076606735, |
|
"grad_norm": 0.3028101325035095, |
|
"learning_rate": 2.688443976115209e-05, |
|
"loss": 0.5965, |
|
"step": 10850 |
|
}, |
|
{ |
|
"epoch": 1.4818346440011796, |
|
"grad_norm": 0.32848045229911804, |
|
"learning_rate": 2.67088162978574e-05, |
|
"loss": 0.5873, |
|
"step": 10875 |
|
}, |
|
{ |
|
"epoch": 1.4852412803416857, |
|
"grad_norm": 0.30644866824150085, |
|
"learning_rate": 2.6533192834562698e-05, |
|
"loss": 0.5908, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 1.4886479166821918, |
|
"grad_norm": 0.3460093140602112, |
|
"learning_rate": 2.6357569371268004e-05, |
|
"loss": 0.5897, |
|
"step": 10925 |
|
}, |
|
{ |
|
"epoch": 1.4920545530226978, |
|
"grad_norm": 0.3794984221458435, |
|
"learning_rate": 2.6181945907973306e-05, |
|
"loss": 0.6111, |
|
"step": 10950 |
|
}, |
|
{ |
|
"epoch": 1.495461189363204, |
|
"grad_norm": 0.3346981108188629, |
|
"learning_rate": 2.600632244467861e-05, |
|
"loss": 0.5662, |
|
"step": 10975 |
|
}, |
|
{ |
|
"epoch": 1.49886782570371, |
|
"grad_norm": 0.32983726263046265, |
|
"learning_rate": 2.5830698981383917e-05, |
|
"loss": 0.5525, |
|
"step": 11000 |
|
} |
|
], |
|
"logging_steps": 25, |
|
"max_steps": 14676, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 1000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 4.679900088910612e+19, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |