|
{ |
|
"best_metric": 3.290560483932495, |
|
"best_model_checkpoint": "./models/lora-finetuning/german-gpt2/checkpoint-292000", |
|
"epoch": 2.50689395838556, |
|
"eval_steps": 1000, |
|
"global_step": 300000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.004178156597309267, |
|
"grad_norm": 6.081511497497559, |
|
"learning_rate": 4.9800000000000004e-05, |
|
"loss": 5.8544, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.008356313194618534, |
|
"grad_norm": 11.10040283203125, |
|
"learning_rate": 4.9791054795670054e-05, |
|
"loss": 5.1312, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.008356313194618534, |
|
"eval_loss": 4.83798360824585, |
|
"eval_runtime": 40.3269, |
|
"eval_samples_per_second": 110.646, |
|
"eval_steps_per_second": 13.837, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.012534469791927802, |
|
"grad_norm": 5.357274055480957, |
|
"learning_rate": 4.9581270453973316e-05, |
|
"loss": 4.8327, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.016712626389237067, |
|
"grad_norm": 4.852677345275879, |
|
"learning_rate": 4.9371486112276584e-05, |
|
"loss": 4.6285, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.016712626389237067, |
|
"eval_loss": 4.516997814178467, |
|
"eval_runtime": 40.2461, |
|
"eval_samples_per_second": 110.868, |
|
"eval_steps_per_second": 13.865, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.020890782986546334, |
|
"grad_norm": 5.627632141113281, |
|
"learning_rate": 4.916170177057985e-05, |
|
"loss": 4.6442, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.025068939583855605, |
|
"grad_norm": 4.464819431304932, |
|
"learning_rate": 4.8951917428883106e-05, |
|
"loss": 4.4546, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.025068939583855605, |
|
"eval_loss": 4.347652912139893, |
|
"eval_runtime": 40.226, |
|
"eval_samples_per_second": 110.923, |
|
"eval_steps_per_second": 13.872, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.02924709618116487, |
|
"grad_norm": 5.631376266479492, |
|
"learning_rate": 4.8742133087186374e-05, |
|
"loss": 4.4188, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.033425252778474135, |
|
"grad_norm": 5.489138603210449, |
|
"learning_rate": 4.8532348745489635e-05, |
|
"loss": 4.4816, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.033425252778474135, |
|
"eval_loss": 4.25124454498291, |
|
"eval_runtime": 40.2167, |
|
"eval_samples_per_second": 110.949, |
|
"eval_steps_per_second": 13.875, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.0376034093757834, |
|
"grad_norm": 6.327524185180664, |
|
"learning_rate": 4.83229839724763e-05, |
|
"loss": 4.3622, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.04178156597309267, |
|
"grad_norm": 5.45938777923584, |
|
"learning_rate": 4.811319963077956e-05, |
|
"loss": 4.2968, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.04178156597309267, |
|
"eval_loss": 4.177467346191406, |
|
"eval_runtime": 40.2059, |
|
"eval_samples_per_second": 110.979, |
|
"eval_steps_per_second": 13.879, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.045959722570401935, |
|
"grad_norm": 1.7540432214736938, |
|
"learning_rate": 4.7903415289082825e-05, |
|
"loss": 4.1926, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.05013787916771121, |
|
"grad_norm": 3.5564088821411133, |
|
"learning_rate": 4.769363094738609e-05, |
|
"loss": 4.1807, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.05013787916771121, |
|
"eval_loss": 4.1200947761535645, |
|
"eval_runtime": 40.2083, |
|
"eval_samples_per_second": 110.972, |
|
"eval_steps_per_second": 13.878, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.054316035765020476, |
|
"grad_norm": 4.009562969207764, |
|
"learning_rate": 4.7484266174372746e-05, |
|
"loss": 4.2543, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.05849419236232974, |
|
"grad_norm": 2.7500414848327637, |
|
"learning_rate": 4.727448183267601e-05, |
|
"loss": 4.2382, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.05849419236232974, |
|
"eval_loss": 4.056636810302734, |
|
"eval_runtime": 40.2301, |
|
"eval_samples_per_second": 110.912, |
|
"eval_steps_per_second": 13.87, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.06267234895963901, |
|
"grad_norm": 9.726302146911621, |
|
"learning_rate": 4.7064697490979275e-05, |
|
"loss": 4.1848, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.06685050555694827, |
|
"grad_norm": 8.977545738220215, |
|
"learning_rate": 4.685491314928254e-05, |
|
"loss": 4.1906, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.06685050555694827, |
|
"eval_loss": 4.021559715270996, |
|
"eval_runtime": 40.1844, |
|
"eval_samples_per_second": 111.038, |
|
"eval_steps_per_second": 13.886, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.07102866215425754, |
|
"grad_norm": 8.016194343566895, |
|
"learning_rate": 4.6645128807585805e-05, |
|
"loss": 4.2125, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.0752068187515668, |
|
"grad_norm": 7.392004489898682, |
|
"learning_rate": 4.643576403457246e-05, |
|
"loss": 4.1017, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.0752068187515668, |
|
"eval_loss": 3.993513822555542, |
|
"eval_runtime": 40.2054, |
|
"eval_samples_per_second": 110.98, |
|
"eval_steps_per_second": 13.879, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.07938497534887608, |
|
"grad_norm": 5.018495559692383, |
|
"learning_rate": 4.6225979692875726e-05, |
|
"loss": 4.1314, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.08356313194618534, |
|
"grad_norm": 4.584932804107666, |
|
"learning_rate": 4.6016195351178994e-05, |
|
"loss": 4.0724, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.08356313194618534, |
|
"eval_loss": 3.9573867321014404, |
|
"eval_runtime": 40.2188, |
|
"eval_samples_per_second": 110.943, |
|
"eval_steps_per_second": 13.874, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.08774128854349461, |
|
"grad_norm": 6.544865608215332, |
|
"learning_rate": 4.5806411009482255e-05, |
|
"loss": 4.0296, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.09191944514080387, |
|
"grad_norm": 8.033525466918945, |
|
"learning_rate": 4.5596626667785516e-05, |
|
"loss": 4.1032, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.09191944514080387, |
|
"eval_loss": 3.9294466972351074, |
|
"eval_runtime": 40.2168, |
|
"eval_samples_per_second": 110.949, |
|
"eval_steps_per_second": 13.875, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.09609760173811314, |
|
"grad_norm": 5.5422539710998535, |
|
"learning_rate": 4.5387261894772176e-05, |
|
"loss": 4.0642, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.10027575833542242, |
|
"grad_norm": 4.3626508712768555, |
|
"learning_rate": 4.5177477553075444e-05, |
|
"loss": 3.9922, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.10027575833542242, |
|
"eval_loss": 3.9016854763031006, |
|
"eval_runtime": 40.208, |
|
"eval_samples_per_second": 110.973, |
|
"eval_steps_per_second": 13.878, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.10445391493273168, |
|
"grad_norm": 4.636664867401123, |
|
"learning_rate": 4.4967693211378706e-05, |
|
"loss": 4.017, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 0.10863207153004095, |
|
"grad_norm": 5.429406642913818, |
|
"learning_rate": 4.475790886968197e-05, |
|
"loss": 3.9346, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 0.10863207153004095, |
|
"eval_loss": 3.881444215774536, |
|
"eval_runtime": 40.1856, |
|
"eval_samples_per_second": 111.035, |
|
"eval_steps_per_second": 13.886, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 0.11281022812735021, |
|
"grad_norm": 6.612671375274658, |
|
"learning_rate": 4.454854409666863e-05, |
|
"loss": 4.0277, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 0.11698838472465949, |
|
"grad_norm": 3.6754331588745117, |
|
"learning_rate": 4.4338759754971895e-05, |
|
"loss": 3.9975, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 0.11698838472465949, |
|
"eval_loss": 3.8531486988067627, |
|
"eval_runtime": 40.1972, |
|
"eval_samples_per_second": 111.003, |
|
"eval_steps_per_second": 13.882, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 0.12116654132196875, |
|
"grad_norm": 5.864243984222412, |
|
"learning_rate": 4.412981455064194e-05, |
|
"loss": 5.2354, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 0.12534469791927802, |
|
"grad_norm": 6.96849250793457, |
|
"learning_rate": 4.392003020894521e-05, |
|
"loss": 4.8952, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 0.12534469791927802, |
|
"eval_loss": 4.6278204917907715, |
|
"eval_runtime": 40.2925, |
|
"eval_samples_per_second": 110.74, |
|
"eval_steps_per_second": 13.849, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 0.1295228545165873, |
|
"grad_norm": 8.802956581115723, |
|
"learning_rate": 4.371024586724847e-05, |
|
"loss": 4.7056, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.13370101111389654, |
|
"grad_norm": 4.981395721435547, |
|
"learning_rate": 4.350046152555173e-05, |
|
"loss": 4.5538, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.13370101111389654, |
|
"eval_loss": 4.411046504974365, |
|
"eval_runtime": 40.2719, |
|
"eval_samples_per_second": 110.797, |
|
"eval_steps_per_second": 13.856, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.1378791677112058, |
|
"grad_norm": 5.577733993530273, |
|
"learning_rate": 4.3290677183855e-05, |
|
"loss": 4.507, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 0.1420573243085151, |
|
"grad_norm": 12.114670753479004, |
|
"learning_rate": 4.308089284215827e-05, |
|
"loss": 4.4785, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 0.1420573243085151, |
|
"eval_loss": 4.291799068450928, |
|
"eval_runtime": 40.2805, |
|
"eval_samples_per_second": 110.773, |
|
"eval_steps_per_second": 13.853, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 0.14623548090582436, |
|
"grad_norm": 8.596464157104492, |
|
"learning_rate": 4.287110850046153e-05, |
|
"loss": 4.4097, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 0.1504136375031336, |
|
"grad_norm": 11.195298194885254, |
|
"learning_rate": 4.2661324158764796e-05, |
|
"loss": 4.3955, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 0.1504136375031336, |
|
"eval_loss": 4.214114189147949, |
|
"eval_runtime": 40.2758, |
|
"eval_samples_per_second": 110.786, |
|
"eval_steps_per_second": 13.854, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 0.15459179410044288, |
|
"grad_norm": 7.286564826965332, |
|
"learning_rate": 4.245195938575145e-05, |
|
"loss": 4.2833, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 0.15876995069775215, |
|
"grad_norm": 4.253493309020996, |
|
"learning_rate": 4.224217504405472e-05, |
|
"loss": 4.3205, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 0.15876995069775215, |
|
"eval_loss": 4.141177177429199, |
|
"eval_runtime": 40.2675, |
|
"eval_samples_per_second": 110.809, |
|
"eval_steps_per_second": 13.857, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 0.16294810729506143, |
|
"grad_norm": 6.702279090881348, |
|
"learning_rate": 4.203239070235798e-05, |
|
"loss": 4.2901, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 0.16712626389237067, |
|
"grad_norm": 8.38072681427002, |
|
"learning_rate": 4.182260636066124e-05, |
|
"loss": 4.1862, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.16712626389237067, |
|
"eval_loss": 4.087652683258057, |
|
"eval_runtime": 40.2756, |
|
"eval_samples_per_second": 110.787, |
|
"eval_steps_per_second": 13.855, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.17130442048967995, |
|
"grad_norm": 7.476578712463379, |
|
"learning_rate": 4.16132415876479e-05, |
|
"loss": 4.1174, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 0.17548257708698922, |
|
"grad_norm": 6.918697357177734, |
|
"learning_rate": 4.140345724595117e-05, |
|
"loss": 4.2499, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 0.17548257708698922, |
|
"eval_loss": 4.053555011749268, |
|
"eval_runtime": 40.1812, |
|
"eval_samples_per_second": 111.047, |
|
"eval_steps_per_second": 13.887, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 0.1796607336842985, |
|
"grad_norm": 3.7625985145568848, |
|
"learning_rate": 4.119367290425443e-05, |
|
"loss": 4.1048, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 0.18383889028160774, |
|
"grad_norm": 14.019062042236328, |
|
"learning_rate": 4.098388856255769e-05, |
|
"loss": 4.0907, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 0.18383889028160774, |
|
"eval_loss": 4.000556468963623, |
|
"eval_runtime": 40.1881, |
|
"eval_samples_per_second": 111.028, |
|
"eval_steps_per_second": 13.885, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 0.18801704687891702, |
|
"grad_norm": 4.171750545501709, |
|
"learning_rate": 4.077410422086096e-05, |
|
"loss": 4.1064, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 0.1921952034762263, |
|
"grad_norm": 6.7127885818481445, |
|
"learning_rate": 4.056473944784762e-05, |
|
"loss": 4.1832, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 0.1921952034762263, |
|
"eval_loss": 3.97273588180542, |
|
"eval_runtime": 40.1852, |
|
"eval_samples_per_second": 111.036, |
|
"eval_steps_per_second": 13.886, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 0.19637336007353556, |
|
"grad_norm": 5.6008830070495605, |
|
"learning_rate": 4.035495510615088e-05, |
|
"loss": 4.0533, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 0.20055151667084484, |
|
"grad_norm": 6.787017345428467, |
|
"learning_rate": 4.014517076445414e-05, |
|
"loss": 4.1166, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.20055151667084484, |
|
"eval_loss": 3.9389452934265137, |
|
"eval_runtime": 40.1944, |
|
"eval_samples_per_second": 111.01, |
|
"eval_steps_per_second": 13.883, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.20472967326815408, |
|
"grad_norm": 4.794278144836426, |
|
"learning_rate": 3.993538642275741e-05, |
|
"loss": 4.101, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 0.20890782986546336, |
|
"grad_norm": 8.866153717041016, |
|
"learning_rate": 3.972602164974407e-05, |
|
"loss": 4.1032, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 0.20890782986546336, |
|
"eval_loss": 3.9197261333465576, |
|
"eval_runtime": 40.1697, |
|
"eval_samples_per_second": 111.079, |
|
"eval_steps_per_second": 13.891, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 0.21308598646277263, |
|
"grad_norm": 5.620716094970703, |
|
"learning_rate": 3.951623730804733e-05, |
|
"loss": 3.9787, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 0.2172641430600819, |
|
"grad_norm": 4.146615028381348, |
|
"learning_rate": 3.930645296635059e-05, |
|
"loss": 4.0408, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 0.2172641430600819, |
|
"eval_loss": 3.9010603427886963, |
|
"eval_runtime": 40.1779, |
|
"eval_samples_per_second": 111.056, |
|
"eval_steps_per_second": 13.888, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 0.22144229965739115, |
|
"grad_norm": 5.577437400817871, |
|
"learning_rate": 3.909666862465386e-05, |
|
"loss": 3.9532, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 0.22562045625470042, |
|
"grad_norm": 4.08618688583374, |
|
"learning_rate": 3.888730385164052e-05, |
|
"loss": 3.9802, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 0.22562045625470042, |
|
"eval_loss": 3.8729515075683594, |
|
"eval_runtime": 40.2084, |
|
"eval_samples_per_second": 110.972, |
|
"eval_steps_per_second": 13.878, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 0.2297986128520097, |
|
"grad_norm": 6.147714614868164, |
|
"learning_rate": 3.8677519509943774e-05, |
|
"loss": 4.0436, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 0.23397676944931897, |
|
"grad_norm": 8.099363327026367, |
|
"learning_rate": 3.846815473693044e-05, |
|
"loss": 4.0165, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 0.23397676944931897, |
|
"eval_loss": 3.847057342529297, |
|
"eval_runtime": 40.2222, |
|
"eval_samples_per_second": 110.934, |
|
"eval_steps_per_second": 13.873, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 0.23815492604662822, |
|
"grad_norm": 5.891151428222656, |
|
"learning_rate": 3.82583703952337e-05, |
|
"loss": 3.9964, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 0.2423330826439375, |
|
"grad_norm": 4.3900861740112305, |
|
"learning_rate": 3.804858605353696e-05, |
|
"loss": 3.9467, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 0.2423330826439375, |
|
"eval_loss": 3.8354432582855225, |
|
"eval_runtime": 40.3928, |
|
"eval_samples_per_second": 110.465, |
|
"eval_steps_per_second": 13.814, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 0.24651123924124677, |
|
"grad_norm": 11.848061561584473, |
|
"learning_rate": 3.783880171184023e-05, |
|
"loss": 3.894, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 0.25068939583855604, |
|
"grad_norm": 6.982874870300293, |
|
"learning_rate": 3.762901737014349e-05, |
|
"loss": 3.9895, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 0.25068939583855604, |
|
"eval_loss": 3.813512086868286, |
|
"eval_runtime": 40.3474, |
|
"eval_samples_per_second": 110.59, |
|
"eval_steps_per_second": 13.83, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 0.2548675524358653, |
|
"grad_norm": 3.815357208251953, |
|
"learning_rate": 3.741923302844676e-05, |
|
"loss": 3.9362, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 0.2590457090331746, |
|
"grad_norm": 4.882671356201172, |
|
"learning_rate": 3.720944868675002e-05, |
|
"loss": 3.9015, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 0.2590457090331746, |
|
"eval_loss": 3.801868438720703, |
|
"eval_runtime": 40.3453, |
|
"eval_samples_per_second": 110.595, |
|
"eval_steps_per_second": 13.831, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 0.26322386563048383, |
|
"grad_norm": 4.451164722442627, |
|
"learning_rate": 3.699966434505328e-05, |
|
"loss": 3.7901, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 0.2674020222277931, |
|
"grad_norm": 6.627746105194092, |
|
"learning_rate": 3.679029957203994e-05, |
|
"loss": 3.8765, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.2674020222277931, |
|
"eval_loss": 3.786133289337158, |
|
"eval_runtime": 40.3847, |
|
"eval_samples_per_second": 110.487, |
|
"eval_steps_per_second": 13.817, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.2715801788251024, |
|
"grad_norm": 6.209244251251221, |
|
"learning_rate": 3.658051523034321e-05, |
|
"loss": 3.89, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 0.2757583354224116, |
|
"grad_norm": 6.814667701721191, |
|
"learning_rate": 3.637073088864648e-05, |
|
"loss": 3.8415, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 0.2757583354224116, |
|
"eval_loss": 3.7628209590911865, |
|
"eval_runtime": 40.404, |
|
"eval_samples_per_second": 110.435, |
|
"eval_steps_per_second": 13.811, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 0.2799364920197209, |
|
"grad_norm": 4.806527614593506, |
|
"learning_rate": 3.6160946546949734e-05, |
|
"loss": 3.928, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 0.2841146486170302, |
|
"grad_norm": 6.326642036437988, |
|
"learning_rate": 3.5951581773936394e-05, |
|
"loss": 3.8653, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 0.2841146486170302, |
|
"eval_loss": 3.751917600631714, |
|
"eval_runtime": 40.3766, |
|
"eval_samples_per_second": 110.51, |
|
"eval_steps_per_second": 13.82, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 0.2882928052143394, |
|
"grad_norm": 4.734166145324707, |
|
"learning_rate": 3.574179743223966e-05, |
|
"loss": 3.8688, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 0.2924709618116487, |
|
"grad_norm": 5.2707343101501465, |
|
"learning_rate": 3.553201309054292e-05, |
|
"loss": 3.7493, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 0.2924709618116487, |
|
"eval_loss": 3.7429351806640625, |
|
"eval_runtime": 40.3777, |
|
"eval_samples_per_second": 110.506, |
|
"eval_steps_per_second": 13.819, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 0.29664911840895797, |
|
"grad_norm": 4.153214454650879, |
|
"learning_rate": 3.5322228748846184e-05, |
|
"loss": 3.8655, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 0.3008272750062672, |
|
"grad_norm": 8.407896995544434, |
|
"learning_rate": 3.511244440714945e-05, |
|
"loss": 3.7849, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 0.3008272750062672, |
|
"eval_loss": 3.7235593795776367, |
|
"eval_runtime": 40.3782, |
|
"eval_samples_per_second": 110.505, |
|
"eval_steps_per_second": 13.819, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 0.3050054316035765, |
|
"grad_norm": 5.558535099029541, |
|
"learning_rate": 3.4902660065452714e-05, |
|
"loss": 3.7974, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 0.30918358820088576, |
|
"grad_norm": 7.9293293952941895, |
|
"learning_rate": 3.4693295292439374e-05, |
|
"loss": 3.8025, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 0.30918358820088576, |
|
"eval_loss": 3.7119622230529785, |
|
"eval_runtime": 40.3046, |
|
"eval_samples_per_second": 110.707, |
|
"eval_steps_per_second": 13.845, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 0.31336174479819506, |
|
"grad_norm": 7.648885250091553, |
|
"learning_rate": 3.4483510950742635e-05, |
|
"loss": 3.7967, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 0.3175399013955043, |
|
"grad_norm": 5.128037929534912, |
|
"learning_rate": 3.42737266090459e-05, |
|
"loss": 3.7835, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 0.3175399013955043, |
|
"eval_loss": 3.6989333629608154, |
|
"eval_runtime": 40.4275, |
|
"eval_samples_per_second": 110.37, |
|
"eval_steps_per_second": 13.802, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 0.32171805799281356, |
|
"grad_norm": 5.813144207000732, |
|
"learning_rate": 3.406394226734917e-05, |
|
"loss": 3.8355, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 0.32589621459012286, |
|
"grad_norm": 5.470829010009766, |
|
"learning_rate": 3.3854577494335824e-05, |
|
"loss": 3.7739, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 0.32589621459012286, |
|
"eval_loss": 3.6981310844421387, |
|
"eval_runtime": 40.3386, |
|
"eval_samples_per_second": 110.614, |
|
"eval_steps_per_second": 13.833, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 0.3300743711874321, |
|
"grad_norm": 5.560943126678467, |
|
"learning_rate": 3.3644793152639085e-05, |
|
"loss": 3.8491, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 0.33425252778474135, |
|
"grad_norm": 6.327647686004639, |
|
"learning_rate": 3.3435008810942353e-05, |
|
"loss": 3.6971, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.33425252778474135, |
|
"eval_loss": 3.6876156330108643, |
|
"eval_runtime": 40.3373, |
|
"eval_samples_per_second": 110.617, |
|
"eval_steps_per_second": 13.833, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.33843068438205065, |
|
"grad_norm": 5.916841983795166, |
|
"learning_rate": 3.322522446924562e-05, |
|
"loss": 3.7719, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 0.3426088409793599, |
|
"grad_norm": 5.004364967346191, |
|
"learning_rate": 3.3015859696232275e-05, |
|
"loss": 3.7641, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 0.3426088409793599, |
|
"eval_loss": 3.672261953353882, |
|
"eval_runtime": 40.3068, |
|
"eval_samples_per_second": 110.701, |
|
"eval_steps_per_second": 13.844, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 0.3467869975766692, |
|
"grad_norm": 5.481464862823486, |
|
"learning_rate": 3.2806075354535536e-05, |
|
"loss": 3.75, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 0.35096515417397844, |
|
"grad_norm": 3.293029546737671, |
|
"learning_rate": 3.2596291012838804e-05, |
|
"loss": 3.7251, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 0.35096515417397844, |
|
"eval_loss": 3.660274028778076, |
|
"eval_runtime": 40.3581, |
|
"eval_samples_per_second": 110.56, |
|
"eval_steps_per_second": 13.826, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 0.3551433107712877, |
|
"grad_norm": 4.981443405151367, |
|
"learning_rate": 3.238650667114207e-05, |
|
"loss": 3.6741, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 0.359321467368597, |
|
"grad_norm": 4.1700263023376465, |
|
"learning_rate": 3.217672232944533e-05, |
|
"loss": 3.774, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 0.359321467368597, |
|
"eval_loss": 3.659872055053711, |
|
"eval_runtime": 40.335, |
|
"eval_samples_per_second": 110.624, |
|
"eval_steps_per_second": 13.834, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 0.36349962396590624, |
|
"grad_norm": 5.307525157928467, |
|
"learning_rate": 3.1966937987748595e-05, |
|
"loss": 3.7479, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 0.3676777805632155, |
|
"grad_norm": 4.981558322906494, |
|
"learning_rate": 3.1757573214735255e-05, |
|
"loss": 3.7878, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 0.3676777805632155, |
|
"eval_loss": 3.647749662399292, |
|
"eval_runtime": 40.341, |
|
"eval_samples_per_second": 110.607, |
|
"eval_steps_per_second": 13.832, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 0.3718559371605248, |
|
"grad_norm": 6.054855823516846, |
|
"learning_rate": 3.154778887303852e-05, |
|
"loss": 3.7851, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 0.37603409375783403, |
|
"grad_norm": 5.549240589141846, |
|
"learning_rate": 3.133800453134178e-05, |
|
"loss": 3.7971, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 0.37603409375783403, |
|
"eval_loss": 3.6410250663757324, |
|
"eval_runtime": 40.3919, |
|
"eval_samples_per_second": 110.468, |
|
"eval_steps_per_second": 13.815, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 0.38021225035514333, |
|
"grad_norm": 7.926109313964844, |
|
"learning_rate": 3.112863975832844e-05, |
|
"loss": 3.7444, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 0.3843904069524526, |
|
"grad_norm": 4.797922611236572, |
|
"learning_rate": 3.0918855416631705e-05, |
|
"loss": 3.7357, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 0.3843904069524526, |
|
"eval_loss": 3.6285250186920166, |
|
"eval_runtime": 40.2976, |
|
"eval_samples_per_second": 110.726, |
|
"eval_steps_per_second": 13.847, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 0.3885685635497618, |
|
"grad_norm": 3.2273850440979004, |
|
"learning_rate": 3.0709071074934966e-05, |
|
"loss": 3.6811, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 0.3927467201470711, |
|
"grad_norm": 3.6433870792388916, |
|
"learning_rate": 3.049928673323823e-05, |
|
"loss": 3.7285, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 0.3927467201470711, |
|
"eval_loss": 3.622321367263794, |
|
"eval_runtime": 40.3211, |
|
"eval_samples_per_second": 110.662, |
|
"eval_steps_per_second": 13.839, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 0.3969248767443804, |
|
"grad_norm": 3.6138274669647217, |
|
"learning_rate": 3.0289502391541496e-05, |
|
"loss": 3.7385, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 0.4011030333416897, |
|
"grad_norm": 3.9696879386901855, |
|
"learning_rate": 3.0079718049844764e-05, |
|
"loss": 3.7095, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 0.4011030333416897, |
|
"eval_loss": 3.6237733364105225, |
|
"eval_runtime": 40.3393, |
|
"eval_samples_per_second": 110.612, |
|
"eval_steps_per_second": 13.833, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 0.4052811899389989, |
|
"grad_norm": 4.0967912673950195, |
|
"learning_rate": 2.9869933708148025e-05, |
|
"loss": 3.7014, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 0.40945934653630817, |
|
"grad_norm": 6.807064533233643, |
|
"learning_rate": 2.966014936645129e-05, |
|
"loss": 3.7479, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 0.40945934653630817, |
|
"eval_loss": 3.614962100982666, |
|
"eval_runtime": 40.3225, |
|
"eval_samples_per_second": 110.658, |
|
"eval_steps_per_second": 13.838, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 0.41363750313361747, |
|
"grad_norm": 5.55667781829834, |
|
"learning_rate": 2.9450784593437946e-05, |
|
"loss": 3.6814, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 0.4178156597309267, |
|
"grad_norm": 3.904834032058716, |
|
"learning_rate": 2.9241000251741214e-05, |
|
"loss": 3.624, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 0.4178156597309267, |
|
"eval_loss": 3.604703187942505, |
|
"eval_runtime": 40.3492, |
|
"eval_samples_per_second": 110.585, |
|
"eval_steps_per_second": 13.829, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 0.42199381632823596, |
|
"grad_norm": 8.520374298095703, |
|
"learning_rate": 2.9031215910044472e-05, |
|
"loss": 3.6371, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 0.42617197292554526, |
|
"grad_norm": 8.05156135559082, |
|
"learning_rate": 2.8821851137031135e-05, |
|
"loss": 3.7153, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 0.42617197292554526, |
|
"eval_loss": 3.587707996368408, |
|
"eval_runtime": 40.3074, |
|
"eval_samples_per_second": 110.699, |
|
"eval_steps_per_second": 13.844, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 0.4303501295228545, |
|
"grad_norm": 5.482362270355225, |
|
"learning_rate": 2.8612066795334397e-05, |
|
"loss": 3.6853, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 0.4345282861201638, |
|
"grad_norm": 5.900150775909424, |
|
"learning_rate": 2.840228245363766e-05, |
|
"loss": 3.7009, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 0.4345282861201638, |
|
"eval_loss": 3.5961596965789795, |
|
"eval_runtime": 40.3077, |
|
"eval_samples_per_second": 110.698, |
|
"eval_steps_per_second": 13.843, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 0.43870644271747306, |
|
"grad_norm": 4.387772083282471, |
|
"learning_rate": 2.8192498111940923e-05, |
|
"loss": 3.6555, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 0.4428845993147823, |
|
"grad_norm": 4.479552745819092, |
|
"learning_rate": 2.798271377024419e-05, |
|
"loss": 3.6585, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 0.4428845993147823, |
|
"eval_loss": 3.587613582611084, |
|
"eval_runtime": 40.2994, |
|
"eval_samples_per_second": 110.721, |
|
"eval_steps_per_second": 13.846, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 0.4470627559120916, |
|
"grad_norm": 7.089645862579346, |
|
"learning_rate": 2.7772929428547455e-05, |
|
"loss": 3.6997, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 0.45124091250940085, |
|
"grad_norm": 4.489483833312988, |
|
"learning_rate": 2.7563145086850717e-05, |
|
"loss": 3.6804, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 0.45124091250940085, |
|
"eval_loss": 3.5782344341278076, |
|
"eval_runtime": 40.3396, |
|
"eval_samples_per_second": 110.611, |
|
"eval_steps_per_second": 13.833, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 0.4554190691067101, |
|
"grad_norm": 4.279026985168457, |
|
"learning_rate": 2.7353360745153985e-05, |
|
"loss": 3.6596, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 0.4595972257040194, |
|
"grad_norm": 5.833207607269287, |
|
"learning_rate": 2.714399597214064e-05, |
|
"loss": 3.6249, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 0.4595972257040194, |
|
"eval_loss": 3.5750606060028076, |
|
"eval_runtime": 40.2836, |
|
"eval_samples_per_second": 110.765, |
|
"eval_steps_per_second": 13.852, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 0.46377538230132864, |
|
"grad_norm": 1.6167926788330078, |
|
"learning_rate": 2.6934631199127298e-05, |
|
"loss": 3.6656, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 0.46795353889863794, |
|
"grad_norm": 8.225107192993164, |
|
"learning_rate": 2.6724846857430562e-05, |
|
"loss": 3.5755, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 0.46795353889863794, |
|
"eval_loss": 3.5670828819274902, |
|
"eval_runtime": 40.3346, |
|
"eval_samples_per_second": 110.625, |
|
"eval_steps_per_second": 13.834, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 0.4721316954959472, |
|
"grad_norm": 6.227290153503418, |
|
"learning_rate": 2.651506251573383e-05, |
|
"loss": 3.658, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 0.47630985209325644, |
|
"grad_norm": 2.954385995864868, |
|
"learning_rate": 2.6305278174037092e-05, |
|
"loss": 3.6594, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 0.47630985209325644, |
|
"eval_loss": 3.5596070289611816, |
|
"eval_runtime": 40.3256, |
|
"eval_samples_per_second": 110.649, |
|
"eval_steps_per_second": 13.837, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 0.48048800869056574, |
|
"grad_norm": 4.584821701049805, |
|
"learning_rate": 2.6095493832340356e-05, |
|
"loss": 3.6365, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 0.484666165287875, |
|
"grad_norm": 5.709506511688232, |
|
"learning_rate": 2.5885709490643618e-05, |
|
"loss": 3.7103, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 0.484666165287875, |
|
"eval_loss": 3.5584540367126465, |
|
"eval_runtime": 40.3216, |
|
"eval_samples_per_second": 110.66, |
|
"eval_steps_per_second": 13.839, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 0.48884432188518423, |
|
"grad_norm": 2.718370199203491, |
|
"learning_rate": 2.5675925148946882e-05, |
|
"loss": 3.6135, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 0.49302247848249353, |
|
"grad_norm": 5.212717056274414, |
|
"learning_rate": 2.546614080725015e-05, |
|
"loss": 3.6775, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 0.49302247848249353, |
|
"eval_loss": 3.54938006401062, |
|
"eval_runtime": 40.2984, |
|
"eval_samples_per_second": 110.724, |
|
"eval_steps_per_second": 13.847, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 0.4972006350798028, |
|
"grad_norm": 5.458935737609863, |
|
"learning_rate": 2.5256776034236807e-05, |
|
"loss": 3.6635, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 0.5013787916771121, |
|
"grad_norm": 6.3775248527526855, |
|
"learning_rate": 2.504699169254007e-05, |
|
"loss": 3.6256, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 0.5013787916771121, |
|
"eval_loss": 3.5428049564361572, |
|
"eval_runtime": 40.2928, |
|
"eval_samples_per_second": 110.739, |
|
"eval_steps_per_second": 13.849, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 0.5055569482744213, |
|
"grad_norm": 3.5704195499420166, |
|
"learning_rate": 2.4837207350843333e-05, |
|
"loss": 3.591, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 0.5097351048717306, |
|
"grad_norm": 4.804786682128906, |
|
"learning_rate": 2.46274230091466e-05, |
|
"loss": 3.6524, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 0.5097351048717306, |
|
"eval_loss": 3.5428876876831055, |
|
"eval_runtime": 40.303, |
|
"eval_samples_per_second": 110.711, |
|
"eval_steps_per_second": 13.845, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 0.5139132614690398, |
|
"grad_norm": 5.669532775878906, |
|
"learning_rate": 2.4418058236133258e-05, |
|
"loss": 3.613, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 0.5180914180663492, |
|
"grad_norm": 6.797584056854248, |
|
"learning_rate": 2.420827389443652e-05, |
|
"loss": 3.6231, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 0.5180914180663492, |
|
"eval_loss": 3.537569046020508, |
|
"eval_runtime": 40.3095, |
|
"eval_samples_per_second": 110.694, |
|
"eval_steps_per_second": 13.843, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 0.5222695746636584, |
|
"grad_norm": 3.8286426067352295, |
|
"learning_rate": 2.3998489552739783e-05, |
|
"loss": 3.6407, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 0.5264477312609677, |
|
"grad_norm": 6.5328850746154785, |
|
"learning_rate": 2.3788705211043048e-05, |
|
"loss": 3.6704, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 0.5264477312609677, |
|
"eval_loss": 3.5320422649383545, |
|
"eval_runtime": 40.3117, |
|
"eval_samples_per_second": 110.687, |
|
"eval_steps_per_second": 13.842, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 0.5306258878582769, |
|
"grad_norm": 5.264260768890381, |
|
"learning_rate": 2.3579340438029708e-05, |
|
"loss": 3.6362, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 0.5348040444555862, |
|
"grad_norm": 4.104739189147949, |
|
"learning_rate": 2.336955609633297e-05, |
|
"loss": 3.5952, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.5348040444555862, |
|
"eval_loss": 3.5205154418945312, |
|
"eval_runtime": 40.2973, |
|
"eval_samples_per_second": 110.727, |
|
"eval_steps_per_second": 13.847, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.5389822010528955, |
|
"grad_norm": 3.8011248111724854, |
|
"learning_rate": 2.3159771754636237e-05, |
|
"loss": 3.6439, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 0.5431603576502048, |
|
"grad_norm": 3.5783491134643555, |
|
"learning_rate": 2.29499874129395e-05, |
|
"loss": 3.6597, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 0.5431603576502048, |
|
"eval_loss": 3.5196046829223633, |
|
"eval_runtime": 40.4106, |
|
"eval_samples_per_second": 110.416, |
|
"eval_steps_per_second": 13.808, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 0.547338514247514, |
|
"grad_norm": 6.129964351654053, |
|
"learning_rate": 2.2740203071242763e-05, |
|
"loss": 3.6149, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 0.5515166708448233, |
|
"grad_norm": 5.428370952606201, |
|
"learning_rate": 2.253083829822942e-05, |
|
"loss": 3.543, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 0.5515166708448233, |
|
"eval_loss": 3.5081422328948975, |
|
"eval_runtime": 40.3325, |
|
"eval_samples_per_second": 110.63, |
|
"eval_steps_per_second": 13.835, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 0.5556948274421325, |
|
"grad_norm": 4.526833534240723, |
|
"learning_rate": 2.232147352521608e-05, |
|
"loss": 5.1116, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 0.5598729840394417, |
|
"grad_norm": 11.199429512023926, |
|
"learning_rate": 2.2111689183519345e-05, |
|
"loss": 4.8238, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 0.5598729840394417, |
|
"eval_loss": 4.616936683654785, |
|
"eval_runtime": 40.3658, |
|
"eval_samples_per_second": 110.539, |
|
"eval_steps_per_second": 13.824, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 0.5640511406367511, |
|
"grad_norm": 8.317018508911133, |
|
"learning_rate": 2.1901904841822606e-05, |
|
"loss": 4.683, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 0.5682292972340603, |
|
"grad_norm": 4.753358364105225, |
|
"learning_rate": 2.169212050012587e-05, |
|
"loss": 4.5343, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 0.5682292972340603, |
|
"eval_loss": 4.415843963623047, |
|
"eval_runtime": 40.3527, |
|
"eval_samples_per_second": 110.575, |
|
"eval_steps_per_second": 13.828, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 0.5724074538313696, |
|
"grad_norm": 13.623976707458496, |
|
"learning_rate": 2.148275572711253e-05, |
|
"loss": 4.4902, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 0.5765856104286788, |
|
"grad_norm": 6.011263847351074, |
|
"learning_rate": 2.1272971385415795e-05, |
|
"loss": 4.4547, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 0.5765856104286788, |
|
"eval_loss": 4.297609806060791, |
|
"eval_runtime": 40.3782, |
|
"eval_samples_per_second": 110.505, |
|
"eval_steps_per_second": 13.819, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 0.5807637670259881, |
|
"grad_norm": 5.958744049072266, |
|
"learning_rate": 2.1063187043719056e-05, |
|
"loss": 4.413, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 0.5849419236232974, |
|
"grad_norm": 3.0502681732177734, |
|
"learning_rate": 2.0853402702022324e-05, |
|
"loss": 4.3609, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 0.5849419236232974, |
|
"eval_loss": 4.230189323425293, |
|
"eval_runtime": 40.3883, |
|
"eval_samples_per_second": 110.478, |
|
"eval_steps_per_second": 13.816, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 0.5891200802206067, |
|
"grad_norm": 4.797601699829102, |
|
"learning_rate": 2.064403792900898e-05, |
|
"loss": 4.3879, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 0.5932982368179159, |
|
"grad_norm": 7.879065990447998, |
|
"learning_rate": 2.0434253587312242e-05, |
|
"loss": 4.3503, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 0.5932982368179159, |
|
"eval_loss": 4.177266597747803, |
|
"eval_runtime": 40.395, |
|
"eval_samples_per_second": 110.459, |
|
"eval_steps_per_second": 13.814, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 0.5974763934152252, |
|
"grad_norm": 5.502597332000732, |
|
"learning_rate": 2.0224469245615507e-05, |
|
"loss": 4.3594, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 0.6016545500125344, |
|
"grad_norm": 14.831869125366211, |
|
"learning_rate": 2.0014684903918775e-05, |
|
"loss": 4.2399, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 0.6016545500125344, |
|
"eval_loss": 4.133894443511963, |
|
"eval_runtime": 40.3876, |
|
"eval_samples_per_second": 110.479, |
|
"eval_steps_per_second": 13.816, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 0.6058327066098438, |
|
"grad_norm": 7.395357608795166, |
|
"learning_rate": 1.980532013090543e-05, |
|
"loss": 4.2413, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 0.610010863207153, |
|
"grad_norm": 8.504011154174805, |
|
"learning_rate": 1.9595535789208693e-05, |
|
"loss": 4.3134, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 0.610010863207153, |
|
"eval_loss": 4.093682765960693, |
|
"eval_runtime": 40.4955, |
|
"eval_samples_per_second": 110.185, |
|
"eval_steps_per_second": 13.779, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 0.6141890198044623, |
|
"grad_norm": 4.6213698387146, |
|
"learning_rate": 1.9385751447511957e-05, |
|
"loss": 4.2654, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 0.6183671764017715, |
|
"grad_norm": 7.031868934631348, |
|
"learning_rate": 1.9176386674498617e-05, |
|
"loss": 4.1979, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 0.6183671764017715, |
|
"eval_loss": 4.06504487991333, |
|
"eval_runtime": 40.3928, |
|
"eval_samples_per_second": 110.465, |
|
"eval_steps_per_second": 13.814, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 0.6225453329990808, |
|
"grad_norm": 6.018976211547852, |
|
"learning_rate": 1.8966602332801882e-05, |
|
"loss": 4.1942, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 0.6267234895963901, |
|
"grad_norm": 5.645545959472656, |
|
"learning_rate": 1.8756817991105143e-05, |
|
"loss": 4.2067, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 0.6267234895963901, |
|
"eval_loss": 4.035527229309082, |
|
"eval_runtime": 40.4831, |
|
"eval_samples_per_second": 110.219, |
|
"eval_steps_per_second": 13.784, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 0.6309016461936994, |
|
"grad_norm": 9.228676795959473, |
|
"learning_rate": 1.854703364940841e-05, |
|
"loss": 4.1713, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 0.6350798027910086, |
|
"grad_norm": 5.62150239944458, |
|
"learning_rate": 1.8337668876395068e-05, |
|
"loss": 4.2362, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 0.6350798027910086, |
|
"eval_loss": 4.016374588012695, |
|
"eval_runtime": 40.3961, |
|
"eval_samples_per_second": 110.456, |
|
"eval_steps_per_second": 13.813, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 0.6392579593883179, |
|
"grad_norm": 6.281893730163574, |
|
"learning_rate": 1.812788453469833e-05, |
|
"loss": 4.1349, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 0.6434361159856271, |
|
"grad_norm": 5.5328497886657715, |
|
"learning_rate": 1.7918100193001594e-05, |
|
"loss": 4.0535, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 0.6434361159856271, |
|
"eval_loss": 3.986826181411743, |
|
"eval_runtime": 40.408, |
|
"eval_samples_per_second": 110.424, |
|
"eval_steps_per_second": 13.809, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 0.6476142725829364, |
|
"grad_norm": 11.436102867126465, |
|
"learning_rate": 1.7708315851304862e-05, |
|
"loss": 4.1203, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 0.6517924291802457, |
|
"grad_norm": 6.688998222351074, |
|
"learning_rate": 1.7498531509608123e-05, |
|
"loss": 4.0486, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 0.6517924291802457, |
|
"eval_loss": 3.9673120975494385, |
|
"eval_runtime": 40.4149, |
|
"eval_samples_per_second": 110.405, |
|
"eval_steps_per_second": 13.807, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 0.655970585777555, |
|
"grad_norm": 8.663886070251465, |
|
"learning_rate": 1.7288747167911388e-05, |
|
"loss": 4.0426, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 0.6601487423748642, |
|
"grad_norm": 5.486411094665527, |
|
"learning_rate": 1.7078962826214652e-05, |
|
"loss": 4.0643, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 0.6601487423748642, |
|
"eval_loss": 3.941148042678833, |
|
"eval_runtime": 40.4048, |
|
"eval_samples_per_second": 110.432, |
|
"eval_steps_per_second": 13.81, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 0.6643268989721735, |
|
"grad_norm": 6.589982509613037, |
|
"learning_rate": 1.6869178484517917e-05, |
|
"loss": 4.1028, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 0.6685050555694827, |
|
"grad_norm": 5.7215800285339355, |
|
"learning_rate": 1.6659813711504574e-05, |
|
"loss": 4.0685, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 0.6685050555694827, |
|
"eval_loss": 3.935919761657715, |
|
"eval_runtime": 40.3891, |
|
"eval_samples_per_second": 110.475, |
|
"eval_steps_per_second": 13.816, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 0.672683212166792, |
|
"grad_norm": 4.948646068572998, |
|
"learning_rate": 1.645002936980784e-05, |
|
"loss": 4.0631, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 0.6768613687641013, |
|
"grad_norm": 10.537930488586426, |
|
"learning_rate": 1.6240245028111103e-05, |
|
"loss": 4.0076, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 0.6768613687641013, |
|
"eval_loss": 3.921271800994873, |
|
"eval_runtime": 40.394, |
|
"eval_samples_per_second": 110.462, |
|
"eval_steps_per_second": 13.814, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 0.6810395253614105, |
|
"grad_norm": 8.366657257080078, |
|
"learning_rate": 1.603088025509776e-05, |
|
"loss": 4.0462, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 0.6852176819587198, |
|
"grad_norm": 8.010354042053223, |
|
"learning_rate": 1.5821095913401024e-05, |
|
"loss": 4.0686, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 0.6852176819587198, |
|
"eval_loss": 3.901902914047241, |
|
"eval_runtime": 40.3855, |
|
"eval_samples_per_second": 110.485, |
|
"eval_steps_per_second": 13.817, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 0.689395838556029, |
|
"grad_norm": 7.819444179534912, |
|
"learning_rate": 1.561131157170429e-05, |
|
"loss": 3.9706, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 0.6935739951533384, |
|
"grad_norm": 5.248921871185303, |
|
"learning_rate": 1.5401527230007554e-05, |
|
"loss": 3.9257, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 0.6935739951533384, |
|
"eval_loss": 3.8821218013763428, |
|
"eval_runtime": 40.3861, |
|
"eval_samples_per_second": 110.484, |
|
"eval_steps_per_second": 13.817, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 0.6977521517506476, |
|
"grad_norm": 6.49260139465332, |
|
"learning_rate": 1.5191742888310817e-05, |
|
"loss": 4.0404, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 0.7019303083479569, |
|
"grad_norm": 5.810378074645996, |
|
"learning_rate": 1.4981958546614081e-05, |
|
"loss": 3.9735, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 0.7019303083479569, |
|
"eval_loss": 3.877354145050049, |
|
"eval_runtime": 40.4901, |
|
"eval_samples_per_second": 110.2, |
|
"eval_steps_per_second": 13.781, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 0.7061084649452661, |
|
"grad_norm": 5.234148025512695, |
|
"learning_rate": 1.4772174204917347e-05, |
|
"loss": 3.8852, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 0.7102866215425754, |
|
"grad_norm": 10.57536506652832, |
|
"learning_rate": 1.456238986322061e-05, |
|
"loss": 3.9721, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 0.7102866215425754, |
|
"eval_loss": 3.865615129470825, |
|
"eval_runtime": 40.3575, |
|
"eval_samples_per_second": 110.562, |
|
"eval_steps_per_second": 13.826, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 0.7144647781398847, |
|
"grad_norm": 4.43326473236084, |
|
"learning_rate": 1.4353025090207267e-05, |
|
"loss": 4.0048, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 0.718642934737194, |
|
"grad_norm": 12.378066062927246, |
|
"learning_rate": 1.414324074851053e-05, |
|
"loss": 3.9356, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 0.718642934737194, |
|
"eval_loss": 3.8526625633239746, |
|
"eval_runtime": 40.4019, |
|
"eval_samples_per_second": 110.44, |
|
"eval_steps_per_second": 13.811, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 0.7228210913345032, |
|
"grad_norm": 6.761714458465576, |
|
"learning_rate": 1.3933456406813796e-05, |
|
"loss": 3.9714, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 0.7269992479318125, |
|
"grad_norm": 7.718149662017822, |
|
"learning_rate": 1.3723672065117061e-05, |
|
"loss": 3.9137, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 0.7269992479318125, |
|
"eval_loss": 3.8465211391448975, |
|
"eval_runtime": 40.4199, |
|
"eval_samples_per_second": 110.391, |
|
"eval_steps_per_second": 13.805, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 0.7311774045291217, |
|
"grad_norm": 6.301932334899902, |
|
"learning_rate": 1.3514307292103718e-05, |
|
"loss": 3.9436, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 0.735355561126431, |
|
"grad_norm": 5.401347637176514, |
|
"learning_rate": 1.3304522950406984e-05, |
|
"loss": 3.908, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 0.735355561126431, |
|
"eval_loss": 3.828646659851074, |
|
"eval_runtime": 40.4627, |
|
"eval_samples_per_second": 110.275, |
|
"eval_steps_per_second": 13.79, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 0.7395337177237403, |
|
"grad_norm": 5.284492492675781, |
|
"learning_rate": 1.3094738608710247e-05, |
|
"loss": 3.9225, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 0.7437118743210496, |
|
"grad_norm": 7.627567768096924, |
|
"learning_rate": 1.288495426701351e-05, |
|
"loss": 3.9705, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 0.7437118743210496, |
|
"eval_loss": 3.8248815536499023, |
|
"eval_runtime": 40.4521, |
|
"eval_samples_per_second": 110.303, |
|
"eval_steps_per_second": 13.794, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 0.7478900309183588, |
|
"grad_norm": 10.147069931030273, |
|
"learning_rate": 1.2675169925316775e-05, |
|
"loss": 3.9042, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 0.7520681875156681, |
|
"grad_norm": 5.002488136291504, |
|
"learning_rate": 1.2465805152303433e-05, |
|
"loss": 3.9459, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 0.7520681875156681, |
|
"eval_loss": 3.8160951137542725, |
|
"eval_runtime": 40.3907, |
|
"eval_samples_per_second": 110.471, |
|
"eval_steps_per_second": 13.815, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 0.7562463441129773, |
|
"grad_norm": 17.16789436340332, |
|
"learning_rate": 1.2256440379290091e-05, |
|
"loss": 4.0108, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 0.7604245007102867, |
|
"grad_norm": 7.082775115966797, |
|
"learning_rate": 1.2046656037593354e-05, |
|
"loss": 3.9803, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 0.7604245007102867, |
|
"eval_loss": 3.807090997695923, |
|
"eval_runtime": 40.3824, |
|
"eval_samples_per_second": 110.494, |
|
"eval_steps_per_second": 13.818, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 0.7646026573075959, |
|
"grad_norm": 5.471470832824707, |
|
"learning_rate": 1.1836871695896619e-05, |
|
"loss": 3.9751, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 0.7687808139049052, |
|
"grad_norm": 10.89763355255127, |
|
"learning_rate": 1.1627087354199883e-05, |
|
"loss": 3.9095, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 0.7687808139049052, |
|
"eval_loss": 3.7957355976104736, |
|
"eval_runtime": 40.3893, |
|
"eval_samples_per_second": 110.475, |
|
"eval_steps_per_second": 13.816, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 0.7729589705022144, |
|
"grad_norm": 9.122933387756348, |
|
"learning_rate": 1.1417303012503146e-05, |
|
"loss": 3.9344, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 0.7771371270995237, |
|
"grad_norm": 12.458266258239746, |
|
"learning_rate": 1.1207518670806413e-05, |
|
"loss": 3.7938, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 0.7771371270995237, |
|
"eval_loss": 3.790499210357666, |
|
"eval_runtime": 40.3688, |
|
"eval_samples_per_second": 110.531, |
|
"eval_steps_per_second": 13.823, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 0.781315283696833, |
|
"grad_norm": 7.447702407836914, |
|
"learning_rate": 1.0997734329109676e-05, |
|
"loss": 3.9318, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 0.7854934402941423, |
|
"grad_norm": 5.904074668884277, |
|
"learning_rate": 1.078794998741294e-05, |
|
"loss": 3.851, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 0.7854934402941423, |
|
"eval_loss": 3.7797744274139404, |
|
"eval_runtime": 40.3311, |
|
"eval_samples_per_second": 110.634, |
|
"eval_steps_per_second": 13.835, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 0.7896715968914515, |
|
"grad_norm": 8.686776161193848, |
|
"learning_rate": 1.0578585214399597e-05, |
|
"loss": 3.886, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 0.7938497534887607, |
|
"grad_norm": 9.1967134475708, |
|
"learning_rate": 1.0368800872702862e-05, |
|
"loss": 3.8963, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 0.7938497534887607, |
|
"eval_loss": 3.777365207672119, |
|
"eval_runtime": 40.3669, |
|
"eval_samples_per_second": 110.536, |
|
"eval_steps_per_second": 13.823, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 0.79802791008607, |
|
"grad_norm": 8.713147163391113, |
|
"learning_rate": 1.0159016531006126e-05, |
|
"loss": 3.9161, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 0.8022060666833793, |
|
"grad_norm": 4.081088542938232, |
|
"learning_rate": 9.94923218930939e-06, |
|
"loss": 3.8978, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 0.8022060666833793, |
|
"eval_loss": 3.769115447998047, |
|
"eval_runtime": 40.3884, |
|
"eval_samples_per_second": 110.477, |
|
"eval_steps_per_second": 13.816, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 0.8063842232806886, |
|
"grad_norm": 6.48073673248291, |
|
"learning_rate": 9.739447847612655e-06, |
|
"loss": 3.8779, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 0.8105623798779978, |
|
"grad_norm": 10.993706703186035, |
|
"learning_rate": 9.530083074599312e-06, |
|
"loss": 3.8931, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 0.8105623798779978, |
|
"eval_loss": 3.7640254497528076, |
|
"eval_runtime": 40.412, |
|
"eval_samples_per_second": 110.413, |
|
"eval_steps_per_second": 13.808, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 0.8147405364753071, |
|
"grad_norm": 9.473855018615723, |
|
"learning_rate": 9.320298732902577e-06, |
|
"loss": 3.8533, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 0.8189186930726163, |
|
"grad_norm": 6.103699684143066, |
|
"learning_rate": 9.110933959889233e-06, |
|
"loss": 3.7507, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 0.8189186930726163, |
|
"eval_loss": 3.759049415588379, |
|
"eval_runtime": 40.3873, |
|
"eval_samples_per_second": 110.48, |
|
"eval_steps_per_second": 13.816, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 0.8230968496699256, |
|
"grad_norm": 7.411025524139404, |
|
"learning_rate": 8.9011496181925e-06, |
|
"loss": 3.8444, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 0.8272750062672349, |
|
"grad_norm": 10.114791870117188, |
|
"learning_rate": 8.691365276495763e-06, |
|
"loss": 3.8984, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 0.8272750062672349, |
|
"eval_loss": 3.753899574279785, |
|
"eval_runtime": 40.3896, |
|
"eval_samples_per_second": 110.474, |
|
"eval_steps_per_second": 13.815, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 0.8314531628645442, |
|
"grad_norm": 5.56119441986084, |
|
"learning_rate": 8.481580934799027e-06, |
|
"loss": 3.8441, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 0.8356313194618534, |
|
"grad_norm": 7.270823001861572, |
|
"learning_rate": 8.27179659310229e-06, |
|
"loss": 3.8968, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 0.8356313194618534, |
|
"eval_loss": 3.7469892501831055, |
|
"eval_runtime": 40.37, |
|
"eval_samples_per_second": 110.528, |
|
"eval_steps_per_second": 13.822, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 0.8398094760591627, |
|
"grad_norm": 5.920239448547363, |
|
"learning_rate": 8.062012251405555e-06, |
|
"loss": 3.8607, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 0.8439876326564719, |
|
"grad_norm": 6.5870442390441895, |
|
"learning_rate": 7.85222790970882e-06, |
|
"loss": 3.8543, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 0.8439876326564719, |
|
"eval_loss": 3.7423293590545654, |
|
"eval_runtime": 40.3917, |
|
"eval_samples_per_second": 110.468, |
|
"eval_steps_per_second": 13.815, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 0.8481657892537813, |
|
"grad_norm": 11.815503120422363, |
|
"learning_rate": 7.642443568012084e-06, |
|
"loss": 3.7859, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 0.8523439458510905, |
|
"grad_norm": 9.67881965637207, |
|
"learning_rate": 7.4330787949987425e-06, |
|
"loss": 3.8422, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 0.8523439458510905, |
|
"eval_loss": 3.739718198776245, |
|
"eval_runtime": 40.4029, |
|
"eval_samples_per_second": 110.438, |
|
"eval_steps_per_second": 13.811, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 0.8565221024483998, |
|
"grad_norm": 9.272939682006836, |
|
"learning_rate": 7.223294453302005e-06, |
|
"loss": 3.7835, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 0.860700259045709, |
|
"grad_norm": 5.5945000648498535, |
|
"learning_rate": 7.01351011160527e-06, |
|
"loss": 3.8318, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 0.860700259045709, |
|
"eval_loss": 3.7335968017578125, |
|
"eval_runtime": 40.3673, |
|
"eval_samples_per_second": 110.535, |
|
"eval_steps_per_second": 13.823, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 0.8648784156430183, |
|
"grad_norm": 9.059176445007324, |
|
"learning_rate": 6.803725769908534e-06, |
|
"loss": 3.822, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 0.8690565722403276, |
|
"grad_norm": 5.025524139404297, |
|
"learning_rate": 6.5939414282117985e-06, |
|
"loss": 3.7921, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 0.8690565722403276, |
|
"eval_loss": 3.726698637008667, |
|
"eval_runtime": 40.365, |
|
"eval_samples_per_second": 110.541, |
|
"eval_steps_per_second": 13.824, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 0.8732347288376369, |
|
"grad_norm": 6.096043586730957, |
|
"learning_rate": 6.384157086515063e-06, |
|
"loss": 3.7771, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 0.8774128854349461, |
|
"grad_norm": 10.153267860412598, |
|
"learning_rate": 6.174792313501721e-06, |
|
"loss": 3.7626, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 0.8774128854349461, |
|
"eval_loss": 3.728839635848999, |
|
"eval_runtime": 40.2024, |
|
"eval_samples_per_second": 110.988, |
|
"eval_steps_per_second": 13.88, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 0.8815910420322554, |
|
"grad_norm": 11.755720138549805, |
|
"learning_rate": 5.965007971804984e-06, |
|
"loss": 3.8455, |
|
"step": 105500 |
|
}, |
|
{ |
|
"epoch": 0.8857691986295646, |
|
"grad_norm": 7.943704605102539, |
|
"learning_rate": 5.755223630108249e-06, |
|
"loss": 3.8765, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 0.8857691986295646, |
|
"eval_loss": 3.7219812870025635, |
|
"eval_runtime": 40.2173, |
|
"eval_samples_per_second": 110.947, |
|
"eval_steps_per_second": 13.875, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 0.8899473552268738, |
|
"grad_norm": 3.804535150527954, |
|
"learning_rate": 5.545439288411513e-06, |
|
"loss": 3.8256, |
|
"step": 106500 |
|
}, |
|
{ |
|
"epoch": 0.8941255118241832, |
|
"grad_norm": 11.546579360961914, |
|
"learning_rate": 5.3356549467147775e-06, |
|
"loss": 3.7837, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 0.8941255118241832, |
|
"eval_loss": 3.7193517684936523, |
|
"eval_runtime": 40.3063, |
|
"eval_samples_per_second": 110.702, |
|
"eval_steps_per_second": 13.844, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 0.8983036684214925, |
|
"grad_norm": 5.996183395385742, |
|
"learning_rate": 5.125870605018042e-06, |
|
"loss": 3.8781, |
|
"step": 107500 |
|
}, |
|
{ |
|
"epoch": 0.9024818250188017, |
|
"grad_norm": 7.48162841796875, |
|
"learning_rate": 4.916086263321306e-06, |
|
"loss": 3.8106, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 0.9024818250188017, |
|
"eval_loss": 3.713139295578003, |
|
"eval_runtime": 40.3495, |
|
"eval_samples_per_second": 110.584, |
|
"eval_steps_per_second": 13.829, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 0.9066599816161109, |
|
"grad_norm": 6.757467746734619, |
|
"learning_rate": 4.706721490307963e-06, |
|
"loss": 5.0696, |
|
"step": 108500 |
|
}, |
|
{ |
|
"epoch": 0.9108381382134202, |
|
"grad_norm": 7.004515647888184, |
|
"learning_rate": 4.496937148611228e-06, |
|
"loss": 4.8587, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 0.9108381382134202, |
|
"eval_loss": 4.723120212554932, |
|
"eval_runtime": 40.383, |
|
"eval_samples_per_second": 110.492, |
|
"eval_steps_per_second": 13.818, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 0.9150162948107295, |
|
"grad_norm": 9.177141189575195, |
|
"learning_rate": 4.287152806914492e-06, |
|
"loss": 4.8224, |
|
"step": 109500 |
|
}, |
|
{ |
|
"epoch": 0.9191944514080388, |
|
"grad_norm": 5.546660900115967, |
|
"learning_rate": 4.0773684652177565e-06, |
|
"loss": 4.7712, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 0.9191944514080388, |
|
"eval_loss": 4.593838214874268, |
|
"eval_runtime": 40.3004, |
|
"eval_samples_per_second": 110.719, |
|
"eval_steps_per_second": 13.846, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 0.923372608005348, |
|
"grad_norm": 7.7168169021606445, |
|
"learning_rate": 3.868003692204414e-06, |
|
"loss": 4.7148, |
|
"step": 110500 |
|
}, |
|
{ |
|
"epoch": 0.9275507646026573, |
|
"grad_norm": 6.541009426116943, |
|
"learning_rate": 3.658219350507678e-06, |
|
"loss": 4.6866, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 0.9275507646026573, |
|
"eval_loss": 4.512722969055176, |
|
"eval_runtime": 40.2842, |
|
"eval_samples_per_second": 110.763, |
|
"eval_steps_per_second": 13.852, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 0.9317289211999665, |
|
"grad_norm": 8.877721786499023, |
|
"learning_rate": 3.448435008810943e-06, |
|
"loss": 4.6519, |
|
"step": 111500 |
|
}, |
|
{ |
|
"epoch": 0.9359070777972759, |
|
"grad_norm": 11.03339958190918, |
|
"learning_rate": 3.238650667114207e-06, |
|
"loss": 4.5986, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 0.9359070777972759, |
|
"eval_loss": 4.453566074371338, |
|
"eval_runtime": 40.3468, |
|
"eval_samples_per_second": 110.591, |
|
"eval_steps_per_second": 13.83, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 0.9400852343945851, |
|
"grad_norm": 14.915512084960938, |
|
"learning_rate": 3.0292858941008645e-06, |
|
"loss": 4.6582, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 0.9442633909918944, |
|
"grad_norm": 7.842238903045654, |
|
"learning_rate": 2.8199211210875224e-06, |
|
"loss": 4.6007, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 0.9442633909918944, |
|
"eval_loss": 4.413528919219971, |
|
"eval_runtime": 40.3191, |
|
"eval_samples_per_second": 110.667, |
|
"eval_steps_per_second": 13.84, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 0.9484415475892036, |
|
"grad_norm": 5.456047534942627, |
|
"learning_rate": 2.6101367793907866e-06, |
|
"loss": 4.5077, |
|
"step": 113500 |
|
}, |
|
{ |
|
"epoch": 0.9526197041865129, |
|
"grad_norm": 8.683294296264648, |
|
"learning_rate": 2.4003524376940504e-06, |
|
"loss": 4.5176, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 0.9526197041865129, |
|
"eval_loss": 4.385701656341553, |
|
"eval_runtime": 40.2816, |
|
"eval_samples_per_second": 110.77, |
|
"eval_steps_per_second": 13.852, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 0.9567978607838222, |
|
"grad_norm": 6.126138210296631, |
|
"learning_rate": 2.1905680959973146e-06, |
|
"loss": 4.4375, |
|
"step": 114500 |
|
}, |
|
{ |
|
"epoch": 0.9609760173811315, |
|
"grad_norm": 8.375338554382324, |
|
"learning_rate": 1.9807837543005793e-06, |
|
"loss": 4.505, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 0.9609760173811315, |
|
"eval_loss": 4.364550590515137, |
|
"eval_runtime": 40.3282, |
|
"eval_samples_per_second": 110.642, |
|
"eval_steps_per_second": 13.836, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 0.9651541739784407, |
|
"grad_norm": 10.08293628692627, |
|
"learning_rate": 1.7709994126038433e-06, |
|
"loss": 4.5584, |
|
"step": 115500 |
|
}, |
|
{ |
|
"epoch": 0.96933233057575, |
|
"grad_norm": 6.49351692199707, |
|
"learning_rate": 1.5616346395905012e-06, |
|
"loss": 4.411, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 0.96933233057575, |
|
"eval_loss": 4.347931385040283, |
|
"eval_runtime": 40.3323, |
|
"eval_samples_per_second": 110.631, |
|
"eval_steps_per_second": 13.835, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 0.9735104871730592, |
|
"grad_norm": 8.52354621887207, |
|
"learning_rate": 1.3518502978937652e-06, |
|
"loss": 4.4968, |
|
"step": 116500 |
|
}, |
|
{ |
|
"epoch": 0.9776886437703685, |
|
"grad_norm": 4.475661754608154, |
|
"learning_rate": 1.1420659561970296e-06, |
|
"loss": 4.4801, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 0.9776886437703685, |
|
"eval_loss": 4.335597991943359, |
|
"eval_runtime": 40.253, |
|
"eval_samples_per_second": 110.849, |
|
"eval_steps_per_second": 13.862, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 0.9818668003676778, |
|
"grad_norm": 7.370941638946533, |
|
"learning_rate": 9.322816145002936e-07, |
|
"loss": 4.4369, |
|
"step": 117500 |
|
}, |
|
{ |
|
"epoch": 0.9860449569649871, |
|
"grad_norm": 7.109913349151611, |
|
"learning_rate": 7.224972728035579e-07, |
|
"loss": 4.4866, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 0.9860449569649871, |
|
"eval_loss": 4.327831745147705, |
|
"eval_runtime": 40.3607, |
|
"eval_samples_per_second": 110.553, |
|
"eval_steps_per_second": 13.825, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 0.9902231135622963, |
|
"grad_norm": 6.5864434242248535, |
|
"learning_rate": 5.131324997902156e-07, |
|
"loss": 4.3809, |
|
"step": 118500 |
|
}, |
|
{ |
|
"epoch": 0.9944012701596056, |
|
"grad_norm": 12.8942232131958, |
|
"learning_rate": 3.0334815809347995e-07, |
|
"loss": 4.5074, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 0.9944012701596056, |
|
"eval_loss": 4.323913097381592, |
|
"eval_runtime": 40.3487, |
|
"eval_samples_per_second": 110.586, |
|
"eval_steps_per_second": 13.829, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 0.9985794267569148, |
|
"grad_norm": 5.209051132202148, |
|
"learning_rate": 9.356381639674414e-08, |
|
"loss": 4.528, |
|
"step": 119500 |
|
}, |
|
{ |
|
"epoch": 1.0027575833542242, |
|
"grad_norm": 5.586207866668701, |
|
"learning_rate": 3.7510351750386886e-05, |
|
"loss": 4.8288, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 1.0027575833542242, |
|
"eval_loss": 4.565053462982178, |
|
"eval_runtime": 40.4476, |
|
"eval_samples_per_second": 110.316, |
|
"eval_steps_per_second": 13.796, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 1.0069357399515333, |
|
"grad_norm": 7.037691593170166, |
|
"learning_rate": 3.745807018277636e-05, |
|
"loss": 4.5428, |
|
"step": 120500 |
|
}, |
|
{ |
|
"epoch": 1.0111138965488426, |
|
"grad_norm": 15.185105323791504, |
|
"learning_rate": 3.740589317830106e-05, |
|
"loss": 4.4273, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 1.0111138965488426, |
|
"eval_loss": 4.302701950073242, |
|
"eval_runtime": 40.408, |
|
"eval_samples_per_second": 110.424, |
|
"eval_steps_per_second": 13.809, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 1.015292053146152, |
|
"grad_norm": 6.465140342712402, |
|
"learning_rate": 3.735361161069054e-05, |
|
"loss": 4.4009, |
|
"step": 121500 |
|
}, |
|
{ |
|
"epoch": 1.0194702097434611, |
|
"grad_norm": 5.409151077270508, |
|
"learning_rate": 3.730133004308001e-05, |
|
"loss": 4.3574, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 1.0194702097434611, |
|
"eval_loss": 4.197505474090576, |
|
"eval_runtime": 40.4321, |
|
"eval_samples_per_second": 110.358, |
|
"eval_steps_per_second": 13.801, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 1.0236483663407705, |
|
"grad_norm": 3.3518075942993164, |
|
"learning_rate": 3.724904847546949e-05, |
|
"loss": 4.287, |
|
"step": 122500 |
|
}, |
|
{ |
|
"epoch": 1.0278265229380796, |
|
"grad_norm": 7.260291576385498, |
|
"learning_rate": 3.719676690785896e-05, |
|
"loss": 4.2858, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 1.0278265229380796, |
|
"eval_loss": 4.131897449493408, |
|
"eval_runtime": 40.5776, |
|
"eval_samples_per_second": 109.962, |
|
"eval_steps_per_second": 13.751, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 1.032004679535389, |
|
"grad_norm": 5.965569972991943, |
|
"learning_rate": 3.714448534024844e-05, |
|
"loss": 4.2024, |
|
"step": 123500 |
|
}, |
|
{ |
|
"epoch": 1.0361828361326983, |
|
"grad_norm": 6.48886251449585, |
|
"learning_rate": 3.709220377263792e-05, |
|
"loss": 4.2055, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 1.0361828361326983, |
|
"eval_loss": 4.078915596008301, |
|
"eval_runtime": 40.4564, |
|
"eval_samples_per_second": 110.291, |
|
"eval_steps_per_second": 13.793, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 1.0403609927300075, |
|
"grad_norm": 6.582276344299316, |
|
"learning_rate": 3.7040026768162615e-05, |
|
"loss": 4.1067, |
|
"step": 124500 |
|
}, |
|
{ |
|
"epoch": 1.0445391493273168, |
|
"grad_norm": 5.228335857391357, |
|
"learning_rate": 3.6987745200552094e-05, |
|
"loss": 4.1359, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 1.0445391493273168, |
|
"eval_loss": 4.038590908050537, |
|
"eval_runtime": 40.4792, |
|
"eval_samples_per_second": 110.229, |
|
"eval_steps_per_second": 13.785, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 1.048717305924626, |
|
"grad_norm": 8.54234790802002, |
|
"learning_rate": 3.6935463632941566e-05, |
|
"loss": 4.1414, |
|
"step": 125500 |
|
}, |
|
{ |
|
"epoch": 1.0528954625219353, |
|
"grad_norm": 6.923229694366455, |
|
"learning_rate": 3.688318206533105e-05, |
|
"loss": 4.1308, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 1.0528954625219353, |
|
"eval_loss": 3.986788272857666, |
|
"eval_runtime": 40.465, |
|
"eval_samples_per_second": 110.268, |
|
"eval_steps_per_second": 13.79, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 1.0570736191192447, |
|
"grad_norm": 8.614554405212402, |
|
"learning_rate": 3.683090049772053e-05, |
|
"loss": 4.1132, |
|
"step": 126500 |
|
}, |
|
{ |
|
"epoch": 1.0612517757165538, |
|
"grad_norm": 11.300436019897461, |
|
"learning_rate": 3.677861893011e-05, |
|
"loss": 4.0469, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 1.0612517757165538, |
|
"eval_loss": 3.9560070037841797, |
|
"eval_runtime": 40.5254, |
|
"eval_samples_per_second": 110.104, |
|
"eval_steps_per_second": 13.769, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 1.0654299323138632, |
|
"grad_norm": 6.241533279418945, |
|
"learning_rate": 3.672633736249948e-05, |
|
"loss": 4.0292, |
|
"step": 127500 |
|
}, |
|
{ |
|
"epoch": 1.0696080889111723, |
|
"grad_norm": 4.231565952301025, |
|
"learning_rate": 3.6674055794888954e-05, |
|
"loss": 4.0256, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 1.0696080889111723, |
|
"eval_loss": 3.9226207733154297, |
|
"eval_runtime": 40.4419, |
|
"eval_samples_per_second": 110.331, |
|
"eval_steps_per_second": 13.798, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 1.0737862455084817, |
|
"grad_norm": 6.553409099578857, |
|
"learning_rate": 3.662177422727843e-05, |
|
"loss": 3.9736, |
|
"step": 128500 |
|
}, |
|
{ |
|
"epoch": 1.077964402105791, |
|
"grad_norm": 5.839107990264893, |
|
"learning_rate": 3.6569597222803135e-05, |
|
"loss": 4.0075, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 1.077964402105791, |
|
"eval_loss": 3.9092047214508057, |
|
"eval_runtime": 40.4523, |
|
"eval_samples_per_second": 110.303, |
|
"eval_steps_per_second": 13.794, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 1.0821425587031002, |
|
"grad_norm": 9.884592056274414, |
|
"learning_rate": 3.651731565519261e-05, |
|
"loss": 4.0616, |
|
"step": 129500 |
|
}, |
|
{ |
|
"epoch": 1.0863207153004095, |
|
"grad_norm": 9.318048477172852, |
|
"learning_rate": 3.6465034087582086e-05, |
|
"loss": 3.9327, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 1.0863207153004095, |
|
"eval_loss": 3.877835988998413, |
|
"eval_runtime": 40.4173, |
|
"eval_samples_per_second": 110.398, |
|
"eval_steps_per_second": 13.806, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 1.0904988718977187, |
|
"grad_norm": 5.386814117431641, |
|
"learning_rate": 3.641275251997156e-05, |
|
"loss": 3.9593, |
|
"step": 130500 |
|
}, |
|
{ |
|
"epoch": 1.094677028495028, |
|
"grad_norm": 6.430805683135986, |
|
"learning_rate": 3.636057551549626e-05, |
|
"loss": 3.8757, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 1.094677028495028, |
|
"eval_loss": 3.8680901527404785, |
|
"eval_runtime": 40.4859, |
|
"eval_samples_per_second": 110.211, |
|
"eval_steps_per_second": 13.783, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 1.0988551850923374, |
|
"grad_norm": 4.691482067108154, |
|
"learning_rate": 3.630829394788574e-05, |
|
"loss": 3.9689, |
|
"step": 131500 |
|
}, |
|
{ |
|
"epoch": 1.1030333416896465, |
|
"grad_norm": 5.442196369171143, |
|
"learning_rate": 3.625601238027521e-05, |
|
"loss": 3.9561, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 1.1030333416896465, |
|
"eval_loss": 3.8496084213256836, |
|
"eval_runtime": 40.4836, |
|
"eval_samples_per_second": 110.217, |
|
"eval_steps_per_second": 13.783, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 1.1072114982869559, |
|
"grad_norm": 5.613692760467529, |
|
"learning_rate": 3.620373081266469e-05, |
|
"loss": 3.9522, |
|
"step": 132500 |
|
}, |
|
{ |
|
"epoch": 1.111389654884265, |
|
"grad_norm": 6.412971496582031, |
|
"learning_rate": 3.6151553808189384e-05, |
|
"loss": 3.9606, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 1.111389654884265, |
|
"eval_loss": 3.827300786972046, |
|
"eval_runtime": 40.4412, |
|
"eval_samples_per_second": 110.333, |
|
"eval_steps_per_second": 13.798, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 1.1155678114815744, |
|
"grad_norm": 5.4674072265625, |
|
"learning_rate": 3.609927224057886e-05, |
|
"loss": 4.018, |
|
"step": 133500 |
|
}, |
|
{ |
|
"epoch": 1.1197459680788837, |
|
"grad_norm": 8.124231338500977, |
|
"learning_rate": 3.604699067296834e-05, |
|
"loss": 3.9462, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 1.1197459680788837, |
|
"eval_loss": 3.802276611328125, |
|
"eval_runtime": 40.4531, |
|
"eval_samples_per_second": 110.301, |
|
"eval_steps_per_second": 13.794, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 1.1239241246761928, |
|
"grad_norm": 6.545614719390869, |
|
"learning_rate": 3.5994709105357814e-05, |
|
"loss": 3.9833, |
|
"step": 134500 |
|
}, |
|
{ |
|
"epoch": 1.1281022812735022, |
|
"grad_norm": 4.819684028625488, |
|
"learning_rate": 3.5942532100882516e-05, |
|
"loss": 3.8276, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 1.1281022812735022, |
|
"eval_loss": 3.7954277992248535, |
|
"eval_runtime": 40.4488, |
|
"eval_samples_per_second": 110.312, |
|
"eval_steps_per_second": 13.795, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 1.1322804378708113, |
|
"grad_norm": 5.222882270812988, |
|
"learning_rate": 3.589035509640721e-05, |
|
"loss": 3.8508, |
|
"step": 135500 |
|
}, |
|
{ |
|
"epoch": 1.1364585944681207, |
|
"grad_norm": 4.044717788696289, |
|
"learning_rate": 3.583807352879669e-05, |
|
"loss": 3.9272, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 1.1364585944681207, |
|
"eval_loss": 3.774327039718628, |
|
"eval_runtime": 40.444, |
|
"eval_samples_per_second": 110.325, |
|
"eval_steps_per_second": 13.797, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 1.14063675106543, |
|
"grad_norm": 7.321952819824219, |
|
"learning_rate": 3.578579196118617e-05, |
|
"loss": 3.9062, |
|
"step": 136500 |
|
}, |
|
{ |
|
"epoch": 1.1448149076627392, |
|
"grad_norm": 7.0700297355651855, |
|
"learning_rate": 3.573351039357564e-05, |
|
"loss": 3.9186, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 1.1448149076627392, |
|
"eval_loss": 3.7648792266845703, |
|
"eval_runtime": 40.4894, |
|
"eval_samples_per_second": 110.202, |
|
"eval_steps_per_second": 13.781, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 1.1489930642600485, |
|
"grad_norm": 10.587075233459473, |
|
"learning_rate": 3.568122882596512e-05, |
|
"loss": 3.8594, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 1.1531712208573577, |
|
"grad_norm": 9.123832702636719, |
|
"learning_rate": 3.562894725835459e-05, |
|
"loss": 3.7987, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 1.1531712208573577, |
|
"eval_loss": 3.750222682952881, |
|
"eval_runtime": 40.4606, |
|
"eval_samples_per_second": 110.28, |
|
"eval_steps_per_second": 13.791, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 1.157349377454667, |
|
"grad_norm": 9.549878120422363, |
|
"learning_rate": 3.557666569074407e-05, |
|
"loss": 3.8156, |
|
"step": 138500 |
|
}, |
|
{ |
|
"epoch": 1.1615275340519762, |
|
"grad_norm": 7.350008964538574, |
|
"learning_rate": 3.552438412313355e-05, |
|
"loss": 3.9018, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 1.1615275340519762, |
|
"eval_loss": 3.7414135932922363, |
|
"eval_runtime": 40.4584, |
|
"eval_samples_per_second": 110.286, |
|
"eval_steps_per_second": 13.792, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 1.1657056906492855, |
|
"grad_norm": 8.974669456481934, |
|
"learning_rate": 3.5472207118658245e-05, |
|
"loss": 3.833, |
|
"step": 139500 |
|
}, |
|
{ |
|
"epoch": 1.1698838472465949, |
|
"grad_norm": 9.602900505065918, |
|
"learning_rate": 3.5419925551047724e-05, |
|
"loss": 3.829, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 1.1698838472465949, |
|
"eval_loss": 3.735084056854248, |
|
"eval_runtime": 40.431, |
|
"eval_samples_per_second": 110.361, |
|
"eval_steps_per_second": 13.801, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 1.174062003843904, |
|
"grad_norm": 4.472715854644775, |
|
"learning_rate": 3.5367643983437196e-05, |
|
"loss": 3.8553, |
|
"step": 140500 |
|
}, |
|
{ |
|
"epoch": 1.1782401604412134, |
|
"grad_norm": 4.927888870239258, |
|
"learning_rate": 3.531536241582668e-05, |
|
"loss": 3.8512, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 1.1782401604412134, |
|
"eval_loss": 3.724465847015381, |
|
"eval_runtime": 40.4785, |
|
"eval_samples_per_second": 110.231, |
|
"eval_steps_per_second": 13.785, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 1.1824183170385225, |
|
"grad_norm": 7.687999725341797, |
|
"learning_rate": 3.526318541135138e-05, |
|
"loss": 3.8196, |
|
"step": 141500 |
|
}, |
|
{ |
|
"epoch": 1.1865964736358319, |
|
"grad_norm": 5.985261917114258, |
|
"learning_rate": 3.521090384374085e-05, |
|
"loss": 3.842, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 1.1865964736358319, |
|
"eval_loss": 3.7113516330718994, |
|
"eval_runtime": 40.4693, |
|
"eval_samples_per_second": 110.256, |
|
"eval_steps_per_second": 13.788, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 1.1907746302331412, |
|
"grad_norm": 6.08819055557251, |
|
"learning_rate": 3.5158622276130335e-05, |
|
"loss": 3.852, |
|
"step": 142500 |
|
}, |
|
{ |
|
"epoch": 1.1949527868304504, |
|
"grad_norm": 8.868494033813477, |
|
"learning_rate": 3.510634070851981e-05, |
|
"loss": 3.7532, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 1.1949527868304504, |
|
"eval_loss": 3.7093913555145264, |
|
"eval_runtime": 40.4739, |
|
"eval_samples_per_second": 110.244, |
|
"eval_steps_per_second": 13.787, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 1.1991309434277597, |
|
"grad_norm": 7.246727466583252, |
|
"learning_rate": 3.50541637040445e-05, |
|
"loss": 3.7916, |
|
"step": 143500 |
|
}, |
|
{ |
|
"epoch": 1.2033091000250689, |
|
"grad_norm": 5.877631187438965, |
|
"learning_rate": 3.500188213643398e-05, |
|
"loss": 3.7639, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 1.2033091000250689, |
|
"eval_loss": 3.6901369094848633, |
|
"eval_runtime": 40.4322, |
|
"eval_samples_per_second": 110.358, |
|
"eval_steps_per_second": 13.801, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 1.2074872566223782, |
|
"grad_norm": 9.112384796142578, |
|
"learning_rate": 3.494960056882346e-05, |
|
"loss": 3.7623, |
|
"step": 144500 |
|
}, |
|
{ |
|
"epoch": 1.2116654132196876, |
|
"grad_norm": 5.740987777709961, |
|
"learning_rate": 3.489731900121294e-05, |
|
"loss": 3.8049, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 1.2116654132196876, |
|
"eval_loss": 3.6749048233032227, |
|
"eval_runtime": 40.4704, |
|
"eval_samples_per_second": 110.254, |
|
"eval_steps_per_second": 13.788, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 1.2158435698169967, |
|
"grad_norm": 3.883148431777954, |
|
"learning_rate": 3.4845141996737627e-05, |
|
"loss": 3.7315, |
|
"step": 145500 |
|
}, |
|
{ |
|
"epoch": 1.220021726414306, |
|
"grad_norm": 5.081009387969971, |
|
"learning_rate": 3.479286042912711e-05, |
|
"loss": 3.7657, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 1.220021726414306, |
|
"eval_loss": 3.678842306137085, |
|
"eval_runtime": 40.4772, |
|
"eval_samples_per_second": 110.235, |
|
"eval_steps_per_second": 13.786, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 1.2241998830116152, |
|
"grad_norm": 4.904498100280762, |
|
"learning_rate": 3.4740578861516584e-05, |
|
"loss": 3.7378, |
|
"step": 146500 |
|
}, |
|
{ |
|
"epoch": 1.2283780396089246, |
|
"grad_norm": 6.102782249450684, |
|
"learning_rate": 3.468829729390606e-05, |
|
"loss": 3.728, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 1.2283780396089246, |
|
"eval_loss": 3.6656246185302734, |
|
"eval_runtime": 40.4863, |
|
"eval_samples_per_second": 110.21, |
|
"eval_steps_per_second": 13.782, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 1.2325561962062337, |
|
"grad_norm": 6.608770847320557, |
|
"learning_rate": 3.463601572629554e-05, |
|
"loss": 3.7734, |
|
"step": 147500 |
|
}, |
|
{ |
|
"epoch": 1.236734352803543, |
|
"grad_norm": 10.70559310913086, |
|
"learning_rate": 3.458383872182024e-05, |
|
"loss": 3.7575, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 1.236734352803543, |
|
"eval_loss": 3.65804123878479, |
|
"eval_runtime": 40.4413, |
|
"eval_samples_per_second": 110.333, |
|
"eval_steps_per_second": 13.798, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 1.2409125094008524, |
|
"grad_norm": 4.311787128448486, |
|
"learning_rate": 3.4531557154209716e-05, |
|
"loss": 3.7713, |
|
"step": 148500 |
|
}, |
|
{ |
|
"epoch": 1.2450906659981615, |
|
"grad_norm": 5.005003929138184, |
|
"learning_rate": 3.447927558659919e-05, |
|
"loss": 3.7568, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 1.2450906659981615, |
|
"eval_loss": 3.6577367782592773, |
|
"eval_runtime": 40.4197, |
|
"eval_samples_per_second": 110.392, |
|
"eval_steps_per_second": 13.805, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 1.249268822595471, |
|
"grad_norm": 5.860953330993652, |
|
"learning_rate": 3.442709858212389e-05, |
|
"loss": 3.8178, |
|
"step": 149500 |
|
}, |
|
{ |
|
"epoch": 1.25344697919278, |
|
"grad_norm": 7.703333377838135, |
|
"learning_rate": 3.437481701451337e-05, |
|
"loss": 3.6736, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 1.25344697919278, |
|
"eval_loss": 3.6417226791381836, |
|
"eval_runtime": 40.4622, |
|
"eval_samples_per_second": 110.276, |
|
"eval_steps_per_second": 13.791, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 1.2576251357900894, |
|
"grad_norm": 4.65405797958374, |
|
"learning_rate": 3.432253544690284e-05, |
|
"loss": 3.5936, |
|
"step": 150500 |
|
}, |
|
{ |
|
"epoch": 1.2618032923873987, |
|
"grad_norm": 9.61950969696045, |
|
"learning_rate": 3.427025387929232e-05, |
|
"loss": 3.7404, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 1.2618032923873987, |
|
"eval_loss": 3.631601333618164, |
|
"eval_runtime": 40.4481, |
|
"eval_samples_per_second": 110.314, |
|
"eval_steps_per_second": 13.795, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 1.2659814489847079, |
|
"grad_norm": 4.689013957977295, |
|
"learning_rate": 3.421797231168179e-05, |
|
"loss": 3.7412, |
|
"step": 151500 |
|
}, |
|
{ |
|
"epoch": 1.2701596055820172, |
|
"grad_norm": 5.773473739624023, |
|
"learning_rate": 3.4165795307206494e-05, |
|
"loss": 3.749, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 1.2701596055820172, |
|
"eval_loss": 3.6227645874023438, |
|
"eval_runtime": 40.4716, |
|
"eval_samples_per_second": 110.25, |
|
"eval_steps_per_second": 13.787, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 1.2743377621793264, |
|
"grad_norm": 4.025112152099609, |
|
"learning_rate": 3.411351373959597e-05, |
|
"loss": 3.688, |
|
"step": 152500 |
|
}, |
|
{ |
|
"epoch": 1.2785159187766357, |
|
"grad_norm": 4.763550281524658, |
|
"learning_rate": 3.4061232171985445e-05, |
|
"loss": 3.7055, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 1.2785159187766357, |
|
"eval_loss": 3.6147379875183105, |
|
"eval_runtime": 40.4216, |
|
"eval_samples_per_second": 110.387, |
|
"eval_steps_per_second": 13.804, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 1.282694075373945, |
|
"grad_norm": 6.419916152954102, |
|
"learning_rate": 3.4008950604374924e-05, |
|
"loss": 3.6067, |
|
"step": 153500 |
|
}, |
|
{ |
|
"epoch": 1.2868722319712542, |
|
"grad_norm": 4.361742973327637, |
|
"learning_rate": 3.3956669036764396e-05, |
|
"loss": 3.7258, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 1.2868722319712542, |
|
"eval_loss": 3.607306957244873, |
|
"eval_runtime": 40.4593, |
|
"eval_samples_per_second": 110.284, |
|
"eval_steps_per_second": 13.792, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 1.2910503885685636, |
|
"grad_norm": 6.296881675720215, |
|
"learning_rate": 3.3904387469153875e-05, |
|
"loss": 3.6885, |
|
"step": 154500 |
|
}, |
|
{ |
|
"epoch": 1.2952285451658727, |
|
"grad_norm": 4.604855537414551, |
|
"learning_rate": 3.3852105901543354e-05, |
|
"loss": 3.6569, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 1.2952285451658727, |
|
"eval_loss": 3.604320764541626, |
|
"eval_runtime": 40.4612, |
|
"eval_samples_per_second": 110.279, |
|
"eval_steps_per_second": 13.791, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 1.299406701763182, |
|
"grad_norm": 4.907881736755371, |
|
"learning_rate": 3.3799824333932826e-05, |
|
"loss": 3.6488, |
|
"step": 155500 |
|
}, |
|
{ |
|
"epoch": 1.3035848583604914, |
|
"grad_norm": 5.200791358947754, |
|
"learning_rate": 3.374764732945753e-05, |
|
"loss": 3.7493, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 1.3035848583604914, |
|
"eval_loss": 3.596531629562378, |
|
"eval_runtime": 40.4463, |
|
"eval_samples_per_second": 110.319, |
|
"eval_steps_per_second": 13.796, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 1.3077630149578006, |
|
"grad_norm": 6.453032970428467, |
|
"learning_rate": 3.3695365761847e-05, |
|
"loss": 3.6468, |
|
"step": 156500 |
|
}, |
|
{ |
|
"epoch": 1.31194117155511, |
|
"grad_norm": 5.0477142333984375, |
|
"learning_rate": 3.364308419423648e-05, |
|
"loss": 3.6687, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 1.31194117155511, |
|
"eval_loss": 3.5927352905273438, |
|
"eval_runtime": 40.4591, |
|
"eval_samples_per_second": 110.284, |
|
"eval_steps_per_second": 13.792, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 1.316119328152419, |
|
"grad_norm": 8.696310997009277, |
|
"learning_rate": 3.359080262662596e-05, |
|
"loss": 3.6277, |
|
"step": 157500 |
|
}, |
|
{ |
|
"epoch": 1.3202974847497284, |
|
"grad_norm": 4.83682107925415, |
|
"learning_rate": 3.353862562215065e-05, |
|
"loss": 3.6981, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 1.3202974847497284, |
|
"eval_loss": 3.5905144214630127, |
|
"eval_runtime": 40.438, |
|
"eval_samples_per_second": 110.342, |
|
"eval_steps_per_second": 13.799, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 1.3244756413470378, |
|
"grad_norm": 5.432042121887207, |
|
"learning_rate": 3.348634405454013e-05, |
|
"loss": 3.598, |
|
"step": 158500 |
|
}, |
|
{ |
|
"epoch": 1.328653797944347, |
|
"grad_norm": 6.204931735992432, |
|
"learning_rate": 3.3434062486929604e-05, |
|
"loss": 3.6486, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 1.328653797944347, |
|
"eval_loss": 3.581803321838379, |
|
"eval_runtime": 40.4743, |
|
"eval_samples_per_second": 110.243, |
|
"eval_steps_per_second": 13.787, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 1.3328319545416563, |
|
"grad_norm": 6.183650016784668, |
|
"learning_rate": 3.338178091931909e-05, |
|
"loss": 3.7491, |
|
"step": 159500 |
|
}, |
|
{ |
|
"epoch": 1.3370101111389654, |
|
"grad_norm": 7.474965572357178, |
|
"learning_rate": 3.3329603914843784e-05, |
|
"loss": 3.6358, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 1.3370101111389654, |
|
"eval_loss": 3.5771028995513916, |
|
"eval_runtime": 40.4482, |
|
"eval_samples_per_second": 110.314, |
|
"eval_steps_per_second": 13.795, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 1.3411882677362748, |
|
"grad_norm": 5.560182094573975, |
|
"learning_rate": 3.3277322347233256e-05, |
|
"loss": 3.6578, |
|
"step": 160500 |
|
}, |
|
{ |
|
"epoch": 1.345366424333584, |
|
"grad_norm": 7.701005458831787, |
|
"learning_rate": 3.322504077962274e-05, |
|
"loss": 3.5839, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 1.345366424333584, |
|
"eval_loss": 3.5681257247924805, |
|
"eval_runtime": 40.4648, |
|
"eval_samples_per_second": 110.269, |
|
"eval_steps_per_second": 13.79, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 1.3495445809308932, |
|
"grad_norm": 2.855236053466797, |
|
"learning_rate": 3.3172759212012214e-05, |
|
"loss": 3.6106, |
|
"step": 161500 |
|
}, |
|
{ |
|
"epoch": 1.3537227375282026, |
|
"grad_norm": 7.088588714599609, |
|
"learning_rate": 3.312058220753691e-05, |
|
"loss": 3.5712, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 1.3537227375282026, |
|
"eval_loss": 3.5612449645996094, |
|
"eval_runtime": 40.457, |
|
"eval_samples_per_second": 110.29, |
|
"eval_steps_per_second": 13.792, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 1.3579008941255117, |
|
"grad_norm": 5.757685661315918, |
|
"learning_rate": 3.3068300639926395e-05, |
|
"loss": 3.6394, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 1.362079050722821, |
|
"grad_norm": 3.8591792583465576, |
|
"learning_rate": 3.301601907231587e-05, |
|
"loss": 3.6229, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 1.362079050722821, |
|
"eval_loss": 3.562960624694824, |
|
"eval_runtime": 40.456, |
|
"eval_samples_per_second": 110.293, |
|
"eval_steps_per_second": 13.793, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 1.3662572073201305, |
|
"grad_norm": 5.039220809936523, |
|
"learning_rate": 3.2963737504705346e-05, |
|
"loss": 3.5562, |
|
"step": 163500 |
|
}, |
|
{ |
|
"epoch": 1.3704353639174396, |
|
"grad_norm": 3.936936616897583, |
|
"learning_rate": 3.291145593709482e-05, |
|
"loss": 3.5953, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 1.3704353639174396, |
|
"eval_loss": 3.555675983428955, |
|
"eval_runtime": 40.4593, |
|
"eval_samples_per_second": 110.284, |
|
"eval_steps_per_second": 13.792, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 1.374613520514749, |
|
"grad_norm": 5.979030609130859, |
|
"learning_rate": 3.28591743694843e-05, |
|
"loss": 3.7403, |
|
"step": 164500 |
|
}, |
|
{ |
|
"epoch": 1.378791677112058, |
|
"grad_norm": 3.9190635681152344, |
|
"learning_rate": 3.2806892801873776e-05, |
|
"loss": 3.6413, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 1.378791677112058, |
|
"eval_loss": 3.5578091144561768, |
|
"eval_runtime": 40.4291, |
|
"eval_samples_per_second": 110.366, |
|
"eval_steps_per_second": 13.802, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 1.3829698337093674, |
|
"grad_norm": 3.9135348796844482, |
|
"learning_rate": 3.275461123426325e-05, |
|
"loss": 3.5954, |
|
"step": 165500 |
|
}, |
|
{ |
|
"epoch": 1.3871479903066768, |
|
"grad_norm": 4.480642795562744, |
|
"learning_rate": 3.270243422978795e-05, |
|
"loss": 3.6482, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 1.3871479903066768, |
|
"eval_loss": 3.5539774894714355, |
|
"eval_runtime": 40.4703, |
|
"eval_samples_per_second": 110.254, |
|
"eval_steps_per_second": 13.788, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 1.391326146903986, |
|
"grad_norm": 4.073483467102051, |
|
"learning_rate": 3.2650257225312645e-05, |
|
"loss": 3.6099, |
|
"step": 166500 |
|
}, |
|
{ |
|
"epoch": 1.3955043035012953, |
|
"grad_norm": 6.1564860343933105, |
|
"learning_rate": 3.2597975657702124e-05, |
|
"loss": 3.5885, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 1.3955043035012953, |
|
"eval_loss": 3.55012583732605, |
|
"eval_runtime": 40.43, |
|
"eval_samples_per_second": 110.364, |
|
"eval_steps_per_second": 13.802, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 1.3996824600986044, |
|
"grad_norm": 5.0414605140686035, |
|
"learning_rate": 3.25456940900916e-05, |
|
"loss": 3.6201, |
|
"step": 167500 |
|
}, |
|
{ |
|
"epoch": 1.4038606166959138, |
|
"grad_norm": 4.474603652954102, |
|
"learning_rate": 3.2493412522481075e-05, |
|
"loss": 3.6112, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 1.4038606166959138, |
|
"eval_loss": 3.5365307331085205, |
|
"eval_runtime": 40.4685, |
|
"eval_samples_per_second": 110.259, |
|
"eval_steps_per_second": 13.789, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 1.4080387732932231, |
|
"grad_norm": 5.0425801277160645, |
|
"learning_rate": 3.2441130954870554e-05, |
|
"loss": 3.5891, |
|
"step": 168500 |
|
}, |
|
{ |
|
"epoch": 1.4122169298905323, |
|
"grad_norm": 4.1331915855407715, |
|
"learning_rate": 3.2388849387260026e-05, |
|
"loss": 3.5289, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 1.4122169298905323, |
|
"eval_loss": 3.5384674072265625, |
|
"eval_runtime": 40.5065, |
|
"eval_samples_per_second": 110.155, |
|
"eval_steps_per_second": 13.776, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 1.4163950864878416, |
|
"grad_norm": 5.106828212738037, |
|
"learning_rate": 3.2336567819649505e-05, |
|
"loss": 3.6218, |
|
"step": 169500 |
|
}, |
|
{ |
|
"epoch": 1.4205732430851508, |
|
"grad_norm": 7.320224761962891, |
|
"learning_rate": 3.2284390815174207e-05, |
|
"loss": 3.6387, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 1.4205732430851508, |
|
"eval_loss": 3.5212485790252686, |
|
"eval_runtime": 40.4469, |
|
"eval_samples_per_second": 110.317, |
|
"eval_steps_per_second": 13.796, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 1.4247513996824601, |
|
"grad_norm": 5.692435264587402, |
|
"learning_rate": 3.223210924756368e-05, |
|
"loss": 3.6134, |
|
"step": 170500 |
|
}, |
|
{ |
|
"epoch": 1.4289295562797695, |
|
"grad_norm": 6.391637325286865, |
|
"learning_rate": 3.217982767995316e-05, |
|
"loss": 3.6322, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 1.4289295562797695, |
|
"eval_loss": 3.5183897018432617, |
|
"eval_runtime": 41.7439, |
|
"eval_samples_per_second": 106.89, |
|
"eval_steps_per_second": 13.367, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 1.4331077128770786, |
|
"grad_norm": 9.630993843078613, |
|
"learning_rate": 3.212754611234263e-05, |
|
"loss": 3.5568, |
|
"step": 171500 |
|
}, |
|
{ |
|
"epoch": 1.437285869474388, |
|
"grad_norm": 4.306568145751953, |
|
"learning_rate": 3.207526454473211e-05, |
|
"loss": 3.6337, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 1.437285869474388, |
|
"eval_loss": 3.511368751525879, |
|
"eval_runtime": 41.0631, |
|
"eval_samples_per_second": 108.662, |
|
"eval_steps_per_second": 13.589, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 1.441464026071697, |
|
"grad_norm": 4.794216156005859, |
|
"learning_rate": 3.202298297712159e-05, |
|
"loss": 3.6292, |
|
"step": 172500 |
|
}, |
|
{ |
|
"epoch": 1.4456421826690065, |
|
"grad_norm": 8.913003921508789, |
|
"learning_rate": 3.1970701409511067e-05, |
|
"loss": 3.5451, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 1.4456421826690065, |
|
"eval_loss": 3.5066542625427246, |
|
"eval_runtime": 40.4633, |
|
"eval_samples_per_second": 110.273, |
|
"eval_steps_per_second": 13.79, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 1.4498203392663158, |
|
"grad_norm": 4.385974884033203, |
|
"learning_rate": 3.1918419841900545e-05, |
|
"loss": 3.5534, |
|
"step": 173500 |
|
}, |
|
{ |
|
"epoch": 1.453998495863625, |
|
"grad_norm": 3.306346893310547, |
|
"learning_rate": 3.186613827429002e-05, |
|
"loss": 3.5008, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 1.453998495863625, |
|
"eval_loss": 3.496458053588867, |
|
"eval_runtime": 40.461, |
|
"eval_samples_per_second": 110.279, |
|
"eval_steps_per_second": 13.791, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 1.4581766524609343, |
|
"grad_norm": 6.9272050857543945, |
|
"learning_rate": 3.181396126981472e-05, |
|
"loss": 3.6206, |
|
"step": 174500 |
|
}, |
|
{ |
|
"epoch": 1.4623548090582434, |
|
"grad_norm": 6.005661964416504, |
|
"learning_rate": 3.176167970220419e-05, |
|
"loss": 3.554, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 1.4623548090582434, |
|
"eval_loss": 3.4955668449401855, |
|
"eval_runtime": 40.47, |
|
"eval_samples_per_second": 110.255, |
|
"eval_steps_per_second": 13.788, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 1.4665329656555528, |
|
"grad_norm": 5.330180644989014, |
|
"learning_rate": 3.170939813459367e-05, |
|
"loss": 3.5613, |
|
"step": 175500 |
|
}, |
|
{ |
|
"epoch": 1.4707111222528622, |
|
"grad_norm": 4.7661027908325195, |
|
"learning_rate": 3.165711656698315e-05, |
|
"loss": 3.6066, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 1.4707111222528622, |
|
"eval_loss": 3.491217851638794, |
|
"eval_runtime": 40.7953, |
|
"eval_samples_per_second": 109.375, |
|
"eval_steps_per_second": 13.678, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 1.4748892788501713, |
|
"grad_norm": 5.372822284698486, |
|
"learning_rate": 3.160483499937262e-05, |
|
"loss": 3.5948, |
|
"step": 176500 |
|
}, |
|
{ |
|
"epoch": 1.4790674354474806, |
|
"grad_norm": 4.185611248016357, |
|
"learning_rate": 3.155265799489732e-05, |
|
"loss": 3.5511, |
|
"step": 177000 |
|
}, |
|
{ |
|
"epoch": 1.4790674354474806, |
|
"eval_loss": 3.4882309436798096, |
|
"eval_runtime": 40.4344, |
|
"eval_samples_per_second": 110.352, |
|
"eval_steps_per_second": 13.8, |
|
"step": 177000 |
|
}, |
|
{ |
|
"epoch": 1.4832455920447898, |
|
"grad_norm": 4.737360000610352, |
|
"learning_rate": 3.1500376427286795e-05, |
|
"loss": 3.5946, |
|
"step": 177500 |
|
}, |
|
{ |
|
"epoch": 1.4874237486420991, |
|
"grad_norm": 6.088687419891357, |
|
"learning_rate": 3.1448094859676274e-05, |
|
"loss": 3.6226, |
|
"step": 178000 |
|
}, |
|
{ |
|
"epoch": 1.4874237486420991, |
|
"eval_loss": 3.4909913539886475, |
|
"eval_runtime": 40.4208, |
|
"eval_samples_per_second": 110.389, |
|
"eval_steps_per_second": 13.805, |
|
"step": 178000 |
|
}, |
|
{ |
|
"epoch": 1.4916019052394085, |
|
"grad_norm": 4.8783369064331055, |
|
"learning_rate": 3.139581329206575e-05, |
|
"loss": 3.5074, |
|
"step": 178500 |
|
}, |
|
{ |
|
"epoch": 1.4957800618367176, |
|
"grad_norm": 4.050315856933594, |
|
"learning_rate": 3.134363628759045e-05, |
|
"loss": 3.5209, |
|
"step": 179000 |
|
}, |
|
{ |
|
"epoch": 1.4957800618367176, |
|
"eval_loss": 3.4775376319885254, |
|
"eval_runtime": 40.4543, |
|
"eval_samples_per_second": 110.297, |
|
"eval_steps_per_second": 13.793, |
|
"step": 179000 |
|
}, |
|
{ |
|
"epoch": 1.4999582184340268, |
|
"grad_norm": 7.867382526397705, |
|
"learning_rate": 3.129135471997993e-05, |
|
"loss": 3.5293, |
|
"step": 179500 |
|
}, |
|
{ |
|
"epoch": 1.5041363750313361, |
|
"grad_norm": 4.2094597816467285, |
|
"learning_rate": 3.12390731523694e-05, |
|
"loss": 3.5055, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 1.5041363750313361, |
|
"eval_loss": 3.4833099842071533, |
|
"eval_runtime": 40.4847, |
|
"eval_samples_per_second": 110.214, |
|
"eval_steps_per_second": 13.783, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 1.5083145316286455, |
|
"grad_norm": 5.466799259185791, |
|
"learning_rate": 3.118679158475888e-05, |
|
"loss": 3.4833, |
|
"step": 180500 |
|
}, |
|
{ |
|
"epoch": 1.5124926882259548, |
|
"grad_norm": 8.360751152038574, |
|
"learning_rate": 3.113461458028358e-05, |
|
"loss": 3.4472, |
|
"step": 181000 |
|
}, |
|
{ |
|
"epoch": 1.5124926882259548, |
|
"eval_loss": 3.4807190895080566, |
|
"eval_runtime": 40.437, |
|
"eval_samples_per_second": 110.345, |
|
"eval_steps_per_second": 13.799, |
|
"step": 181000 |
|
}, |
|
{ |
|
"epoch": 1.516670844823264, |
|
"grad_norm": 4.71815824508667, |
|
"learning_rate": 3.108233301267305e-05, |
|
"loss": 3.5698, |
|
"step": 181500 |
|
}, |
|
{ |
|
"epoch": 1.520849001420573, |
|
"grad_norm": 5.6605658531188965, |
|
"learning_rate": 3.103005144506253e-05, |
|
"loss": 3.5575, |
|
"step": 182000 |
|
}, |
|
{ |
|
"epoch": 1.520849001420573, |
|
"eval_loss": 3.4685845375061035, |
|
"eval_runtime": 40.4646, |
|
"eval_samples_per_second": 110.269, |
|
"eval_steps_per_second": 13.79, |
|
"step": 182000 |
|
}, |
|
{ |
|
"epoch": 1.5250271580178825, |
|
"grad_norm": 5.890238285064697, |
|
"learning_rate": 3.097776987745201e-05, |
|
"loss": 3.5999, |
|
"step": 182500 |
|
}, |
|
{ |
|
"epoch": 1.5292053146151918, |
|
"grad_norm": 6.8811140060424805, |
|
"learning_rate": 3.0925592872976705e-05, |
|
"loss": 3.4265, |
|
"step": 183000 |
|
}, |
|
{ |
|
"epoch": 1.5292053146151918, |
|
"eval_loss": 3.465893030166626, |
|
"eval_runtime": 40.4398, |
|
"eval_samples_per_second": 110.337, |
|
"eval_steps_per_second": 13.798, |
|
"step": 183000 |
|
}, |
|
{ |
|
"epoch": 1.5333834712125012, |
|
"grad_norm": 6.643799304962158, |
|
"learning_rate": 3.0873311305366184e-05, |
|
"loss": 3.6255, |
|
"step": 183500 |
|
}, |
|
{ |
|
"epoch": 1.5375616278098103, |
|
"grad_norm": 6.201560020446777, |
|
"learning_rate": 3.0821029737755656e-05, |
|
"loss": 3.622, |
|
"step": 184000 |
|
}, |
|
{ |
|
"epoch": 1.5375616278098103, |
|
"eval_loss": 3.464231491088867, |
|
"eval_runtime": 40.4428, |
|
"eval_samples_per_second": 110.329, |
|
"eval_steps_per_second": 13.797, |
|
"step": 184000 |
|
}, |
|
{ |
|
"epoch": 1.5417397844071195, |
|
"grad_norm": 4.211904048919678, |
|
"learning_rate": 3.0768748170145135e-05, |
|
"loss": 3.5642, |
|
"step": 184500 |
|
}, |
|
{ |
|
"epoch": 1.5459179410044288, |
|
"grad_norm": 5.1118011474609375, |
|
"learning_rate": 3.0716466602534614e-05, |
|
"loss": 3.5833, |
|
"step": 185000 |
|
}, |
|
{ |
|
"epoch": 1.5459179410044288, |
|
"eval_loss": 3.461984157562256, |
|
"eval_runtime": 40.4142, |
|
"eval_samples_per_second": 110.407, |
|
"eval_steps_per_second": 13.807, |
|
"step": 185000 |
|
}, |
|
{ |
|
"epoch": 1.5500960976017382, |
|
"grad_norm": 5.767035484313965, |
|
"learning_rate": 3.066428959805931e-05, |
|
"loss": 3.6235, |
|
"step": 185500 |
|
}, |
|
{ |
|
"epoch": 1.5542742541990475, |
|
"grad_norm": 3.612896680831909, |
|
"learning_rate": 3.061200803044879e-05, |
|
"loss": 3.5037, |
|
"step": 186000 |
|
}, |
|
{ |
|
"epoch": 1.5542742541990475, |
|
"eval_loss": 3.4645168781280518, |
|
"eval_runtime": 40.4303, |
|
"eval_samples_per_second": 110.363, |
|
"eval_steps_per_second": 13.802, |
|
"step": 186000 |
|
}, |
|
{ |
|
"epoch": 1.5584524107963567, |
|
"grad_norm": 7.333379745483398, |
|
"learning_rate": 3.055972646283826e-05, |
|
"loss": 3.5563, |
|
"step": 186500 |
|
}, |
|
{ |
|
"epoch": 1.5626305673936658, |
|
"grad_norm": 4.652407646179199, |
|
"learning_rate": 3.050744489522774e-05, |
|
"loss": 3.5311, |
|
"step": 187000 |
|
}, |
|
{ |
|
"epoch": 1.5626305673936658, |
|
"eval_loss": 3.456352949142456, |
|
"eval_runtime": 40.4378, |
|
"eval_samples_per_second": 110.342, |
|
"eval_steps_per_second": 13.799, |
|
"step": 187000 |
|
}, |
|
{ |
|
"epoch": 1.5668087239909751, |
|
"grad_norm": 6.25783634185791, |
|
"learning_rate": 3.0455267890752437e-05, |
|
"loss": 3.5793, |
|
"step": 187500 |
|
}, |
|
{ |
|
"epoch": 1.5709868805882845, |
|
"grad_norm": 10.195725440979004, |
|
"learning_rate": 3.0402986323141912e-05, |
|
"loss": 3.4834, |
|
"step": 188000 |
|
}, |
|
{ |
|
"epoch": 1.5709868805882845, |
|
"eval_loss": 3.4547135829925537, |
|
"eval_runtime": 40.522, |
|
"eval_samples_per_second": 110.113, |
|
"eval_steps_per_second": 13.77, |
|
"step": 188000 |
|
}, |
|
{ |
|
"epoch": 1.5751650371855939, |
|
"grad_norm": 3.9726510047912598, |
|
"learning_rate": 3.0350704755531388e-05, |
|
"loss": 3.4863, |
|
"step": 188500 |
|
}, |
|
{ |
|
"epoch": 1.579343193782903, |
|
"grad_norm": 5.096458911895752, |
|
"learning_rate": 3.0298423187920867e-05, |
|
"loss": 3.4679, |
|
"step": 189000 |
|
}, |
|
{ |
|
"epoch": 1.579343193782903, |
|
"eval_loss": 3.448751926422119, |
|
"eval_runtime": 40.4496, |
|
"eval_samples_per_second": 110.31, |
|
"eval_steps_per_second": 13.795, |
|
"step": 189000 |
|
}, |
|
{ |
|
"epoch": 1.5835213503802121, |
|
"grad_norm": 5.1278157234191895, |
|
"learning_rate": 3.024614162031035e-05, |
|
"loss": 3.5127, |
|
"step": 189500 |
|
}, |
|
{ |
|
"epoch": 1.5876995069775215, |
|
"grad_norm": 3.9133641719818115, |
|
"learning_rate": 3.0193860052699825e-05, |
|
"loss": 3.4699, |
|
"step": 190000 |
|
}, |
|
{ |
|
"epoch": 1.5876995069775215, |
|
"eval_loss": 3.453089952468872, |
|
"eval_runtime": 40.4458, |
|
"eval_samples_per_second": 110.32, |
|
"eval_steps_per_second": 13.796, |
|
"step": 190000 |
|
}, |
|
{ |
|
"epoch": 1.5918776635748308, |
|
"grad_norm": 3.5668985843658447, |
|
"learning_rate": 3.0141683048224516e-05, |
|
"loss": 3.5933, |
|
"step": 190500 |
|
}, |
|
{ |
|
"epoch": 1.5960558201721402, |
|
"grad_norm": 4.461442947387695, |
|
"learning_rate": 3.0089506043749215e-05, |
|
"loss": 3.4547, |
|
"step": 191000 |
|
}, |
|
{ |
|
"epoch": 1.5960558201721402, |
|
"eval_loss": 3.4460525512695312, |
|
"eval_runtime": 40.4625, |
|
"eval_samples_per_second": 110.275, |
|
"eval_steps_per_second": 13.791, |
|
"step": 191000 |
|
}, |
|
{ |
|
"epoch": 1.6002339767694493, |
|
"grad_norm": 9.258716583251953, |
|
"learning_rate": 3.0037224476138694e-05, |
|
"loss": 3.4844, |
|
"step": 191500 |
|
}, |
|
{ |
|
"epoch": 1.6044121333667585, |
|
"grad_norm": 5.085646629333496, |
|
"learning_rate": 2.998494290852817e-05, |
|
"loss": 3.4608, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 1.6044121333667585, |
|
"eval_loss": 3.4406673908233643, |
|
"eval_runtime": 40.4745, |
|
"eval_samples_per_second": 110.242, |
|
"eval_steps_per_second": 13.786, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 1.6085902899640678, |
|
"grad_norm": 4.961802005767822, |
|
"learning_rate": 2.9932661340917645e-05, |
|
"loss": 3.4265, |
|
"step": 192500 |
|
}, |
|
{ |
|
"epoch": 1.6127684465613772, |
|
"grad_norm": 5.648656845092773, |
|
"learning_rate": 2.9880379773307127e-05, |
|
"loss": 3.5027, |
|
"step": 193000 |
|
}, |
|
{ |
|
"epoch": 1.6127684465613772, |
|
"eval_loss": 3.437014579772949, |
|
"eval_runtime": 40.4642, |
|
"eval_samples_per_second": 110.27, |
|
"eval_steps_per_second": 13.79, |
|
"step": 193000 |
|
}, |
|
{ |
|
"epoch": 1.6169466031586865, |
|
"grad_norm": 5.819306373596191, |
|
"learning_rate": 2.9828098205696603e-05, |
|
"loss": 3.5167, |
|
"step": 193500 |
|
}, |
|
{ |
|
"epoch": 1.6211247597559957, |
|
"grad_norm": 5.806619167327881, |
|
"learning_rate": 2.9775816638086078e-05, |
|
"loss": 3.4641, |
|
"step": 194000 |
|
}, |
|
{ |
|
"epoch": 1.6211247597559957, |
|
"eval_loss": 3.435065746307373, |
|
"eval_runtime": 40.453, |
|
"eval_samples_per_second": 110.301, |
|
"eval_steps_per_second": 13.794, |
|
"step": 194000 |
|
}, |
|
{ |
|
"epoch": 1.6253029163533048, |
|
"grad_norm": 7.277612209320068, |
|
"learning_rate": 2.9723535070475557e-05, |
|
"loss": 3.5148, |
|
"step": 194500 |
|
}, |
|
{ |
|
"epoch": 1.6294810729506142, |
|
"grad_norm": 5.517014503479004, |
|
"learning_rate": 2.9671358066000255e-05, |
|
"loss": 3.4822, |
|
"step": 195000 |
|
}, |
|
{ |
|
"epoch": 1.6294810729506142, |
|
"eval_loss": 3.4265546798706055, |
|
"eval_runtime": 40.492, |
|
"eval_samples_per_second": 110.194, |
|
"eval_steps_per_second": 13.78, |
|
"step": 195000 |
|
}, |
|
{ |
|
"epoch": 1.6336592295479235, |
|
"grad_norm": 5.561305046081543, |
|
"learning_rate": 2.961907649838973e-05, |
|
"loss": 3.4422, |
|
"step": 195500 |
|
}, |
|
{ |
|
"epoch": 1.6378373861452329, |
|
"grad_norm": 4.937339782714844, |
|
"learning_rate": 2.9566794930779206e-05, |
|
"loss": 3.4682, |
|
"step": 196000 |
|
}, |
|
{ |
|
"epoch": 1.6378373861452329, |
|
"eval_loss": 3.425950765609741, |
|
"eval_runtime": 40.469, |
|
"eval_samples_per_second": 110.257, |
|
"eval_steps_per_second": 13.788, |
|
"step": 196000 |
|
}, |
|
{ |
|
"epoch": 1.642015542742542, |
|
"grad_norm": 4.299443244934082, |
|
"learning_rate": 2.9514513363168682e-05, |
|
"loss": 3.4398, |
|
"step": 196500 |
|
}, |
|
{ |
|
"epoch": 1.6461936993398512, |
|
"grad_norm": 3.591259479522705, |
|
"learning_rate": 2.9462336358693384e-05, |
|
"loss": 3.4695, |
|
"step": 197000 |
|
}, |
|
{ |
|
"epoch": 1.6461936993398512, |
|
"eval_loss": 3.4207708835601807, |
|
"eval_runtime": 40.4515, |
|
"eval_samples_per_second": 110.305, |
|
"eval_steps_per_second": 13.794, |
|
"step": 197000 |
|
}, |
|
{ |
|
"epoch": 1.6503718559371605, |
|
"grad_norm": 4.869890213012695, |
|
"learning_rate": 2.941005479108286e-05, |
|
"loss": 3.5136, |
|
"step": 197500 |
|
}, |
|
{ |
|
"epoch": 1.6545500125344699, |
|
"grad_norm": 4.020719051361084, |
|
"learning_rate": 2.9357773223472335e-05, |
|
"loss": 3.4545, |
|
"step": 198000 |
|
}, |
|
{ |
|
"epoch": 1.6545500125344699, |
|
"eval_loss": 3.4241840839385986, |
|
"eval_runtime": 40.4973, |
|
"eval_samples_per_second": 110.18, |
|
"eval_steps_per_second": 13.779, |
|
"step": 198000 |
|
}, |
|
{ |
|
"epoch": 1.658728169131779, |
|
"grad_norm": 5.599808692932129, |
|
"learning_rate": 2.930549165586181e-05, |
|
"loss": 3.4355, |
|
"step": 198500 |
|
}, |
|
{ |
|
"epoch": 1.6629063257290884, |
|
"grad_norm": 5.005913734436035, |
|
"learning_rate": 2.9253210088251286e-05, |
|
"loss": 3.4585, |
|
"step": 199000 |
|
}, |
|
{ |
|
"epoch": 1.6629063257290884, |
|
"eval_loss": 3.4280543327331543, |
|
"eval_runtime": 40.4983, |
|
"eval_samples_per_second": 110.177, |
|
"eval_steps_per_second": 13.778, |
|
"step": 199000 |
|
}, |
|
{ |
|
"epoch": 1.6670844823263975, |
|
"grad_norm": 3.628136396408081, |
|
"learning_rate": 2.9200928520640765e-05, |
|
"loss": 3.5212, |
|
"step": 199500 |
|
}, |
|
{ |
|
"epoch": 1.6712626389237069, |
|
"grad_norm": 3.2922470569610596, |
|
"learning_rate": 2.914864695303024e-05, |
|
"loss": 3.5205, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 1.6712626389237069, |
|
"eval_loss": 3.416393280029297, |
|
"eval_runtime": 40.4533, |
|
"eval_samples_per_second": 110.3, |
|
"eval_steps_per_second": 13.794, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 1.6754407955210162, |
|
"grad_norm": 4.794888496398926, |
|
"learning_rate": 2.9096365385419716e-05, |
|
"loss": 3.4798, |
|
"step": 200500 |
|
}, |
|
{ |
|
"epoch": 1.6796189521183253, |
|
"grad_norm": 4.415316581726074, |
|
"learning_rate": 2.9044188380944414e-05, |
|
"loss": 3.4709, |
|
"step": 201000 |
|
}, |
|
{ |
|
"epoch": 1.6796189521183253, |
|
"eval_loss": 3.4112062454223633, |
|
"eval_runtime": 40.4752, |
|
"eval_samples_per_second": 110.24, |
|
"eval_steps_per_second": 13.786, |
|
"step": 201000 |
|
}, |
|
{ |
|
"epoch": 1.6837971087156347, |
|
"grad_norm": 3.854452610015869, |
|
"learning_rate": 2.899190681333389e-05, |
|
"loss": 3.427, |
|
"step": 201500 |
|
}, |
|
{ |
|
"epoch": 1.6879752653129438, |
|
"grad_norm": 4.938957214355469, |
|
"learning_rate": 2.893962524572337e-05, |
|
"loss": 3.407, |
|
"step": 202000 |
|
}, |
|
{ |
|
"epoch": 1.6879752653129438, |
|
"eval_loss": 3.410858154296875, |
|
"eval_runtime": 40.4568, |
|
"eval_samples_per_second": 110.29, |
|
"eval_steps_per_second": 13.792, |
|
"step": 202000 |
|
}, |
|
{ |
|
"epoch": 1.6921534219102532, |
|
"grad_norm": 5.272583961486816, |
|
"learning_rate": 2.8887343678112844e-05, |
|
"loss": 3.4582, |
|
"step": 202500 |
|
}, |
|
{ |
|
"epoch": 1.6963315785075626, |
|
"grad_norm": 5.953027725219727, |
|
"learning_rate": 2.8835166673637542e-05, |
|
"loss": 3.3917, |
|
"step": 203000 |
|
}, |
|
{ |
|
"epoch": 1.6963315785075626, |
|
"eval_loss": 3.4051761627197266, |
|
"eval_runtime": 40.4397, |
|
"eval_samples_per_second": 110.337, |
|
"eval_steps_per_second": 13.798, |
|
"step": 203000 |
|
}, |
|
{ |
|
"epoch": 1.7005097351048717, |
|
"grad_norm": 9.10191535949707, |
|
"learning_rate": 2.8782885106027018e-05, |
|
"loss": 3.4971, |
|
"step": 203500 |
|
}, |
|
{ |
|
"epoch": 1.704687891702181, |
|
"grad_norm": 7.353392124176025, |
|
"learning_rate": 2.8730603538416493e-05, |
|
"loss": 3.4285, |
|
"step": 204000 |
|
}, |
|
{ |
|
"epoch": 1.704687891702181, |
|
"eval_loss": 3.3989508152008057, |
|
"eval_runtime": 40.4649, |
|
"eval_samples_per_second": 110.268, |
|
"eval_steps_per_second": 13.79, |
|
"step": 204000 |
|
}, |
|
{ |
|
"epoch": 1.7088660482994902, |
|
"grad_norm": 5.292013168334961, |
|
"learning_rate": 2.8678321970805972e-05, |
|
"loss": 3.4771, |
|
"step": 204500 |
|
}, |
|
{ |
|
"epoch": 1.7130442048967995, |
|
"grad_norm": 4.941309452056885, |
|
"learning_rate": 2.862614496633067e-05, |
|
"loss": 3.4328, |
|
"step": 205000 |
|
}, |
|
{ |
|
"epoch": 1.7130442048967995, |
|
"eval_loss": 3.404594898223877, |
|
"eval_runtime": 40.469, |
|
"eval_samples_per_second": 110.257, |
|
"eval_steps_per_second": 13.788, |
|
"step": 205000 |
|
}, |
|
{ |
|
"epoch": 1.717222361494109, |
|
"grad_norm": 5.848660945892334, |
|
"learning_rate": 2.8573863398720146e-05, |
|
"loss": 3.5314, |
|
"step": 205500 |
|
}, |
|
{ |
|
"epoch": 1.721400518091418, |
|
"grad_norm": 3.0623245239257812, |
|
"learning_rate": 2.8521581831109622e-05, |
|
"loss": 3.4575, |
|
"step": 206000 |
|
}, |
|
{ |
|
"epoch": 1.721400518091418, |
|
"eval_loss": 3.4002387523651123, |
|
"eval_runtime": 40.4802, |
|
"eval_samples_per_second": 110.227, |
|
"eval_steps_per_second": 13.785, |
|
"step": 206000 |
|
}, |
|
{ |
|
"epoch": 1.7255786746887274, |
|
"grad_norm": 2.902146816253662, |
|
"learning_rate": 2.8469300263499104e-05, |
|
"loss": 3.4533, |
|
"step": 206500 |
|
}, |
|
{ |
|
"epoch": 1.7297568312860365, |
|
"grad_norm": 4.314316749572754, |
|
"learning_rate": 2.841701869588858e-05, |
|
"loss": 3.4924, |
|
"step": 207000 |
|
}, |
|
{ |
|
"epoch": 1.7297568312860365, |
|
"eval_loss": 3.398782253265381, |
|
"eval_runtime": 40.4718, |
|
"eval_samples_per_second": 110.25, |
|
"eval_steps_per_second": 13.787, |
|
"step": 207000 |
|
}, |
|
{ |
|
"epoch": 1.7339349878833459, |
|
"grad_norm": 4.598596096038818, |
|
"learning_rate": 2.8364841691413275e-05, |
|
"loss": 3.4796, |
|
"step": 207500 |
|
}, |
|
{ |
|
"epoch": 1.7381131444806552, |
|
"grad_norm": 7.308864593505859, |
|
"learning_rate": 2.8312560123802757e-05, |
|
"loss": 3.4804, |
|
"step": 208000 |
|
}, |
|
{ |
|
"epoch": 1.7381131444806552, |
|
"eval_loss": 3.393099308013916, |
|
"eval_runtime": 40.4423, |
|
"eval_samples_per_second": 110.33, |
|
"eval_steps_per_second": 13.797, |
|
"step": 208000 |
|
}, |
|
{ |
|
"epoch": 1.7422913010779644, |
|
"grad_norm": 2.0738399028778076, |
|
"learning_rate": 2.8260278556192232e-05, |
|
"loss": 3.5247, |
|
"step": 208500 |
|
}, |
|
{ |
|
"epoch": 1.7464694576752735, |
|
"grad_norm": 4.9979753494262695, |
|
"learning_rate": 2.8207996988581708e-05, |
|
"loss": 3.445, |
|
"step": 209000 |
|
}, |
|
{ |
|
"epoch": 1.7464694576752735, |
|
"eval_loss": 3.389554977416992, |
|
"eval_runtime": 40.4907, |
|
"eval_samples_per_second": 110.198, |
|
"eval_steps_per_second": 13.781, |
|
"step": 209000 |
|
}, |
|
{ |
|
"epoch": 1.7506476142725829, |
|
"grad_norm": 6.01882791519165, |
|
"learning_rate": 2.8155819984106406e-05, |
|
"loss": 3.461, |
|
"step": 209500 |
|
}, |
|
{ |
|
"epoch": 1.7548257708698922, |
|
"grad_norm": 4.392005920410156, |
|
"learning_rate": 2.8103538416495885e-05, |
|
"loss": 3.5033, |
|
"step": 210000 |
|
}, |
|
{ |
|
"epoch": 1.7548257708698922, |
|
"eval_loss": 3.391627073287964, |
|
"eval_runtime": 40.4818, |
|
"eval_samples_per_second": 110.222, |
|
"eval_steps_per_second": 13.784, |
|
"step": 210000 |
|
}, |
|
{ |
|
"epoch": 1.7590039274672016, |
|
"grad_norm": 3.1449947357177734, |
|
"learning_rate": 2.805125684888536e-05, |
|
"loss": 3.4507, |
|
"step": 210500 |
|
}, |
|
{ |
|
"epoch": 1.7631820840645107, |
|
"grad_norm": 5.737192153930664, |
|
"learning_rate": 2.7998975281274836e-05, |
|
"loss": 3.4617, |
|
"step": 211000 |
|
}, |
|
{ |
|
"epoch": 1.7631820840645107, |
|
"eval_loss": 3.3873517513275146, |
|
"eval_runtime": 40.4668, |
|
"eval_samples_per_second": 110.263, |
|
"eval_steps_per_second": 13.789, |
|
"step": 211000 |
|
}, |
|
{ |
|
"epoch": 1.7673602406618198, |
|
"grad_norm": 5.513020038604736, |
|
"learning_rate": 2.7946693713664312e-05, |
|
"loss": 3.4475, |
|
"step": 211500 |
|
}, |
|
{ |
|
"epoch": 1.7715383972591292, |
|
"grad_norm": 3.074239492416382, |
|
"learning_rate": 2.789451670918901e-05, |
|
"loss": 3.4343, |
|
"step": 212000 |
|
}, |
|
{ |
|
"epoch": 1.7715383972591292, |
|
"eval_loss": 3.38964581489563, |
|
"eval_runtime": 41.7425, |
|
"eval_samples_per_second": 106.893, |
|
"eval_steps_per_second": 13.368, |
|
"step": 212000 |
|
}, |
|
{ |
|
"epoch": 1.7757165538564386, |
|
"grad_norm": 3.9970204830169678, |
|
"learning_rate": 2.784223514157849e-05, |
|
"loss": 3.3716, |
|
"step": 212500 |
|
}, |
|
{ |
|
"epoch": 1.779894710453748, |
|
"grad_norm": 4.594680309295654, |
|
"learning_rate": 2.7789953573967965e-05, |
|
"loss": 3.4534, |
|
"step": 213000 |
|
}, |
|
{ |
|
"epoch": 1.779894710453748, |
|
"eval_loss": 3.382511854171753, |
|
"eval_runtime": 40.4565, |
|
"eval_samples_per_second": 110.291, |
|
"eval_steps_per_second": 13.793, |
|
"step": 213000 |
|
}, |
|
{ |
|
"epoch": 1.784072867051057, |
|
"grad_norm": 6.570225238800049, |
|
"learning_rate": 2.773767200635744e-05, |
|
"loss": 3.4291, |
|
"step": 213500 |
|
}, |
|
{ |
|
"epoch": 1.7882510236483662, |
|
"grad_norm": 5.155056953430176, |
|
"learning_rate": 2.768549500188214e-05, |
|
"loss": 3.4402, |
|
"step": 214000 |
|
}, |
|
{ |
|
"epoch": 1.7882510236483662, |
|
"eval_loss": 3.381510019302368, |
|
"eval_runtime": 40.5141, |
|
"eval_samples_per_second": 110.134, |
|
"eval_steps_per_second": 13.773, |
|
"step": 214000 |
|
}, |
|
{ |
|
"epoch": 1.7924291802456755, |
|
"grad_norm": 4.226760387420654, |
|
"learning_rate": 2.7633213434271614e-05, |
|
"loss": 3.4705, |
|
"step": 214500 |
|
}, |
|
{ |
|
"epoch": 1.796607336842985, |
|
"grad_norm": 6.171258926391602, |
|
"learning_rate": 2.7581036429796316e-05, |
|
"loss": 3.4773, |
|
"step": 215000 |
|
}, |
|
{ |
|
"epoch": 1.796607336842985, |
|
"eval_loss": 3.3844947814941406, |
|
"eval_runtime": 40.4689, |
|
"eval_samples_per_second": 110.258, |
|
"eval_steps_per_second": 13.788, |
|
"step": 215000 |
|
}, |
|
{ |
|
"epoch": 1.8007854934402943, |
|
"grad_norm": 5.332149982452393, |
|
"learning_rate": 2.752875486218579e-05, |
|
"loss": 3.4529, |
|
"step": 215500 |
|
}, |
|
{ |
|
"epoch": 1.8049636500376034, |
|
"grad_norm": 6.050594806671143, |
|
"learning_rate": 2.7476473294575267e-05, |
|
"loss": 3.4419, |
|
"step": 216000 |
|
}, |
|
{ |
|
"epoch": 1.8049636500376034, |
|
"eval_loss": 3.3761329650878906, |
|
"eval_runtime": 40.4659, |
|
"eval_samples_per_second": 110.266, |
|
"eval_steps_per_second": 13.789, |
|
"step": 216000 |
|
}, |
|
{ |
|
"epoch": 1.8091418066349125, |
|
"grad_norm": 4.910857200622559, |
|
"learning_rate": 2.7424191726964742e-05, |
|
"loss": 3.424, |
|
"step": 216500 |
|
}, |
|
{ |
|
"epoch": 1.8133199632322219, |
|
"grad_norm": 8.457829475402832, |
|
"learning_rate": 2.7371910159354218e-05, |
|
"loss": 3.4431, |
|
"step": 217000 |
|
}, |
|
{ |
|
"epoch": 1.8133199632322219, |
|
"eval_loss": 3.38053297996521, |
|
"eval_runtime": 40.4472, |
|
"eval_samples_per_second": 110.317, |
|
"eval_steps_per_second": 13.796, |
|
"step": 217000 |
|
}, |
|
{ |
|
"epoch": 1.8174981198295312, |
|
"grad_norm": 4.863792896270752, |
|
"learning_rate": 2.7319628591743697e-05, |
|
"loss": 3.4556, |
|
"step": 217500 |
|
}, |
|
{ |
|
"epoch": 1.8216762764268406, |
|
"grad_norm": 6.228066921234131, |
|
"learning_rate": 2.7267347024133172e-05, |
|
"loss": 3.4047, |
|
"step": 218000 |
|
}, |
|
{ |
|
"epoch": 1.8216762764268406, |
|
"eval_loss": 3.3676483631134033, |
|
"eval_runtime": 40.4895, |
|
"eval_samples_per_second": 110.201, |
|
"eval_steps_per_second": 13.781, |
|
"step": 218000 |
|
}, |
|
{ |
|
"epoch": 1.8258544330241497, |
|
"grad_norm": 6.320400714874268, |
|
"learning_rate": 2.7215065456522648e-05, |
|
"loss": 3.4152, |
|
"step": 218500 |
|
}, |
|
{ |
|
"epoch": 1.8300325896214589, |
|
"grad_norm": 3.3179121017456055, |
|
"learning_rate": 2.7162888452047346e-05, |
|
"loss": 3.3419, |
|
"step": 219000 |
|
}, |
|
{ |
|
"epoch": 1.8300325896214589, |
|
"eval_loss": 3.3715813159942627, |
|
"eval_runtime": 40.4828, |
|
"eval_samples_per_second": 110.22, |
|
"eval_steps_per_second": 13.784, |
|
"step": 219000 |
|
}, |
|
{ |
|
"epoch": 1.8342107462187682, |
|
"grad_norm": 4.821847915649414, |
|
"learning_rate": 2.7110606884436822e-05, |
|
"loss": 3.4722, |
|
"step": 219500 |
|
}, |
|
{ |
|
"epoch": 1.8383889028160776, |
|
"grad_norm": 5.0839972496032715, |
|
"learning_rate": 2.70583253168263e-05, |
|
"loss": 3.3995, |
|
"step": 220000 |
|
}, |
|
{ |
|
"epoch": 1.8383889028160776, |
|
"eval_loss": 3.3703973293304443, |
|
"eval_runtime": 40.4815, |
|
"eval_samples_per_second": 110.223, |
|
"eval_steps_per_second": 13.784, |
|
"step": 220000 |
|
}, |
|
{ |
|
"epoch": 1.842567059413387, |
|
"grad_norm": 6.820371627807617, |
|
"learning_rate": 2.7006043749215776e-05, |
|
"loss": 3.4211, |
|
"step": 220500 |
|
}, |
|
{ |
|
"epoch": 1.846745216010696, |
|
"grad_norm": 6.776442527770996, |
|
"learning_rate": 2.6953866744740475e-05, |
|
"loss": 3.4227, |
|
"step": 221000 |
|
}, |
|
{ |
|
"epoch": 1.846745216010696, |
|
"eval_loss": 3.372316360473633, |
|
"eval_runtime": 40.4479, |
|
"eval_samples_per_second": 110.315, |
|
"eval_steps_per_second": 13.796, |
|
"step": 221000 |
|
}, |
|
{ |
|
"epoch": 1.8509233726080052, |
|
"grad_norm": 5.450851917266846, |
|
"learning_rate": 2.690158517712995e-05, |
|
"loss": 3.5122, |
|
"step": 221500 |
|
}, |
|
{ |
|
"epoch": 1.8551015292053146, |
|
"grad_norm": 7.390010833740234, |
|
"learning_rate": 2.6849303609519426e-05, |
|
"loss": 3.4446, |
|
"step": 222000 |
|
}, |
|
{ |
|
"epoch": 1.8551015292053146, |
|
"eval_loss": 3.3640332221984863, |
|
"eval_runtime": 40.4941, |
|
"eval_samples_per_second": 110.189, |
|
"eval_steps_per_second": 13.78, |
|
"step": 222000 |
|
}, |
|
{ |
|
"epoch": 1.859279685802624, |
|
"grad_norm": 5.441040992736816, |
|
"learning_rate": 2.6797022041908905e-05, |
|
"loss": 3.4843, |
|
"step": 222500 |
|
}, |
|
{ |
|
"epoch": 1.8634578423999333, |
|
"grad_norm": 6.238590717315674, |
|
"learning_rate": 2.6744845037433603e-05, |
|
"loss": 3.3788, |
|
"step": 223000 |
|
}, |
|
{ |
|
"epoch": 1.8634578423999333, |
|
"eval_loss": 3.360541582107544, |
|
"eval_runtime": 40.4855, |
|
"eval_samples_per_second": 110.212, |
|
"eval_steps_per_second": 13.783, |
|
"step": 223000 |
|
}, |
|
{ |
|
"epoch": 1.8676359989972424, |
|
"grad_norm": 1.9441295862197876, |
|
"learning_rate": 2.669256346982308e-05, |
|
"loss": 3.4215, |
|
"step": 223500 |
|
}, |
|
{ |
|
"epoch": 1.8718141555945516, |
|
"grad_norm": 4.011134624481201, |
|
"learning_rate": 2.6640281902212554e-05, |
|
"loss": 3.4202, |
|
"step": 224000 |
|
}, |
|
{ |
|
"epoch": 1.8718141555945516, |
|
"eval_loss": 3.3591411113739014, |
|
"eval_runtime": 40.4741, |
|
"eval_samples_per_second": 110.243, |
|
"eval_steps_per_second": 13.787, |
|
"step": 224000 |
|
}, |
|
{ |
|
"epoch": 1.875992312191861, |
|
"grad_norm": 4.267407417297363, |
|
"learning_rate": 2.6588000334602033e-05, |
|
"loss": 3.3769, |
|
"step": 224500 |
|
}, |
|
{ |
|
"epoch": 1.8801704687891703, |
|
"grad_norm": 4.621767520904541, |
|
"learning_rate": 2.6535718766991512e-05, |
|
"loss": 3.408, |
|
"step": 225000 |
|
}, |
|
{ |
|
"epoch": 1.8801704687891703, |
|
"eval_loss": 3.3587403297424316, |
|
"eval_runtime": 40.4994, |
|
"eval_samples_per_second": 110.174, |
|
"eval_steps_per_second": 13.778, |
|
"step": 225000 |
|
}, |
|
{ |
|
"epoch": 1.8843486253864796, |
|
"grad_norm": 4.63948392868042, |
|
"learning_rate": 2.6483541762516207e-05, |
|
"loss": 3.3376, |
|
"step": 225500 |
|
}, |
|
{ |
|
"epoch": 1.8885267819837888, |
|
"grad_norm": 3.7362630367279053, |
|
"learning_rate": 2.6431260194905682e-05, |
|
"loss": 3.3261, |
|
"step": 226000 |
|
}, |
|
{ |
|
"epoch": 1.8885267819837888, |
|
"eval_loss": 3.357508420944214, |
|
"eval_runtime": 40.4249, |
|
"eval_samples_per_second": 110.378, |
|
"eval_steps_per_second": 13.803, |
|
"step": 226000 |
|
}, |
|
{ |
|
"epoch": 1.892704938581098, |
|
"grad_norm": 3.484428644180298, |
|
"learning_rate": 2.6378978627295165e-05, |
|
"loss": 3.3812, |
|
"step": 226500 |
|
}, |
|
{ |
|
"epoch": 1.8968830951784073, |
|
"grad_norm": 5.982702255249023, |
|
"learning_rate": 2.632669705968464e-05, |
|
"loss": 3.4233, |
|
"step": 227000 |
|
}, |
|
{ |
|
"epoch": 1.8968830951784073, |
|
"eval_loss": 3.355980396270752, |
|
"eval_runtime": 40.4822, |
|
"eval_samples_per_second": 110.221, |
|
"eval_steps_per_second": 13.784, |
|
"step": 227000 |
|
}, |
|
{ |
|
"epoch": 1.9010612517757166, |
|
"grad_norm": 3.434718132019043, |
|
"learning_rate": 2.6274415492074116e-05, |
|
"loss": 3.3951, |
|
"step": 227500 |
|
}, |
|
{ |
|
"epoch": 1.905239408373026, |
|
"grad_norm": 4.6421709060668945, |
|
"learning_rate": 2.6222238487598817e-05, |
|
"loss": 3.3664, |
|
"step": 228000 |
|
}, |
|
{ |
|
"epoch": 1.905239408373026, |
|
"eval_loss": 3.351598024368286, |
|
"eval_runtime": 40.4676, |
|
"eval_samples_per_second": 110.261, |
|
"eval_steps_per_second": 13.789, |
|
"step": 228000 |
|
}, |
|
{ |
|
"epoch": 1.909417564970335, |
|
"grad_norm": 10.201983451843262, |
|
"learning_rate": 2.6169956919988293e-05, |
|
"loss": 3.4341, |
|
"step": 228500 |
|
}, |
|
{ |
|
"epoch": 1.9135957215676442, |
|
"grad_norm": 4.66003942489624, |
|
"learning_rate": 2.611767535237777e-05, |
|
"loss": 3.3314, |
|
"step": 229000 |
|
}, |
|
{ |
|
"epoch": 1.9135957215676442, |
|
"eval_loss": 3.3540987968444824, |
|
"eval_runtime": 40.4587, |
|
"eval_samples_per_second": 110.285, |
|
"eval_steps_per_second": 13.792, |
|
"step": 229000 |
|
}, |
|
{ |
|
"epoch": 1.9177738781649536, |
|
"grad_norm": 1.5573841333389282, |
|
"learning_rate": 2.6065393784767244e-05, |
|
"loss": 3.3577, |
|
"step": 229500 |
|
}, |
|
{ |
|
"epoch": 1.921952034762263, |
|
"grad_norm": 4.663156509399414, |
|
"learning_rate": 2.6013216780291942e-05, |
|
"loss": 3.3725, |
|
"step": 230000 |
|
}, |
|
{ |
|
"epoch": 1.921952034762263, |
|
"eval_loss": 3.3473751544952393, |
|
"eval_runtime": 40.4615, |
|
"eval_samples_per_second": 110.278, |
|
"eval_steps_per_second": 13.791, |
|
"step": 230000 |
|
}, |
|
{ |
|
"epoch": 1.9261301913595723, |
|
"grad_norm": 4.893932342529297, |
|
"learning_rate": 2.596093521268142e-05, |
|
"loss": 3.3943, |
|
"step": 230500 |
|
}, |
|
{ |
|
"epoch": 1.9303083479568814, |
|
"grad_norm": 1.92241370677948, |
|
"learning_rate": 2.5908653645070897e-05, |
|
"loss": 3.3888, |
|
"step": 231000 |
|
}, |
|
{ |
|
"epoch": 1.9303083479568814, |
|
"eval_loss": 3.3468871116638184, |
|
"eval_runtime": 40.474, |
|
"eval_samples_per_second": 110.244, |
|
"eval_steps_per_second": 13.787, |
|
"step": 231000 |
|
}, |
|
{ |
|
"epoch": 1.9344865045541906, |
|
"grad_norm": 4.520233631134033, |
|
"learning_rate": 2.5856372077460372e-05, |
|
"loss": 3.3683, |
|
"step": 231500 |
|
}, |
|
{ |
|
"epoch": 1.9386646611515, |
|
"grad_norm": 4.232703685760498, |
|
"learning_rate": 2.5804090509849848e-05, |
|
"loss": 3.3707, |
|
"step": 232000 |
|
}, |
|
{ |
|
"epoch": 1.9386646611515, |
|
"eval_loss": 3.345097780227661, |
|
"eval_runtime": 40.4911, |
|
"eval_samples_per_second": 110.197, |
|
"eval_steps_per_second": 13.781, |
|
"step": 232000 |
|
}, |
|
{ |
|
"epoch": 1.9428428177488093, |
|
"grad_norm": 3.1016223430633545, |
|
"learning_rate": 2.5751913505374546e-05, |
|
"loss": 3.3736, |
|
"step": 232500 |
|
}, |
|
{ |
|
"epoch": 1.9470209743461186, |
|
"grad_norm": 4.9680047035217285, |
|
"learning_rate": 2.5699631937764025e-05, |
|
"loss": 3.2975, |
|
"step": 233000 |
|
}, |
|
{ |
|
"epoch": 1.9470209743461186, |
|
"eval_loss": 3.3408877849578857, |
|
"eval_runtime": 40.4839, |
|
"eval_samples_per_second": 110.217, |
|
"eval_steps_per_second": 13.783, |
|
"step": 233000 |
|
}, |
|
{ |
|
"epoch": 1.9511991309434278, |
|
"grad_norm": 3.1786868572235107, |
|
"learning_rate": 2.56473503701535e-05, |
|
"loss": 3.4885, |
|
"step": 233500 |
|
}, |
|
{ |
|
"epoch": 1.955377287540737, |
|
"grad_norm": 4.074703693389893, |
|
"learning_rate": 2.5595068802542976e-05, |
|
"loss": 3.3695, |
|
"step": 234000 |
|
}, |
|
{ |
|
"epoch": 1.955377287540737, |
|
"eval_loss": 3.3347997665405273, |
|
"eval_runtime": 40.4887, |
|
"eval_samples_per_second": 110.204, |
|
"eval_steps_per_second": 13.782, |
|
"step": 234000 |
|
}, |
|
{ |
|
"epoch": 1.9595554441380463, |
|
"grad_norm": 3.2261080741882324, |
|
"learning_rate": 2.554278723493245e-05, |
|
"loss": 3.3795, |
|
"step": 234500 |
|
}, |
|
{ |
|
"epoch": 1.9637336007353556, |
|
"grad_norm": 4.230503559112549, |
|
"learning_rate": 2.549061023045715e-05, |
|
"loss": 3.4472, |
|
"step": 235000 |
|
}, |
|
{ |
|
"epoch": 1.9637336007353556, |
|
"eval_loss": 3.3395724296569824, |
|
"eval_runtime": 40.4672, |
|
"eval_samples_per_second": 110.262, |
|
"eval_steps_per_second": 13.789, |
|
"step": 235000 |
|
}, |
|
{ |
|
"epoch": 1.967911757332665, |
|
"grad_norm": 7.1046833992004395, |
|
"learning_rate": 2.543832866284663e-05, |
|
"loss": 3.4055, |
|
"step": 235500 |
|
}, |
|
{ |
|
"epoch": 1.9720899139299741, |
|
"grad_norm": 5.35427713394165, |
|
"learning_rate": 2.5386047095236104e-05, |
|
"loss": 3.3572, |
|
"step": 236000 |
|
}, |
|
{ |
|
"epoch": 1.9720899139299741, |
|
"eval_loss": 3.3387038707733154, |
|
"eval_runtime": 40.4842, |
|
"eval_samples_per_second": 110.216, |
|
"eval_steps_per_second": 13.783, |
|
"step": 236000 |
|
}, |
|
{ |
|
"epoch": 1.9762680705272833, |
|
"grad_norm": 3.269439697265625, |
|
"learning_rate": 2.533376552762558e-05, |
|
"loss": 3.3778, |
|
"step": 236500 |
|
}, |
|
{ |
|
"epoch": 1.9804462271245926, |
|
"grad_norm": 6.2255730628967285, |
|
"learning_rate": 2.528158852315028e-05, |
|
"loss": 3.3925, |
|
"step": 237000 |
|
}, |
|
{ |
|
"epoch": 1.9804462271245926, |
|
"eval_loss": 3.3347251415252686, |
|
"eval_runtime": 40.4931, |
|
"eval_samples_per_second": 110.191, |
|
"eval_steps_per_second": 13.78, |
|
"step": 237000 |
|
}, |
|
{ |
|
"epoch": 1.984624383721902, |
|
"grad_norm": 8.227920532226562, |
|
"learning_rate": 2.5229306955539754e-05, |
|
"loss": 3.2903, |
|
"step": 237500 |
|
}, |
|
{ |
|
"epoch": 1.988802540319211, |
|
"grad_norm": 3.8814873695373535, |
|
"learning_rate": 2.5177025387929233e-05, |
|
"loss": 3.3328, |
|
"step": 238000 |
|
}, |
|
{ |
|
"epoch": 1.988802540319211, |
|
"eval_loss": 3.3345508575439453, |
|
"eval_runtime": 40.4733, |
|
"eval_samples_per_second": 110.246, |
|
"eval_steps_per_second": 13.787, |
|
"step": 238000 |
|
}, |
|
{ |
|
"epoch": 1.9929806969165205, |
|
"grad_norm": 4.1887006759643555, |
|
"learning_rate": 2.512474382031871e-05, |
|
"loss": 3.3947, |
|
"step": 238500 |
|
}, |
|
{ |
|
"epoch": 1.9971588535138296, |
|
"grad_norm": 5.336971759796143, |
|
"learning_rate": 2.5072462252708184e-05, |
|
"loss": 3.3977, |
|
"step": 239000 |
|
}, |
|
{ |
|
"epoch": 1.9971588535138296, |
|
"eval_loss": 3.333317756652832, |
|
"eval_runtime": 40.4282, |
|
"eval_samples_per_second": 110.369, |
|
"eval_steps_per_second": 13.802, |
|
"step": 239000 |
|
}, |
|
{ |
|
"epoch": 2.001337010111139, |
|
"grad_norm": 5.35257625579834, |
|
"learning_rate": 2.502018068509766e-05, |
|
"loss": 3.2881, |
|
"step": 239500 |
|
}, |
|
{ |
|
"epoch": 2.0055151667084483, |
|
"grad_norm": 4.52628231048584, |
|
"learning_rate": 2.496789911748714e-05, |
|
"loss": 3.1522, |
|
"step": 240000 |
|
}, |
|
{ |
|
"epoch": 2.0055151667084483, |
|
"eval_loss": 3.3466105461120605, |
|
"eval_runtime": 40.4274, |
|
"eval_samples_per_second": 110.371, |
|
"eval_steps_per_second": 13.803, |
|
"step": 240000 |
|
}, |
|
{ |
|
"epoch": 2.0096933233057577, |
|
"grad_norm": 5.243988037109375, |
|
"learning_rate": 2.491572211301184e-05, |
|
"loss": 3.1144, |
|
"step": 240500 |
|
}, |
|
{ |
|
"epoch": 2.0138714799030666, |
|
"grad_norm": 6.480501174926758, |
|
"learning_rate": 2.4863440545401316e-05, |
|
"loss": 3.1233, |
|
"step": 241000 |
|
}, |
|
{ |
|
"epoch": 2.0138714799030666, |
|
"eval_loss": 3.3396503925323486, |
|
"eval_runtime": 40.4455, |
|
"eval_samples_per_second": 110.321, |
|
"eval_steps_per_second": 13.796, |
|
"step": 241000 |
|
}, |
|
{ |
|
"epoch": 2.018049636500376, |
|
"grad_norm": 4.595026016235352, |
|
"learning_rate": 2.481115897779079e-05, |
|
"loss": 3.0936, |
|
"step": 241500 |
|
}, |
|
{ |
|
"epoch": 2.0222277930976853, |
|
"grad_norm": 7.027838706970215, |
|
"learning_rate": 2.4758877410180267e-05, |
|
"loss": 3.1178, |
|
"step": 242000 |
|
}, |
|
{ |
|
"epoch": 2.0222277930976853, |
|
"eval_loss": 3.3471105098724365, |
|
"eval_runtime": 40.4778, |
|
"eval_samples_per_second": 110.233, |
|
"eval_steps_per_second": 13.785, |
|
"step": 242000 |
|
}, |
|
{ |
|
"epoch": 2.0264059496949947, |
|
"grad_norm": 3.6396079063415527, |
|
"learning_rate": 2.4706700405704965e-05, |
|
"loss": 3.1851, |
|
"step": 242500 |
|
}, |
|
{ |
|
"epoch": 2.030584106292304, |
|
"grad_norm": 6.937821865081787, |
|
"learning_rate": 2.4654418838094444e-05, |
|
"loss": 3.0675, |
|
"step": 243000 |
|
}, |
|
{ |
|
"epoch": 2.030584106292304, |
|
"eval_loss": 3.3515665531158447, |
|
"eval_runtime": 40.4644, |
|
"eval_samples_per_second": 110.27, |
|
"eval_steps_per_second": 13.79, |
|
"step": 243000 |
|
}, |
|
{ |
|
"epoch": 2.034762262889613, |
|
"grad_norm": 6.9290008544921875, |
|
"learning_rate": 2.460213727048392e-05, |
|
"loss": 3.1355, |
|
"step": 243500 |
|
}, |
|
{ |
|
"epoch": 2.0389404194869223, |
|
"grad_norm": 4.02460241317749, |
|
"learning_rate": 2.4549960266008618e-05, |
|
"loss": 3.1461, |
|
"step": 244000 |
|
}, |
|
{ |
|
"epoch": 2.0389404194869223, |
|
"eval_loss": 3.341926336288452, |
|
"eval_runtime": 40.4246, |
|
"eval_samples_per_second": 110.378, |
|
"eval_steps_per_second": 13.803, |
|
"step": 244000 |
|
}, |
|
{ |
|
"epoch": 2.0431185760842316, |
|
"grad_norm": 8.292485237121582, |
|
"learning_rate": 2.4497678698398093e-05, |
|
"loss": 3.2199, |
|
"step": 244500 |
|
}, |
|
{ |
|
"epoch": 2.047296732681541, |
|
"grad_norm": 10.30803108215332, |
|
"learning_rate": 2.444539713078757e-05, |
|
"loss": 3.1664, |
|
"step": 245000 |
|
}, |
|
{ |
|
"epoch": 2.047296732681541, |
|
"eval_loss": 3.3483948707580566, |
|
"eval_runtime": 40.4677, |
|
"eval_samples_per_second": 110.261, |
|
"eval_steps_per_second": 13.789, |
|
"step": 245000 |
|
}, |
|
{ |
|
"epoch": 2.0514748892788504, |
|
"grad_norm": 4.300713539123535, |
|
"learning_rate": 2.4393115563177048e-05, |
|
"loss": 3.1249, |
|
"step": 245500 |
|
}, |
|
{ |
|
"epoch": 2.0556530458761593, |
|
"grad_norm": 4.996560096740723, |
|
"learning_rate": 2.4340833995566527e-05, |
|
"loss": 3.1182, |
|
"step": 246000 |
|
}, |
|
{ |
|
"epoch": 2.0556530458761593, |
|
"eval_loss": 3.342724323272705, |
|
"eval_runtime": 40.4703, |
|
"eval_samples_per_second": 110.254, |
|
"eval_steps_per_second": 13.788, |
|
"step": 246000 |
|
}, |
|
{ |
|
"epoch": 2.0598312024734686, |
|
"grad_norm": 4.595823287963867, |
|
"learning_rate": 2.4288552427956002e-05, |
|
"loss": 3.1257, |
|
"step": 246500 |
|
}, |
|
{ |
|
"epoch": 2.064009359070778, |
|
"grad_norm": 7.316824913024902, |
|
"learning_rate": 2.4236270860345478e-05, |
|
"loss": 3.2508, |
|
"step": 247000 |
|
}, |
|
{ |
|
"epoch": 2.064009359070778, |
|
"eval_loss": 3.341085910797119, |
|
"eval_runtime": 40.4526, |
|
"eval_samples_per_second": 110.302, |
|
"eval_steps_per_second": 13.794, |
|
"step": 247000 |
|
}, |
|
{ |
|
"epoch": 2.0681875156680873, |
|
"grad_norm": 4.907251358032227, |
|
"learning_rate": 2.4183989292734953e-05, |
|
"loss": 3.0809, |
|
"step": 247500 |
|
}, |
|
{ |
|
"epoch": 2.0723656722653967, |
|
"grad_norm": 5.387881755828857, |
|
"learning_rate": 2.413181228825965e-05, |
|
"loss": 3.1521, |
|
"step": 248000 |
|
}, |
|
{ |
|
"epoch": 2.0723656722653967, |
|
"eval_loss": 3.3404438495635986, |
|
"eval_runtime": 40.4564, |
|
"eval_samples_per_second": 110.292, |
|
"eval_steps_per_second": 13.793, |
|
"step": 248000 |
|
}, |
|
{ |
|
"epoch": 2.0765438288627056, |
|
"grad_norm": 5.49289608001709, |
|
"learning_rate": 2.407953072064913e-05, |
|
"loss": 3.2411, |
|
"step": 248500 |
|
}, |
|
{ |
|
"epoch": 2.080721985460015, |
|
"grad_norm": 4.194238662719727, |
|
"learning_rate": 2.4027249153038606e-05, |
|
"loss": 3.1179, |
|
"step": 249000 |
|
}, |
|
{ |
|
"epoch": 2.080721985460015, |
|
"eval_loss": 3.342294931411743, |
|
"eval_runtime": 40.4613, |
|
"eval_samples_per_second": 110.278, |
|
"eval_steps_per_second": 13.791, |
|
"step": 249000 |
|
}, |
|
{ |
|
"epoch": 2.0849001420573243, |
|
"grad_norm": 6.045317649841309, |
|
"learning_rate": 2.397496758542808e-05, |
|
"loss": 3.186, |
|
"step": 249500 |
|
}, |
|
{ |
|
"epoch": 2.0890782986546337, |
|
"grad_norm": 6.206670761108398, |
|
"learning_rate": 2.3922686017817557e-05, |
|
"loss": 3.0654, |
|
"step": 250000 |
|
}, |
|
{ |
|
"epoch": 2.0890782986546337, |
|
"eval_loss": 3.345398426055908, |
|
"eval_runtime": 40.4592, |
|
"eval_samples_per_second": 110.284, |
|
"eval_steps_per_second": 13.792, |
|
"step": 250000 |
|
}, |
|
{ |
|
"epoch": 2.093256455251943, |
|
"grad_norm": 4.87103796005249, |
|
"learning_rate": 2.3870404450207036e-05, |
|
"loss": 3.2034, |
|
"step": 250500 |
|
}, |
|
{ |
|
"epoch": 2.097434611849252, |
|
"grad_norm": 4.335954189300537, |
|
"learning_rate": 2.3818227445731734e-05, |
|
"loss": 3.2126, |
|
"step": 251000 |
|
}, |
|
{ |
|
"epoch": 2.097434611849252, |
|
"eval_loss": 3.340569019317627, |
|
"eval_runtime": 40.4548, |
|
"eval_samples_per_second": 110.296, |
|
"eval_steps_per_second": 13.793, |
|
"step": 251000 |
|
}, |
|
{ |
|
"epoch": 2.1016127684465613, |
|
"grad_norm": 6.09652042388916, |
|
"learning_rate": 2.376594587812121e-05, |
|
"loss": 3.1864, |
|
"step": 251500 |
|
}, |
|
{ |
|
"epoch": 2.1057909250438707, |
|
"grad_norm": 9.233617782592773, |
|
"learning_rate": 2.371366431051069e-05, |
|
"loss": 3.1725, |
|
"step": 252000 |
|
}, |
|
{ |
|
"epoch": 2.1057909250438707, |
|
"eval_loss": 3.342285394668579, |
|
"eval_runtime": 40.5048, |
|
"eval_samples_per_second": 110.16, |
|
"eval_steps_per_second": 13.776, |
|
"step": 252000 |
|
}, |
|
{ |
|
"epoch": 2.10996908164118, |
|
"grad_norm": 6.006067276000977, |
|
"learning_rate": 2.3661382742900164e-05, |
|
"loss": 3.1333, |
|
"step": 252500 |
|
}, |
|
{ |
|
"epoch": 2.1141472382384894, |
|
"grad_norm": 6.246615886688232, |
|
"learning_rate": 2.360910117528964e-05, |
|
"loss": 3.0531, |
|
"step": 253000 |
|
}, |
|
{ |
|
"epoch": 2.1141472382384894, |
|
"eval_loss": 3.3393425941467285, |
|
"eval_runtime": 40.4551, |
|
"eval_samples_per_second": 110.295, |
|
"eval_steps_per_second": 13.793, |
|
"step": 253000 |
|
}, |
|
{ |
|
"epoch": 2.1183253948357983, |
|
"grad_norm": 6.1417765617370605, |
|
"learning_rate": 2.355692417081434e-05, |
|
"loss": 3.1325, |
|
"step": 253500 |
|
}, |
|
{ |
|
"epoch": 2.1225035514331076, |
|
"grad_norm": 4.473926067352295, |
|
"learning_rate": 2.3504642603203817e-05, |
|
"loss": 3.1911, |
|
"step": 254000 |
|
}, |
|
{ |
|
"epoch": 2.1225035514331076, |
|
"eval_loss": 3.3351833820343018, |
|
"eval_runtime": 40.4613, |
|
"eval_samples_per_second": 110.278, |
|
"eval_steps_per_second": 13.791, |
|
"step": 254000 |
|
}, |
|
{ |
|
"epoch": 2.126681708030417, |
|
"grad_norm": 8.741314888000488, |
|
"learning_rate": 2.3452361035593293e-05, |
|
"loss": 3.1, |
|
"step": 254500 |
|
}, |
|
{ |
|
"epoch": 2.1308598646277264, |
|
"grad_norm": 7.776458740234375, |
|
"learning_rate": 2.3400079467982768e-05, |
|
"loss": 3.1181, |
|
"step": 255000 |
|
}, |
|
{ |
|
"epoch": 2.1308598646277264, |
|
"eval_loss": 3.3358802795410156, |
|
"eval_runtime": 40.4824, |
|
"eval_samples_per_second": 110.221, |
|
"eval_steps_per_second": 13.784, |
|
"step": 255000 |
|
}, |
|
{ |
|
"epoch": 2.1350380212250357, |
|
"grad_norm": 4.580501556396484, |
|
"learning_rate": 2.3347902463507467e-05, |
|
"loss": 3.1562, |
|
"step": 255500 |
|
}, |
|
{ |
|
"epoch": 2.1392161778223446, |
|
"grad_norm": 4.885230541229248, |
|
"learning_rate": 2.3295620895896946e-05, |
|
"loss": 3.1109, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 2.1392161778223446, |
|
"eval_loss": 3.3395299911499023, |
|
"eval_runtime": 40.4629, |
|
"eval_samples_per_second": 110.274, |
|
"eval_steps_per_second": 13.79, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 2.143394334419654, |
|
"grad_norm": 5.507672309875488, |
|
"learning_rate": 2.324333932828642e-05, |
|
"loss": 3.2034, |
|
"step": 256500 |
|
}, |
|
{ |
|
"epoch": 2.1475724910169633, |
|
"grad_norm": 4.263452529907227, |
|
"learning_rate": 2.3191057760675897e-05, |
|
"loss": 3.1226, |
|
"step": 257000 |
|
}, |
|
{ |
|
"epoch": 2.1475724910169633, |
|
"eval_loss": 3.333815336227417, |
|
"eval_runtime": 40.4332, |
|
"eval_samples_per_second": 110.355, |
|
"eval_steps_per_second": 13.801, |
|
"step": 257000 |
|
}, |
|
{ |
|
"epoch": 2.1517506476142727, |
|
"grad_norm": 4.941694736480713, |
|
"learning_rate": 2.3138776193065372e-05, |
|
"loss": 3.1588, |
|
"step": 257500 |
|
}, |
|
{ |
|
"epoch": 2.155928804211582, |
|
"grad_norm": 3.2227394580841064, |
|
"learning_rate": 2.308659918859007e-05, |
|
"loss": 3.0437, |
|
"step": 258000 |
|
}, |
|
{ |
|
"epoch": 2.155928804211582, |
|
"eval_loss": 3.3339126110076904, |
|
"eval_runtime": 40.4431, |
|
"eval_samples_per_second": 110.328, |
|
"eval_steps_per_second": 13.797, |
|
"step": 258000 |
|
}, |
|
{ |
|
"epoch": 2.160106960808891, |
|
"grad_norm": 6.820015907287598, |
|
"learning_rate": 2.303431762097955e-05, |
|
"loss": 3.1968, |
|
"step": 258500 |
|
}, |
|
{ |
|
"epoch": 2.1642851174062003, |
|
"grad_norm": 7.1376800537109375, |
|
"learning_rate": 2.2982036053369025e-05, |
|
"loss": 3.1401, |
|
"step": 259000 |
|
}, |
|
{ |
|
"epoch": 2.1642851174062003, |
|
"eval_loss": 3.3317220211029053, |
|
"eval_runtime": 40.454, |
|
"eval_samples_per_second": 110.298, |
|
"eval_steps_per_second": 13.793, |
|
"step": 259000 |
|
}, |
|
{ |
|
"epoch": 2.1684632740035097, |
|
"grad_norm": 10.74910831451416, |
|
"learning_rate": 2.2929754485758504e-05, |
|
"loss": 3.1429, |
|
"step": 259500 |
|
}, |
|
{ |
|
"epoch": 2.172641430600819, |
|
"grad_norm": 5.5305585861206055, |
|
"learning_rate": 2.28775774812832e-05, |
|
"loss": 3.1238, |
|
"step": 260000 |
|
}, |
|
{ |
|
"epoch": 2.172641430600819, |
|
"eval_loss": 3.33156156539917, |
|
"eval_runtime": 40.4787, |
|
"eval_samples_per_second": 110.231, |
|
"eval_steps_per_second": 13.785, |
|
"step": 260000 |
|
}, |
|
{ |
|
"epoch": 2.1768195871981284, |
|
"grad_norm": 4.4482927322387695, |
|
"learning_rate": 2.2825295913672674e-05, |
|
"loss": 3.1677, |
|
"step": 260500 |
|
}, |
|
{ |
|
"epoch": 2.1809977437954373, |
|
"grad_norm": 7.104944229125977, |
|
"learning_rate": 2.2773014346062153e-05, |
|
"loss": 3.1312, |
|
"step": 261000 |
|
}, |
|
{ |
|
"epoch": 2.1809977437954373, |
|
"eval_loss": 3.3333513736724854, |
|
"eval_runtime": 40.4765, |
|
"eval_samples_per_second": 110.237, |
|
"eval_steps_per_second": 13.786, |
|
"step": 261000 |
|
}, |
|
{ |
|
"epoch": 2.1851759003927467, |
|
"grad_norm": 4.176535129547119, |
|
"learning_rate": 2.2720732778451632e-05, |
|
"loss": 3.1194, |
|
"step": 261500 |
|
}, |
|
{ |
|
"epoch": 2.189354056990056, |
|
"grad_norm": 10.3728609085083, |
|
"learning_rate": 2.2668451210841108e-05, |
|
"loss": 3.1206, |
|
"step": 262000 |
|
}, |
|
{ |
|
"epoch": 2.189354056990056, |
|
"eval_loss": 3.3327999114990234, |
|
"eval_runtime": 40.4475, |
|
"eval_samples_per_second": 110.316, |
|
"eval_steps_per_second": 13.796, |
|
"step": 262000 |
|
}, |
|
{ |
|
"epoch": 2.1935322135873654, |
|
"grad_norm": 5.367616653442383, |
|
"learning_rate": 2.2616274206365803e-05, |
|
"loss": 3.1405, |
|
"step": 262500 |
|
}, |
|
{ |
|
"epoch": 2.1977103701846747, |
|
"grad_norm": 4.320235729217529, |
|
"learning_rate": 2.256399263875528e-05, |
|
"loss": 3.0658, |
|
"step": 263000 |
|
}, |
|
{ |
|
"epoch": 2.1977103701846747, |
|
"eval_loss": 3.3294503688812256, |
|
"eval_runtime": 40.4559, |
|
"eval_samples_per_second": 110.293, |
|
"eval_steps_per_second": 13.793, |
|
"step": 263000 |
|
}, |
|
{ |
|
"epoch": 2.2018885267819837, |
|
"grad_norm": 6.458812236785889, |
|
"learning_rate": 2.2511711071144757e-05, |
|
"loss": 3.1988, |
|
"step": 263500 |
|
}, |
|
{ |
|
"epoch": 2.206066683379293, |
|
"grad_norm": 6.220398426055908, |
|
"learning_rate": 2.2459429503534236e-05, |
|
"loss": 3.1453, |
|
"step": 264000 |
|
}, |
|
{ |
|
"epoch": 2.206066683379293, |
|
"eval_loss": 3.3279049396514893, |
|
"eval_runtime": 40.4446, |
|
"eval_samples_per_second": 110.324, |
|
"eval_steps_per_second": 13.797, |
|
"step": 264000 |
|
}, |
|
{ |
|
"epoch": 2.2102448399766024, |
|
"grad_norm": 4.93303108215332, |
|
"learning_rate": 2.2407252499058934e-05, |
|
"loss": 3.1548, |
|
"step": 264500 |
|
}, |
|
{ |
|
"epoch": 2.2144229965739117, |
|
"grad_norm": 4.953177452087402, |
|
"learning_rate": 2.235497093144841e-05, |
|
"loss": 3.1606, |
|
"step": 265000 |
|
}, |
|
{ |
|
"epoch": 2.2144229965739117, |
|
"eval_loss": 3.3168184757232666, |
|
"eval_runtime": 40.4624, |
|
"eval_samples_per_second": 110.275, |
|
"eval_steps_per_second": 13.791, |
|
"step": 265000 |
|
}, |
|
{ |
|
"epoch": 2.218601153171221, |
|
"grad_norm": 7.259415626525879, |
|
"learning_rate": 2.2302689363837885e-05, |
|
"loss": 3.1821, |
|
"step": 265500 |
|
}, |
|
{ |
|
"epoch": 2.22277930976853, |
|
"grad_norm": 3.314384937286377, |
|
"learning_rate": 2.2250407796227364e-05, |
|
"loss": 3.1333, |
|
"step": 266000 |
|
}, |
|
{ |
|
"epoch": 2.22277930976853, |
|
"eval_loss": 3.323383092880249, |
|
"eval_runtime": 40.4626, |
|
"eval_samples_per_second": 110.275, |
|
"eval_steps_per_second": 13.791, |
|
"step": 266000 |
|
}, |
|
{ |
|
"epoch": 2.2269574663658394, |
|
"grad_norm": 6.855319499969482, |
|
"learning_rate": 2.219812622861684e-05, |
|
"loss": 3.1626, |
|
"step": 266500 |
|
}, |
|
{ |
|
"epoch": 2.2311356229631487, |
|
"grad_norm": 8.510984420776367, |
|
"learning_rate": 2.2145949224141538e-05, |
|
"loss": 3.1123, |
|
"step": 267000 |
|
}, |
|
{ |
|
"epoch": 2.2311356229631487, |
|
"eval_loss": 3.3220582008361816, |
|
"eval_runtime": 40.4492, |
|
"eval_samples_per_second": 110.311, |
|
"eval_steps_per_second": 13.795, |
|
"step": 267000 |
|
}, |
|
{ |
|
"epoch": 2.235313779560458, |
|
"grad_norm": 5.909183502197266, |
|
"learning_rate": 2.2093667656531014e-05, |
|
"loss": 3.1804, |
|
"step": 267500 |
|
}, |
|
{ |
|
"epoch": 2.2394919361577674, |
|
"grad_norm": 4.291212558746338, |
|
"learning_rate": 2.204138608892049e-05, |
|
"loss": 3.1723, |
|
"step": 268000 |
|
}, |
|
{ |
|
"epoch": 2.2394919361577674, |
|
"eval_loss": 3.3167879581451416, |
|
"eval_runtime": 40.4755, |
|
"eval_samples_per_second": 110.24, |
|
"eval_steps_per_second": 13.786, |
|
"step": 268000 |
|
}, |
|
{ |
|
"epoch": 2.2436700927550763, |
|
"grad_norm": 6.5470404624938965, |
|
"learning_rate": 2.1989104521309968e-05, |
|
"loss": 3.0986, |
|
"step": 268500 |
|
}, |
|
{ |
|
"epoch": 2.2478482493523857, |
|
"grad_norm": 3.792804718017578, |
|
"learning_rate": 2.1936822953699447e-05, |
|
"loss": 3.1596, |
|
"step": 269000 |
|
}, |
|
{ |
|
"epoch": 2.2478482493523857, |
|
"eval_loss": 3.3173413276672363, |
|
"eval_runtime": 40.4588, |
|
"eval_samples_per_second": 110.285, |
|
"eval_steps_per_second": 13.792, |
|
"step": 269000 |
|
}, |
|
{ |
|
"epoch": 2.252026405949695, |
|
"grad_norm": 4.821345806121826, |
|
"learning_rate": 2.1884645949224142e-05, |
|
"loss": 3.1677, |
|
"step": 269500 |
|
}, |
|
{ |
|
"epoch": 2.2562045625470044, |
|
"grad_norm": 11.906582832336426, |
|
"learning_rate": 2.1832364381613618e-05, |
|
"loss": 3.1157, |
|
"step": 270000 |
|
}, |
|
{ |
|
"epoch": 2.2562045625470044, |
|
"eval_loss": 3.321375846862793, |
|
"eval_runtime": 40.4674, |
|
"eval_samples_per_second": 110.262, |
|
"eval_steps_per_second": 13.789, |
|
"step": 270000 |
|
}, |
|
{ |
|
"epoch": 2.2603827191443138, |
|
"grad_norm": 5.045265197753906, |
|
"learning_rate": 2.1780082814003097e-05, |
|
"loss": 3.0692, |
|
"step": 270500 |
|
}, |
|
{ |
|
"epoch": 2.2645608757416227, |
|
"grad_norm": 8.908251762390137, |
|
"learning_rate": 2.1727801246392572e-05, |
|
"loss": 3.0644, |
|
"step": 271000 |
|
}, |
|
{ |
|
"epoch": 2.2645608757416227, |
|
"eval_loss": 3.3190419673919678, |
|
"eval_runtime": 40.4315, |
|
"eval_samples_per_second": 110.359, |
|
"eval_steps_per_second": 13.801, |
|
"step": 271000 |
|
}, |
|
{ |
|
"epoch": 2.268739032338932, |
|
"grad_norm": 7.89150857925415, |
|
"learning_rate": 2.167551967878205e-05, |
|
"loss": 3.0926, |
|
"step": 271500 |
|
}, |
|
{ |
|
"epoch": 2.2729171889362414, |
|
"grad_norm": 4.523589134216309, |
|
"learning_rate": 2.1623238111171526e-05, |
|
"loss": 3.1457, |
|
"step": 272000 |
|
}, |
|
{ |
|
"epoch": 2.2729171889362414, |
|
"eval_loss": 3.316985845565796, |
|
"eval_runtime": 40.4542, |
|
"eval_samples_per_second": 110.298, |
|
"eval_steps_per_second": 13.793, |
|
"step": 272000 |
|
}, |
|
{ |
|
"epoch": 2.2770953455335508, |
|
"grad_norm": 3.6266989707946777, |
|
"learning_rate": 2.1571061106696225e-05, |
|
"loss": 3.1394, |
|
"step": 272500 |
|
}, |
|
{ |
|
"epoch": 2.28127350213086, |
|
"grad_norm": 3.1620638370513916, |
|
"learning_rate": 2.15187795390857e-05, |
|
"loss": 3.1771, |
|
"step": 273000 |
|
}, |
|
{ |
|
"epoch": 2.28127350213086, |
|
"eval_loss": 3.316102981567383, |
|
"eval_runtime": 40.5105, |
|
"eval_samples_per_second": 110.144, |
|
"eval_steps_per_second": 13.774, |
|
"step": 273000 |
|
}, |
|
{ |
|
"epoch": 2.285451658728169, |
|
"grad_norm": 12.058000564575195, |
|
"learning_rate": 2.1466497971475176e-05, |
|
"loss": 3.148, |
|
"step": 273500 |
|
}, |
|
{ |
|
"epoch": 2.2896298153254784, |
|
"grad_norm": 5.249194145202637, |
|
"learning_rate": 2.1414216403864655e-05, |
|
"loss": 3.1185, |
|
"step": 274000 |
|
}, |
|
{ |
|
"epoch": 2.2896298153254784, |
|
"eval_loss": 3.31988787651062, |
|
"eval_runtime": 40.4699, |
|
"eval_samples_per_second": 110.255, |
|
"eval_steps_per_second": 13.788, |
|
"step": 274000 |
|
}, |
|
{ |
|
"epoch": 2.2938079719227877, |
|
"grad_norm": 7.467369079589844, |
|
"learning_rate": 2.1362039399389353e-05, |
|
"loss": 3.1526, |
|
"step": 274500 |
|
}, |
|
{ |
|
"epoch": 2.297986128520097, |
|
"grad_norm": 6.099387168884277, |
|
"learning_rate": 2.130975783177883e-05, |
|
"loss": 3.1502, |
|
"step": 275000 |
|
}, |
|
{ |
|
"epoch": 2.297986128520097, |
|
"eval_loss": 3.316650390625, |
|
"eval_runtime": 40.4528, |
|
"eval_samples_per_second": 110.301, |
|
"eval_steps_per_second": 13.794, |
|
"step": 275000 |
|
}, |
|
{ |
|
"epoch": 2.302164285117406, |
|
"grad_norm": 4.817805767059326, |
|
"learning_rate": 2.1257476264168304e-05, |
|
"loss": 3.1442, |
|
"step": 275500 |
|
}, |
|
{ |
|
"epoch": 2.3063424417147154, |
|
"grad_norm": 5.789041042327881, |
|
"learning_rate": 2.120519469655778e-05, |
|
"loss": 3.134, |
|
"step": 276000 |
|
}, |
|
{ |
|
"epoch": 2.3063424417147154, |
|
"eval_loss": 3.3131399154663086, |
|
"eval_runtime": 40.4546, |
|
"eval_samples_per_second": 110.296, |
|
"eval_steps_per_second": 13.793, |
|
"step": 276000 |
|
}, |
|
{ |
|
"epoch": 2.3105205983120247, |
|
"grad_norm": 5.171199321746826, |
|
"learning_rate": 2.1152913128947262e-05, |
|
"loss": 3.108, |
|
"step": 276500 |
|
}, |
|
{ |
|
"epoch": 2.314698754909334, |
|
"grad_norm": 5.447643280029297, |
|
"learning_rate": 2.1100736124471957e-05, |
|
"loss": 3.1813, |
|
"step": 277000 |
|
}, |
|
{ |
|
"epoch": 2.314698754909334, |
|
"eval_loss": 3.313253402709961, |
|
"eval_runtime": 40.5148, |
|
"eval_samples_per_second": 110.133, |
|
"eval_steps_per_second": 13.773, |
|
"step": 277000 |
|
}, |
|
{ |
|
"epoch": 2.3188769115066434, |
|
"grad_norm": 4.172455787658691, |
|
"learning_rate": 2.1048454556861433e-05, |
|
"loss": 3.1188, |
|
"step": 277500 |
|
}, |
|
{ |
|
"epoch": 2.3230550681039523, |
|
"grad_norm": 5.615136623382568, |
|
"learning_rate": 2.099617298925091e-05, |
|
"loss": 3.1557, |
|
"step": 278000 |
|
}, |
|
{ |
|
"epoch": 2.3230550681039523, |
|
"eval_loss": 3.310053586959839, |
|
"eval_runtime": 40.4633, |
|
"eval_samples_per_second": 110.273, |
|
"eval_steps_per_second": 13.79, |
|
"step": 278000 |
|
}, |
|
{ |
|
"epoch": 2.3272332247012617, |
|
"grad_norm": 3.739656448364258, |
|
"learning_rate": 2.0943891421640387e-05, |
|
"loss": 3.2203, |
|
"step": 278500 |
|
}, |
|
{ |
|
"epoch": 2.331411381298571, |
|
"grad_norm": 6.395985126495361, |
|
"learning_rate": 2.0891609854029866e-05, |
|
"loss": 3.1628, |
|
"step": 279000 |
|
}, |
|
{ |
|
"epoch": 2.331411381298571, |
|
"eval_loss": 3.306927442550659, |
|
"eval_runtime": 40.4619, |
|
"eval_samples_per_second": 110.277, |
|
"eval_steps_per_second": 13.791, |
|
"step": 279000 |
|
}, |
|
{ |
|
"epoch": 2.3355895378958804, |
|
"grad_norm": 7.713507652282715, |
|
"learning_rate": 2.0839432849554564e-05, |
|
"loss": 3.0996, |
|
"step": 279500 |
|
}, |
|
{ |
|
"epoch": 2.3397676944931898, |
|
"grad_norm": 5.105501651763916, |
|
"learning_rate": 2.078715128194404e-05, |
|
"loss": 3.0756, |
|
"step": 280000 |
|
}, |
|
{ |
|
"epoch": 2.3397676944931898, |
|
"eval_loss": 3.3022100925445557, |
|
"eval_runtime": 40.4441, |
|
"eval_samples_per_second": 110.325, |
|
"eval_steps_per_second": 13.797, |
|
"step": 280000 |
|
}, |
|
{ |
|
"epoch": 2.3439458510904987, |
|
"grad_norm": 4.082066535949707, |
|
"learning_rate": 2.0734869714333515e-05, |
|
"loss": 3.0481, |
|
"step": 280500 |
|
}, |
|
{ |
|
"epoch": 2.348124007687808, |
|
"grad_norm": 5.3705596923828125, |
|
"learning_rate": 2.068258814672299e-05, |
|
"loss": 3.0449, |
|
"step": 281000 |
|
}, |
|
{ |
|
"epoch": 2.348124007687808, |
|
"eval_loss": 3.301225185394287, |
|
"eval_runtime": 40.4433, |
|
"eval_samples_per_second": 110.327, |
|
"eval_steps_per_second": 13.797, |
|
"step": 281000 |
|
}, |
|
{ |
|
"epoch": 2.3523021642851174, |
|
"grad_norm": 6.272683620452881, |
|
"learning_rate": 2.063041114224769e-05, |
|
"loss": 3.0657, |
|
"step": 281500 |
|
}, |
|
{ |
|
"epoch": 2.3564803208824268, |
|
"grad_norm": 7.7794976234436035, |
|
"learning_rate": 2.0578129574637168e-05, |
|
"loss": 3.0655, |
|
"step": 282000 |
|
}, |
|
{ |
|
"epoch": 2.3564803208824268, |
|
"eval_loss": 3.302551507949829, |
|
"eval_runtime": 40.4599, |
|
"eval_samples_per_second": 110.282, |
|
"eval_steps_per_second": 13.791, |
|
"step": 282000 |
|
}, |
|
{ |
|
"epoch": 2.360658477479736, |
|
"grad_norm": 8.646065711975098, |
|
"learning_rate": 2.0525848007026644e-05, |
|
"loss": 3.0216, |
|
"step": 282500 |
|
}, |
|
{ |
|
"epoch": 2.364836634077045, |
|
"grad_norm": 5.221062183380127, |
|
"learning_rate": 2.047356643941612e-05, |
|
"loss": 3.1117, |
|
"step": 283000 |
|
}, |
|
{ |
|
"epoch": 2.364836634077045, |
|
"eval_loss": 3.297528028488159, |
|
"eval_runtime": 40.4511, |
|
"eval_samples_per_second": 110.306, |
|
"eval_steps_per_second": 13.794, |
|
"step": 283000 |
|
}, |
|
{ |
|
"epoch": 2.3690147906743544, |
|
"grad_norm": 10.375117301940918, |
|
"learning_rate": 2.0421284871805595e-05, |
|
"loss": 3.1431, |
|
"step": 283500 |
|
}, |
|
{ |
|
"epoch": 2.3731929472716637, |
|
"grad_norm": 8.012441635131836, |
|
"learning_rate": 2.0369107867330296e-05, |
|
"loss": 3.1707, |
|
"step": 284000 |
|
}, |
|
{ |
|
"epoch": 2.3731929472716637, |
|
"eval_loss": 3.2980761528015137, |
|
"eval_runtime": 40.4552, |
|
"eval_samples_per_second": 110.295, |
|
"eval_steps_per_second": 13.793, |
|
"step": 284000 |
|
}, |
|
{ |
|
"epoch": 2.377371103868973, |
|
"grad_norm": 3.3558294773101807, |
|
"learning_rate": 2.0316826299719772e-05, |
|
"loss": 3.1075, |
|
"step": 284500 |
|
}, |
|
{ |
|
"epoch": 2.3815492604662825, |
|
"grad_norm": 4.599654674530029, |
|
"learning_rate": 2.0264544732109248e-05, |
|
"loss": 3.1749, |
|
"step": 285000 |
|
}, |
|
{ |
|
"epoch": 2.3815492604662825, |
|
"eval_loss": 3.291957139968872, |
|
"eval_runtime": 40.4393, |
|
"eval_samples_per_second": 110.338, |
|
"eval_steps_per_second": 13.798, |
|
"step": 285000 |
|
}, |
|
{ |
|
"epoch": 2.3857274170635914, |
|
"grad_norm": 5.334101676940918, |
|
"learning_rate": 2.0212263164498726e-05, |
|
"loss": 3.0942, |
|
"step": 285500 |
|
}, |
|
{ |
|
"epoch": 2.3899055736609007, |
|
"grad_norm": 6.905644416809082, |
|
"learning_rate": 2.016008616002342e-05, |
|
"loss": 3.1453, |
|
"step": 286000 |
|
}, |
|
{ |
|
"epoch": 2.3899055736609007, |
|
"eval_loss": 3.2937710285186768, |
|
"eval_runtime": 40.4861, |
|
"eval_samples_per_second": 110.211, |
|
"eval_steps_per_second": 13.783, |
|
"step": 286000 |
|
}, |
|
{ |
|
"epoch": 2.39408373025821, |
|
"grad_norm": 5.987873554229736, |
|
"learning_rate": 2.01078045924129e-05, |
|
"loss": 3.1466, |
|
"step": 286500 |
|
}, |
|
{ |
|
"epoch": 2.3982618868555194, |
|
"grad_norm": 6.2963361740112305, |
|
"learning_rate": 2.005552302480238e-05, |
|
"loss": 3.1464, |
|
"step": 287000 |
|
}, |
|
{ |
|
"epoch": 2.3982618868555194, |
|
"eval_loss": 3.2935688495635986, |
|
"eval_runtime": 40.4586, |
|
"eval_samples_per_second": 110.286, |
|
"eval_steps_per_second": 13.792, |
|
"step": 287000 |
|
}, |
|
{ |
|
"epoch": 2.402440043452829, |
|
"grad_norm": 4.570940971374512, |
|
"learning_rate": 2.0003241457191855e-05, |
|
"loss": 3.0698, |
|
"step": 287500 |
|
}, |
|
{ |
|
"epoch": 2.4066182000501377, |
|
"grad_norm": 3.961766242980957, |
|
"learning_rate": 1.995106445271655e-05, |
|
"loss": 3.11, |
|
"step": 288000 |
|
}, |
|
{ |
|
"epoch": 2.4066182000501377, |
|
"eval_loss": 3.3014984130859375, |
|
"eval_runtime": 40.4731, |
|
"eval_samples_per_second": 110.246, |
|
"eval_steps_per_second": 13.787, |
|
"step": 288000 |
|
}, |
|
{ |
|
"epoch": 2.410796356647447, |
|
"grad_norm": 3.297183036804199, |
|
"learning_rate": 1.989878288510603e-05, |
|
"loss": 3.1153, |
|
"step": 288500 |
|
}, |
|
{ |
|
"epoch": 2.4149745132447564, |
|
"grad_norm": 3.5967624187469482, |
|
"learning_rate": 1.9846501317495504e-05, |
|
"loss": 3.1049, |
|
"step": 289000 |
|
}, |
|
{ |
|
"epoch": 2.4149745132447564, |
|
"eval_loss": 3.2914204597473145, |
|
"eval_runtime": 40.4566, |
|
"eval_samples_per_second": 110.291, |
|
"eval_steps_per_second": 13.793, |
|
"step": 289000 |
|
}, |
|
{ |
|
"epoch": 2.419152669842066, |
|
"grad_norm": 5.414641857147217, |
|
"learning_rate": 1.9794219749884983e-05, |
|
"loss": 3.1524, |
|
"step": 289500 |
|
}, |
|
{ |
|
"epoch": 2.423330826439375, |
|
"grad_norm": 9.401138305664062, |
|
"learning_rate": 1.974204274540968e-05, |
|
"loss": 3.1139, |
|
"step": 290000 |
|
}, |
|
{ |
|
"epoch": 2.423330826439375, |
|
"eval_loss": 3.2959020137786865, |
|
"eval_runtime": 40.456, |
|
"eval_samples_per_second": 110.293, |
|
"eval_steps_per_second": 13.793, |
|
"step": 290000 |
|
}, |
|
{ |
|
"epoch": 2.427508983036684, |
|
"grad_norm": 6.612576961517334, |
|
"learning_rate": 1.9689761177799157e-05, |
|
"loss": 3.1063, |
|
"step": 290500 |
|
}, |
|
{ |
|
"epoch": 2.4316871396339934, |
|
"grad_norm": 5.513810157775879, |
|
"learning_rate": 1.9637479610188633e-05, |
|
"loss": 3.1631, |
|
"step": 291000 |
|
}, |
|
{ |
|
"epoch": 2.4316871396339934, |
|
"eval_loss": 3.2913076877593994, |
|
"eval_runtime": 40.4983, |
|
"eval_samples_per_second": 110.177, |
|
"eval_steps_per_second": 13.778, |
|
"step": 291000 |
|
}, |
|
{ |
|
"epoch": 2.4358652962313028, |
|
"grad_norm": 4.694360733032227, |
|
"learning_rate": 1.9585198042578108e-05, |
|
"loss": 3.1455, |
|
"step": 291500 |
|
}, |
|
{ |
|
"epoch": 2.440043452828612, |
|
"grad_norm": 7.717827320098877, |
|
"learning_rate": 1.9532916474967587e-05, |
|
"loss": 3.1298, |
|
"step": 292000 |
|
}, |
|
{ |
|
"epoch": 2.440043452828612, |
|
"eval_loss": 3.290560483932495, |
|
"eval_runtime": 40.466, |
|
"eval_samples_per_second": 110.265, |
|
"eval_steps_per_second": 13.789, |
|
"step": 292000 |
|
}, |
|
{ |
|
"epoch": 2.444221609425921, |
|
"grad_norm": 5.080421447753906, |
|
"learning_rate": 1.9480634907357062e-05, |
|
"loss": 3.0543, |
|
"step": 292500 |
|
}, |
|
{ |
|
"epoch": 2.4483997660232304, |
|
"grad_norm": 4.552901744842529, |
|
"learning_rate": 1.942845790288176e-05, |
|
"loss": 3.1352, |
|
"step": 293000 |
|
}, |
|
{ |
|
"epoch": 2.4483997660232304, |
|
"eval_loss": 3.2912685871124268, |
|
"eval_runtime": 40.4768, |
|
"eval_samples_per_second": 110.236, |
|
"eval_steps_per_second": 13.786, |
|
"step": 293000 |
|
}, |
|
{ |
|
"epoch": 2.4525779226205398, |
|
"grad_norm": 15.8800630569458, |
|
"learning_rate": 1.9376176335271236e-05, |
|
"loss": 3.1242, |
|
"step": 293500 |
|
}, |
|
{ |
|
"epoch": 2.456756079217849, |
|
"grad_norm": 4.738433837890625, |
|
"learning_rate": 1.9323894767660712e-05, |
|
"loss": 3.0613, |
|
"step": 294000 |
|
}, |
|
{ |
|
"epoch": 2.456756079217849, |
|
"eval_loss": 3.296588182449341, |
|
"eval_runtime": 40.4568, |
|
"eval_samples_per_second": 110.291, |
|
"eval_steps_per_second": 13.792, |
|
"step": 294000 |
|
}, |
|
{ |
|
"epoch": 2.4609342358151585, |
|
"grad_norm": 7.438526630401611, |
|
"learning_rate": 1.9271613200050194e-05, |
|
"loss": 3.1276, |
|
"step": 294500 |
|
}, |
|
{ |
|
"epoch": 2.4651123924124674, |
|
"grad_norm": 7.734090328216553, |
|
"learning_rate": 1.921943619557489e-05, |
|
"loss": 3.1559, |
|
"step": 295000 |
|
}, |
|
{ |
|
"epoch": 2.4651123924124674, |
|
"eval_loss": 3.2927656173706055, |
|
"eval_runtime": 40.5018, |
|
"eval_samples_per_second": 110.168, |
|
"eval_steps_per_second": 13.777, |
|
"step": 295000 |
|
}, |
|
{ |
|
"epoch": 2.4692905490097767, |
|
"grad_norm": 9.876496315002441, |
|
"learning_rate": 1.9167154627964365e-05, |
|
"loss": 3.119, |
|
"step": 295500 |
|
}, |
|
{ |
|
"epoch": 2.473468705607086, |
|
"grad_norm": 11.522672653198242, |
|
"learning_rate": 1.9114873060353844e-05, |
|
"loss": 3.2179, |
|
"step": 296000 |
|
}, |
|
{ |
|
"epoch": 2.473468705607086, |
|
"eval_loss": 3.295011281967163, |
|
"eval_runtime": 40.4852, |
|
"eval_samples_per_second": 110.213, |
|
"eval_steps_per_second": 13.783, |
|
"step": 296000 |
|
}, |
|
{ |
|
"epoch": 2.4776468622043955, |
|
"grad_norm": 4.019059181213379, |
|
"learning_rate": 1.906259149274332e-05, |
|
"loss": 3.1037, |
|
"step": 296500 |
|
}, |
|
{ |
|
"epoch": 2.481825018801705, |
|
"grad_norm": 3.5689988136291504, |
|
"learning_rate": 1.9010309925132798e-05, |
|
"loss": 3.1687, |
|
"step": 297000 |
|
}, |
|
{ |
|
"epoch": 2.481825018801705, |
|
"eval_loss": 3.291651964187622, |
|
"eval_runtime": 40.4797, |
|
"eval_samples_per_second": 110.228, |
|
"eval_steps_per_second": 13.785, |
|
"step": 297000 |
|
}, |
|
{ |
|
"epoch": 2.4860031753990137, |
|
"grad_norm": 9.938336372375488, |
|
"learning_rate": 1.8958132920657493e-05, |
|
"loss": 3.1607, |
|
"step": 297500 |
|
}, |
|
{ |
|
"epoch": 2.490181331996323, |
|
"grad_norm": 5.011236667633057, |
|
"learning_rate": 1.8905851353046972e-05, |
|
"loss": 3.0375, |
|
"step": 298000 |
|
}, |
|
{ |
|
"epoch": 2.490181331996323, |
|
"eval_loss": 3.2923643589019775, |
|
"eval_runtime": 40.4644, |
|
"eval_samples_per_second": 110.27, |
|
"eval_steps_per_second": 13.79, |
|
"step": 298000 |
|
}, |
|
{ |
|
"epoch": 2.4943594885936324, |
|
"grad_norm": 4.473458766937256, |
|
"learning_rate": 1.8853569785436447e-05, |
|
"loss": 3.1018, |
|
"step": 298500 |
|
}, |
|
{ |
|
"epoch": 2.498537645190942, |
|
"grad_norm": 8.452305793762207, |
|
"learning_rate": 1.8801288217825923e-05, |
|
"loss": 3.1135, |
|
"step": 299000 |
|
}, |
|
{ |
|
"epoch": 2.498537645190942, |
|
"eval_loss": 3.2886199951171875, |
|
"eval_runtime": 40.4752, |
|
"eval_samples_per_second": 110.24, |
|
"eval_steps_per_second": 13.786, |
|
"step": 299000 |
|
}, |
|
{ |
|
"epoch": 2.502715801788251, |
|
"grad_norm": 7.06476354598999, |
|
"learning_rate": 1.874911121335062e-05, |
|
"loss": 3.133, |
|
"step": 299500 |
|
}, |
|
{ |
|
"epoch": 2.50689395838556, |
|
"grad_norm": 5.118690490722656, |
|
"learning_rate": 1.86968296457401e-05, |
|
"loss": 3.0742, |
|
"step": 300000 |
|
}, |
|
{ |
|
"epoch": 2.50689395838556, |
|
"eval_loss": 3.2926464080810547, |
|
"eval_runtime": 40.4914, |
|
"eval_samples_per_second": 110.196, |
|
"eval_steps_per_second": 13.781, |
|
"step": 300000 |
|
} |
|
], |
|
"logging_steps": 500, |
|
"max_steps": 478680, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 4, |
|
"save_steps": 2000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 6.357960662471148e+17, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|