|
{ |
|
"best_metric": 1.0460100173950195, |
|
"best_model_checkpoint": "miner_id_24/checkpoint-200", |
|
"epoch": 1.322314049586777, |
|
"eval_steps": 50, |
|
"global_step": 200, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.006611570247933884, |
|
"grad_norm": 0.9164348840713501, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 4.3726, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.006611570247933884, |
|
"eval_loss": 1.6257771253585815, |
|
"eval_runtime": 14.0709, |
|
"eval_samples_per_second": 18.123, |
|
"eval_steps_per_second": 9.097, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.013223140495867768, |
|
"grad_norm": 0.8439459800720215, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 4.5082, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.019834710743801654, |
|
"grad_norm": 0.8232372403144836, |
|
"learning_rate": 1e-05, |
|
"loss": 4.7082, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.026446280991735537, |
|
"grad_norm": 1.0905650854110718, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 4.778, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.03305785123966942, |
|
"grad_norm": 0.9605323672294617, |
|
"learning_rate": 1.6666666666666667e-05, |
|
"loss": 5.4669, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.03966942148760331, |
|
"grad_norm": 1.00596022605896, |
|
"learning_rate": 2e-05, |
|
"loss": 4.9968, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.04628099173553719, |
|
"grad_norm": 1.1926850080490112, |
|
"learning_rate": 2.3333333333333336e-05, |
|
"loss": 4.9989, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.05289256198347107, |
|
"grad_norm": 1.3035999536514282, |
|
"learning_rate": 2.6666666666666667e-05, |
|
"loss": 5.3719, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.05950413223140496, |
|
"grad_norm": 1.09066641330719, |
|
"learning_rate": 3e-05, |
|
"loss": 5.2938, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.06611570247933884, |
|
"grad_norm": 1.2595545053482056, |
|
"learning_rate": 3.3333333333333335e-05, |
|
"loss": 5.4152, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.07272727272727272, |
|
"grad_norm": 1.241004228591919, |
|
"learning_rate": 3.6666666666666666e-05, |
|
"loss": 5.6351, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.07933884297520662, |
|
"grad_norm": 1.1945130825042725, |
|
"learning_rate": 4e-05, |
|
"loss": 5.4813, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.0859504132231405, |
|
"grad_norm": 1.2076464891433716, |
|
"learning_rate": 4.3333333333333334e-05, |
|
"loss": 5.0437, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.09256198347107437, |
|
"grad_norm": 1.3075780868530273, |
|
"learning_rate": 4.666666666666667e-05, |
|
"loss": 5.4353, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.09917355371900827, |
|
"grad_norm": 1.3237282037734985, |
|
"learning_rate": 5e-05, |
|
"loss": 5.5073, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.10578512396694215, |
|
"grad_norm": 1.3928046226501465, |
|
"learning_rate": 5.333333333333333e-05, |
|
"loss": 5.7509, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.11239669421487604, |
|
"grad_norm": 1.4480962753295898, |
|
"learning_rate": 5.666666666666667e-05, |
|
"loss": 5.6378, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.11900826446280992, |
|
"grad_norm": 1.5458139181137085, |
|
"learning_rate": 6e-05, |
|
"loss": 5.4339, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.1256198347107438, |
|
"grad_norm": 1.5952441692352295, |
|
"learning_rate": 6.333333333333333e-05, |
|
"loss": 5.2634, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.1322314049586777, |
|
"grad_norm": 1.6097549200057983, |
|
"learning_rate": 6.666666666666667e-05, |
|
"loss": 5.4341, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.13884297520661157, |
|
"grad_norm": 1.6773674488067627, |
|
"learning_rate": 7e-05, |
|
"loss": 5.2169, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.14545454545454545, |
|
"grad_norm": 1.8317288160324097, |
|
"learning_rate": 7.333333333333333e-05, |
|
"loss": 5.3538, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.15206611570247933, |
|
"grad_norm": 1.7912957668304443, |
|
"learning_rate": 7.666666666666667e-05, |
|
"loss": 5.867, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.15867768595041323, |
|
"grad_norm": 1.8667272329330444, |
|
"learning_rate": 8e-05, |
|
"loss": 5.2434, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.1652892561983471, |
|
"grad_norm": 1.8276578187942505, |
|
"learning_rate": 8.333333333333334e-05, |
|
"loss": 5.4768, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.171900826446281, |
|
"grad_norm": 2.0258138179779053, |
|
"learning_rate": 8.666666666666667e-05, |
|
"loss": 5.7999, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.17851239669421487, |
|
"grad_norm": 2.286808967590332, |
|
"learning_rate": 9e-05, |
|
"loss": 4.9834, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.18512396694214875, |
|
"grad_norm": 1.9957102537155151, |
|
"learning_rate": 9.333333333333334e-05, |
|
"loss": 5.8383, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.19173553719008266, |
|
"grad_norm": 2.4122209548950195, |
|
"learning_rate": 9.666666666666667e-05, |
|
"loss": 5.8001, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.19834710743801653, |
|
"grad_norm": 2.4733917713165283, |
|
"learning_rate": 0.0001, |
|
"loss": 6.1634, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.2049586776859504, |
|
"grad_norm": 2.4017982482910156, |
|
"learning_rate": 9.999146252290264e-05, |
|
"loss": 6.1423, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.2115702479338843, |
|
"grad_norm": 2.3116109371185303, |
|
"learning_rate": 9.996585300715116e-05, |
|
"loss": 5.8569, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.21818181818181817, |
|
"grad_norm": 2.673358678817749, |
|
"learning_rate": 9.99231801983717e-05, |
|
"loss": 5.7446, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.22479338842975208, |
|
"grad_norm": 4.113661289215088, |
|
"learning_rate": 9.986345866928941e-05, |
|
"loss": 5.0621, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.23140495867768596, |
|
"grad_norm": 2.5697896480560303, |
|
"learning_rate": 9.978670881475172e-05, |
|
"loss": 5.4427, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.23801652892561984, |
|
"grad_norm": 3.4522359371185303, |
|
"learning_rate": 9.96929568447637e-05, |
|
"loss": 6.1919, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.24462809917355371, |
|
"grad_norm": 4.006448268890381, |
|
"learning_rate": 9.958223477553714e-05, |
|
"loss": 5.7438, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.2512396694214876, |
|
"grad_norm": 1.7176642417907715, |
|
"learning_rate": 9.94545804185573e-05, |
|
"loss": 4.2525, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.2578512396694215, |
|
"grad_norm": 1.2814679145812988, |
|
"learning_rate": 9.931003736767013e-05, |
|
"loss": 4.1325, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.2644628099173554, |
|
"grad_norm": 1.0523453950881958, |
|
"learning_rate": 9.91486549841951e-05, |
|
"loss": 3.9787, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.27107438016528923, |
|
"grad_norm": 0.9023359417915344, |
|
"learning_rate": 9.89704883800683e-05, |
|
"loss": 4.4185, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.27768595041322314, |
|
"grad_norm": 0.7565967440605164, |
|
"learning_rate": 9.877559839902184e-05, |
|
"loss": 4.6076, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.28429752066115704, |
|
"grad_norm": 0.740451455116272, |
|
"learning_rate": 9.85640515958057e-05, |
|
"loss": 4.4481, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.2909090909090909, |
|
"grad_norm": 0.7501481175422668, |
|
"learning_rate": 9.833592021345937e-05, |
|
"loss": 4.7943, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.2975206611570248, |
|
"grad_norm": 0.8105722665786743, |
|
"learning_rate": 9.809128215864097e-05, |
|
"loss": 4.4628, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.30413223140495865, |
|
"grad_norm": 1.0324733257293701, |
|
"learning_rate": 9.783022097502204e-05, |
|
"loss": 4.873, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.31074380165289256, |
|
"grad_norm": 0.9048627018928528, |
|
"learning_rate": 9.755282581475769e-05, |
|
"loss": 4.4168, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.31735537190082647, |
|
"grad_norm": 0.9578453302383423, |
|
"learning_rate": 9.725919140804099e-05, |
|
"loss": 4.8329, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.3239669421487603, |
|
"grad_norm": 0.9514991641044617, |
|
"learning_rate": 9.694941803075283e-05, |
|
"loss": 5.0063, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.3305785123966942, |
|
"grad_norm": 0.9336065649986267, |
|
"learning_rate": 9.662361147021779e-05, |
|
"loss": 4.6913, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.3305785123966942, |
|
"eval_loss": 1.1803337335586548, |
|
"eval_runtime": 14.155, |
|
"eval_samples_per_second": 18.015, |
|
"eval_steps_per_second": 9.043, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.3371900826446281, |
|
"grad_norm": 0.9709968566894531, |
|
"learning_rate": 9.628188298907782e-05, |
|
"loss": 4.9291, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.343801652892562, |
|
"grad_norm": 0.9347379803657532, |
|
"learning_rate": 9.592434928729616e-05, |
|
"loss": 4.3365, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.3504132231404959, |
|
"grad_norm": 1.0081912279129028, |
|
"learning_rate": 9.555113246230442e-05, |
|
"loss": 5.2088, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.35702479338842974, |
|
"grad_norm": 1.0832982063293457, |
|
"learning_rate": 9.516235996730645e-05, |
|
"loss": 5.0906, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.36363636363636365, |
|
"grad_norm": 1.1196680068969727, |
|
"learning_rate": 9.475816456775313e-05, |
|
"loss": 4.6937, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.3702479338842975, |
|
"grad_norm": 1.208888292312622, |
|
"learning_rate": 9.43386842960031e-05, |
|
"loss": 4.599, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.3768595041322314, |
|
"grad_norm": 1.2675275802612305, |
|
"learning_rate": 9.39040624041849e-05, |
|
"loss": 4.9299, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.3834710743801653, |
|
"grad_norm": 1.3029289245605469, |
|
"learning_rate": 9.345444731527642e-05, |
|
"loss": 4.7858, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.39008264462809916, |
|
"grad_norm": 1.3226304054260254, |
|
"learning_rate": 9.298999257241863e-05, |
|
"loss": 4.9087, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.39669421487603307, |
|
"grad_norm": 1.4871385097503662, |
|
"learning_rate": 9.251085678648072e-05, |
|
"loss": 4.8845, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.4033057851239669, |
|
"grad_norm": 1.5894980430603027, |
|
"learning_rate": 9.201720358189464e-05, |
|
"loss": 5.3603, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.4099173553719008, |
|
"grad_norm": 1.7288110256195068, |
|
"learning_rate": 9.150920154077754e-05, |
|
"loss": 5.3581, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.41652892561983473, |
|
"grad_norm": 1.6703156232833862, |
|
"learning_rate": 9.098702414536107e-05, |
|
"loss": 4.4855, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.4231404958677686, |
|
"grad_norm": 1.7052879333496094, |
|
"learning_rate": 9.045084971874738e-05, |
|
"loss": 4.1406, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.4297520661157025, |
|
"grad_norm": 1.659575343132019, |
|
"learning_rate": 8.9900861364012e-05, |
|
"loss": 5.162, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.43636363636363634, |
|
"grad_norm": 1.8146637678146362, |
|
"learning_rate": 8.933724690167417e-05, |
|
"loss": 5.2744, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.44297520661157025, |
|
"grad_norm": 1.932379126548767, |
|
"learning_rate": 8.876019880555649e-05, |
|
"loss": 5.659, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.44958677685950416, |
|
"grad_norm": 2.114957332611084, |
|
"learning_rate": 8.816991413705516e-05, |
|
"loss": 5.7257, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.456198347107438, |
|
"grad_norm": 2.27786922454834, |
|
"learning_rate": 8.756659447784368e-05, |
|
"loss": 6.0124, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.4628099173553719, |
|
"grad_norm": 2.2900562286376953, |
|
"learning_rate": 8.695044586103296e-05, |
|
"loss": 4.4817, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.46942148760330576, |
|
"grad_norm": 2.6403229236602783, |
|
"learning_rate": 8.632167870081121e-05, |
|
"loss": 4.2367, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.47603305785123967, |
|
"grad_norm": 2.5031816959381104, |
|
"learning_rate": 8.568050772058762e-05, |
|
"loss": 4.8966, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.4826446280991736, |
|
"grad_norm": 3.2316620349884033, |
|
"learning_rate": 8.502715187966455e-05, |
|
"loss": 5.2742, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.48925619834710743, |
|
"grad_norm": 4.309781074523926, |
|
"learning_rate": 8.436183429846313e-05, |
|
"loss": 4.9375, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.49586776859504134, |
|
"grad_norm": 1.4185460805892944, |
|
"learning_rate": 8.368478218232787e-05, |
|
"loss": 3.5337, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.5024793388429752, |
|
"grad_norm": 1.15443754196167, |
|
"learning_rate": 8.299622674393614e-05, |
|
"loss": 3.638, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.509090909090909, |
|
"grad_norm": 0.8915994763374329, |
|
"learning_rate": 8.229640312433937e-05, |
|
"loss": 3.9779, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.515702479338843, |
|
"grad_norm": 0.8362914323806763, |
|
"learning_rate": 8.158555031266254e-05, |
|
"loss": 4.2846, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.5223140495867769, |
|
"grad_norm": 0.7924783825874329, |
|
"learning_rate": 8.086391106448965e-05, |
|
"loss": 4.506, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.5289256198347108, |
|
"grad_norm": 0.7508944869041443, |
|
"learning_rate": 8.013173181896283e-05, |
|
"loss": 4.4354, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.5355371900826447, |
|
"grad_norm": 0.7106804251670837, |
|
"learning_rate": 7.938926261462366e-05, |
|
"loss": 4.4622, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.5421487603305785, |
|
"grad_norm": 0.7450253367424011, |
|
"learning_rate": 7.863675700402526e-05, |
|
"loss": 4.2489, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.5487603305785124, |
|
"grad_norm": 0.8078528046607971, |
|
"learning_rate": 7.787447196714427e-05, |
|
"loss": 4.4358, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.5553719008264463, |
|
"grad_norm": 0.8072985410690308, |
|
"learning_rate": 7.710266782362247e-05, |
|
"loss": 4.5949, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.5619834710743802, |
|
"grad_norm": 0.9731318950653076, |
|
"learning_rate": 7.63216081438678e-05, |
|
"loss": 4.6721, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.5685950413223141, |
|
"grad_norm": 0.9238491058349609, |
|
"learning_rate": 7.553155965904535e-05, |
|
"loss": 4.4037, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.5752066115702479, |
|
"grad_norm": 0.9422857761383057, |
|
"learning_rate": 7.473279216998895e-05, |
|
"loss": 4.1799, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.5818181818181818, |
|
"grad_norm": 1.0530166625976562, |
|
"learning_rate": 7.392557845506432e-05, |
|
"loss": 4.4404, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.5884297520661157, |
|
"grad_norm": 1.1109483242034912, |
|
"learning_rate": 7.311019417701566e-05, |
|
"loss": 4.4643, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.5950413223140496, |
|
"grad_norm": 1.0867807865142822, |
|
"learning_rate": 7.228691778882693e-05, |
|
"loss": 4.8055, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.6016528925619835, |
|
"grad_norm": 1.1179331541061401, |
|
"learning_rate": 7.145603043863045e-05, |
|
"loss": 4.6242, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.6082644628099173, |
|
"grad_norm": 1.1954206228256226, |
|
"learning_rate": 7.061781587369519e-05, |
|
"loss": 4.6997, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.6148760330578512, |
|
"grad_norm": 1.1525213718414307, |
|
"learning_rate": 6.977256034352712e-05, |
|
"loss": 4.7898, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.6214876033057851, |
|
"grad_norm": 1.2016812562942505, |
|
"learning_rate": 6.892055250211552e-05, |
|
"loss": 4.5403, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.628099173553719, |
|
"grad_norm": 1.323207974433899, |
|
"learning_rate": 6.806208330935766e-05, |
|
"loss": 4.7135, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.6347107438016529, |
|
"grad_norm": 1.2732231616973877, |
|
"learning_rate": 6.719744593169641e-05, |
|
"loss": 4.869, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.6413223140495867, |
|
"grad_norm": 1.5098425149917603, |
|
"learning_rate": 6.632693564200416e-05, |
|
"loss": 5.4372, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.6479338842975206, |
|
"grad_norm": 1.40667724609375, |
|
"learning_rate": 6.545084971874738e-05, |
|
"loss": 5.2639, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.6545454545454545, |
|
"grad_norm": 1.5204859972000122, |
|
"learning_rate": 6.456948734446624e-05, |
|
"loss": 4.8975, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.6611570247933884, |
|
"grad_norm": 1.6273950338363647, |
|
"learning_rate": 6.368314950360415e-05, |
|
"loss": 4.8439, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.6611570247933884, |
|
"eval_loss": 1.090095043182373, |
|
"eval_runtime": 14.1768, |
|
"eval_samples_per_second": 17.987, |
|
"eval_steps_per_second": 9.029, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.6677685950413224, |
|
"grad_norm": 1.766390323638916, |
|
"learning_rate": 6.279213887972179e-05, |
|
"loss": 4.463, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.6743801652892562, |
|
"grad_norm": 1.9048361778259277, |
|
"learning_rate": 6.189675975213094e-05, |
|
"loss": 5.3703, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.6809917355371901, |
|
"grad_norm": 1.8197228908538818, |
|
"learning_rate": 6.099731789198344e-05, |
|
"loss": 4.9689, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.687603305785124, |
|
"grad_norm": 2.086484432220459, |
|
"learning_rate": 6.009412045785051e-05, |
|
"loss": 5.6172, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.6942148760330579, |
|
"grad_norm": 2.267761707305908, |
|
"learning_rate": 5.918747589082853e-05, |
|
"loss": 4.8357, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.7008264462809918, |
|
"grad_norm": 2.259352445602417, |
|
"learning_rate": 5.82776938092065e-05, |
|
"loss": 5.1136, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.7074380165289256, |
|
"grad_norm": 2.6360812187194824, |
|
"learning_rate": 5.736508490273188e-05, |
|
"loss": 5.1514, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.7140495867768595, |
|
"grad_norm": 2.8661835193634033, |
|
"learning_rate": 5.644996082651017e-05, |
|
"loss": 4.3032, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.7206611570247934, |
|
"grad_norm": 2.702547073364258, |
|
"learning_rate": 5.553263409457504e-05, |
|
"loss": 4.8666, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.7272727272727273, |
|
"grad_norm": 2.970484495162964, |
|
"learning_rate": 5.4613417973165106e-05, |
|
"loss": 4.8789, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.7338842975206612, |
|
"grad_norm": 3.8799359798431396, |
|
"learning_rate": 5.3692626373743706e-05, |
|
"loss": 5.1477, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.740495867768595, |
|
"grad_norm": 0.6784772276878357, |
|
"learning_rate": 5.27705737457985e-05, |
|
"loss": 3.4236, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.7471074380165289, |
|
"grad_norm": 0.7395262718200684, |
|
"learning_rate": 5.184757496945726e-05, |
|
"loss": 3.885, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.7537190082644628, |
|
"grad_norm": 0.7164163589477539, |
|
"learning_rate": 5.092394524795649e-05, |
|
"loss": 3.8037, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.7603305785123967, |
|
"grad_norm": 0.7306711077690125, |
|
"learning_rate": 5e-05, |
|
"loss": 3.8429, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.7669421487603306, |
|
"grad_norm": 0.7196017503738403, |
|
"learning_rate": 4.907605475204352e-05, |
|
"loss": 4.1837, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.7735537190082644, |
|
"grad_norm": 0.7127442359924316, |
|
"learning_rate": 4.8152425030542766e-05, |
|
"loss": 3.8733, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.7801652892561983, |
|
"grad_norm": 0.7182602882385254, |
|
"learning_rate": 4.72294262542015e-05, |
|
"loss": 3.9125, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.7867768595041322, |
|
"grad_norm": 0.7722130417823792, |
|
"learning_rate": 4.6307373626256306e-05, |
|
"loss": 4.1602, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.7933884297520661, |
|
"grad_norm": 0.8073102831840515, |
|
"learning_rate": 4.5386582026834906e-05, |
|
"loss": 4.1581, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8239835500717163, |
|
"learning_rate": 4.446736590542497e-05, |
|
"loss": 4.1797, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.8066115702479338, |
|
"grad_norm": 0.8800137639045715, |
|
"learning_rate": 4.3550039173489845e-05, |
|
"loss": 4.2168, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.8132231404958677, |
|
"grad_norm": 0.8760910034179688, |
|
"learning_rate": 4.2634915097268115e-05, |
|
"loss": 4.4288, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.8198347107438017, |
|
"grad_norm": 0.9671458601951599, |
|
"learning_rate": 4.1722306190793495e-05, |
|
"loss": 4.2465, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.8264462809917356, |
|
"grad_norm": 0.9589430689811707, |
|
"learning_rate": 4.0812524109171476e-05, |
|
"loss": 4.6314, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.8330578512396695, |
|
"grad_norm": 0.9631523489952087, |
|
"learning_rate": 3.99058795421495e-05, |
|
"loss": 3.8995, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.8396694214876033, |
|
"grad_norm": 1.1293405294418335, |
|
"learning_rate": 3.9002682108016585e-05, |
|
"loss": 4.4208, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.8462809917355372, |
|
"grad_norm": 1.1248273849487305, |
|
"learning_rate": 3.8103240247869075e-05, |
|
"loss": 4.797, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.8528925619834711, |
|
"grad_norm": 1.1502659320831299, |
|
"learning_rate": 3.720786112027822e-05, |
|
"loss": 4.5282, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.859504132231405, |
|
"grad_norm": 1.3296188116073608, |
|
"learning_rate": 3.631685049639586e-05, |
|
"loss": 4.5665, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.8661157024793389, |
|
"grad_norm": 1.222717523574829, |
|
"learning_rate": 3.543051265553377e-05, |
|
"loss": 4.4934, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.8727272727272727, |
|
"grad_norm": 1.3489303588867188, |
|
"learning_rate": 3.4549150281252636e-05, |
|
"loss": 4.9627, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.8793388429752066, |
|
"grad_norm": 1.3762129545211792, |
|
"learning_rate": 3.367306435799584e-05, |
|
"loss": 4.9849, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.8859504132231405, |
|
"grad_norm": 1.4506964683532715, |
|
"learning_rate": 3.2802554068303596e-05, |
|
"loss": 4.5126, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.8925619834710744, |
|
"grad_norm": 1.541321039199829, |
|
"learning_rate": 3.1937916690642356e-05, |
|
"loss": 4.6768, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.8991735537190083, |
|
"grad_norm": 1.6167188882827759, |
|
"learning_rate": 3.107944749788449e-05, |
|
"loss": 4.4696, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.9057851239669421, |
|
"grad_norm": 1.765872597694397, |
|
"learning_rate": 3.0227439656472877e-05, |
|
"loss": 4.9427, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.912396694214876, |
|
"grad_norm": 1.81801438331604, |
|
"learning_rate": 2.9382184126304834e-05, |
|
"loss": 5.1971, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.9190082644628099, |
|
"grad_norm": 1.7761266231536865, |
|
"learning_rate": 2.8543969561369556e-05, |
|
"loss": 3.5547, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.9256198347107438, |
|
"grad_norm": 1.9924731254577637, |
|
"learning_rate": 2.771308221117309e-05, |
|
"loss": 5.4103, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.9322314049586777, |
|
"grad_norm": 2.269597291946411, |
|
"learning_rate": 2.688980582298435e-05, |
|
"loss": 5.2679, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.9388429752066115, |
|
"grad_norm": 2.1679582595825195, |
|
"learning_rate": 2.607442154493568e-05, |
|
"loss": 5.2392, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.9454545454545454, |
|
"grad_norm": 2.2235004901885986, |
|
"learning_rate": 2.5267207830011068e-05, |
|
"loss": 5.3694, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.9520661157024793, |
|
"grad_norm": 2.366504430770874, |
|
"learning_rate": 2.446844034095466e-05, |
|
"loss": 4.5467, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.9586776859504132, |
|
"grad_norm": 2.4724178314208984, |
|
"learning_rate": 2.3678391856132204e-05, |
|
"loss": 4.0637, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.9652892561983472, |
|
"grad_norm": 2.8730733394622803, |
|
"learning_rate": 2.2897332176377528e-05, |
|
"loss": 4.465, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.971900826446281, |
|
"grad_norm": 3.2861361503601074, |
|
"learning_rate": 2.2125528032855724e-05, |
|
"loss": 5.3115, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.9785123966942149, |
|
"grad_norm": 3.9318833351135254, |
|
"learning_rate": 2.136324299597474e-05, |
|
"loss": 5.3298, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.9851239669421488, |
|
"grad_norm": 0.7852454781532288, |
|
"learning_rate": 2.061073738537635e-05, |
|
"loss": 3.8637, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.9917355371900827, |
|
"grad_norm": 1.412460446357727, |
|
"learning_rate": 1.9868268181037185e-05, |
|
"loss": 4.5326, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.9917355371900827, |
|
"eval_loss": 1.0575369596481323, |
|
"eval_runtime": 14.1494, |
|
"eval_samples_per_second": 18.022, |
|
"eval_steps_per_second": 9.046, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.9983471074380166, |
|
"grad_norm": 2.1073575019836426, |
|
"learning_rate": 1.9136088935510362e-05, |
|
"loss": 4.4085, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.0049586776859505, |
|
"grad_norm": 2.898435115814209, |
|
"learning_rate": 1.8414449687337464e-05, |
|
"loss": 3.9522, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.0115702479338844, |
|
"grad_norm": 0.6054397821426392, |
|
"learning_rate": 1.7703596875660645e-05, |
|
"loss": 3.8139, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.018181818181818, |
|
"grad_norm": 0.6304721236228943, |
|
"learning_rate": 1.700377325606388e-05, |
|
"loss": 3.5105, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.024793388429752, |
|
"grad_norm": 0.6456805467605591, |
|
"learning_rate": 1.631521781767214e-05, |
|
"loss": 3.4875, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.031404958677686, |
|
"grad_norm": 0.6970878839492798, |
|
"learning_rate": 1.5638165701536868e-05, |
|
"loss": 3.9056, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.0380165289256198, |
|
"grad_norm": 0.7542885541915894, |
|
"learning_rate": 1.4972848120335453e-05, |
|
"loss": 3.6381, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 1.0446280991735537, |
|
"grad_norm": 0.8014428615570068, |
|
"learning_rate": 1.4319492279412388e-05, |
|
"loss": 4.0426, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.0512396694214876, |
|
"grad_norm": 0.7932935953140259, |
|
"learning_rate": 1.3678321299188801e-05, |
|
"loss": 4.1678, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 1.0578512396694215, |
|
"grad_norm": 0.8434247970581055, |
|
"learning_rate": 1.3049554138967051e-05, |
|
"loss": 3.845, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.0644628099173554, |
|
"grad_norm": 1.026289701461792, |
|
"learning_rate": 1.2433405522156332e-05, |
|
"loss": 4.0639, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 1.0710743801652893, |
|
"grad_norm": 0.9154835939407349, |
|
"learning_rate": 1.183008586294485e-05, |
|
"loss": 4.1187, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.0776859504132232, |
|
"grad_norm": 1.0376970767974854, |
|
"learning_rate": 1.1239801194443506e-05, |
|
"loss": 3.7358, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 1.084297520661157, |
|
"grad_norm": 0.9799293875694275, |
|
"learning_rate": 1.066275309832584e-05, |
|
"loss": 3.7865, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.0909090909090908, |
|
"grad_norm": 1.3073195219039917, |
|
"learning_rate": 1.0099138635988026e-05, |
|
"loss": 3.8564, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.0975206611570247, |
|
"grad_norm": 1.0965800285339355, |
|
"learning_rate": 9.549150281252633e-06, |
|
"loss": 4.1914, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 1.1041322314049586, |
|
"grad_norm": 1.167807936668396, |
|
"learning_rate": 9.012975854638949e-06, |
|
"loss": 4.4906, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 1.1107438016528925, |
|
"grad_norm": 1.1979173421859741, |
|
"learning_rate": 8.490798459222476e-06, |
|
"loss": 4.169, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.1173553719008265, |
|
"grad_norm": 1.294745922088623, |
|
"learning_rate": 7.982796418105371e-06, |
|
"loss": 4.7301, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 1.1239669421487604, |
|
"grad_norm": 1.308195948600769, |
|
"learning_rate": 7.489143213519301e-06, |
|
"loss": 4.0708, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.1305785123966943, |
|
"grad_norm": 1.4223898649215698, |
|
"learning_rate": 7.010007427581378e-06, |
|
"loss": 4.5165, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 1.1371900826446282, |
|
"grad_norm": 1.3016583919525146, |
|
"learning_rate": 6.5455526847235825e-06, |
|
"loss": 4.0952, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.143801652892562, |
|
"grad_norm": 1.5517874956130981, |
|
"learning_rate": 6.0959375958151045e-06, |
|
"loss": 4.2077, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 1.1504132231404958, |
|
"grad_norm": 1.4684065580368042, |
|
"learning_rate": 5.6613157039969055e-06, |
|
"loss": 4.1166, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.1570247933884297, |
|
"grad_norm": 1.5590767860412598, |
|
"learning_rate": 5.241835432246889e-06, |
|
"loss": 4.4264, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.1636363636363636, |
|
"grad_norm": 1.6318552494049072, |
|
"learning_rate": 4.837640032693558e-06, |
|
"loss": 4.4621, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.1702479338842975, |
|
"grad_norm": 1.6523213386535645, |
|
"learning_rate": 4.448867537695578e-06, |
|
"loss": 4.2173, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 1.1768595041322314, |
|
"grad_norm": 1.782486081123352, |
|
"learning_rate": 4.075650712703849e-06, |
|
"loss": 3.9855, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.1834710743801653, |
|
"grad_norm": 2.0244011878967285, |
|
"learning_rate": 3.71811701092219e-06, |
|
"loss": 3.1813, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 1.1900826446280992, |
|
"grad_norm": 1.8634884357452393, |
|
"learning_rate": 3.376388529782215e-06, |
|
"loss": 4.2433, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.1966942148760331, |
|
"grad_norm": 3.6111536026000977, |
|
"learning_rate": 3.0505819692471792e-06, |
|
"loss": 4.7134, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 1.203305785123967, |
|
"grad_norm": 2.2132115364074707, |
|
"learning_rate": 2.7408085919590264e-06, |
|
"loss": 5.0016, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.2099173553719007, |
|
"grad_norm": 2.191588878631592, |
|
"learning_rate": 2.4471741852423237e-06, |
|
"loss": 4.8401, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 1.2165289256198348, |
|
"grad_norm": 2.5043983459472656, |
|
"learning_rate": 2.1697790249779636e-06, |
|
"loss": 4.4028, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.2231404958677685, |
|
"grad_norm": 2.4573326110839844, |
|
"learning_rate": 1.908717841359048e-06, |
|
"loss": 3.3899, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.2297520661157024, |
|
"grad_norm": 2.752991199493408, |
|
"learning_rate": 1.6640797865406288e-06, |
|
"loss": 3.9257, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.2363636363636363, |
|
"grad_norm": 3.2671091556549072, |
|
"learning_rate": 1.4359484041943038e-06, |
|
"loss": 4.4646, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 1.2429752066115702, |
|
"grad_norm": 3.656493663787842, |
|
"learning_rate": 1.2244016009781701e-06, |
|
"loss": 3.8991, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.2495867768595041, |
|
"grad_norm": 2.3350541591644287, |
|
"learning_rate": 1.0295116199317057e-06, |
|
"loss": 3.9637, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.256198347107438, |
|
"grad_norm": 0.6289302706718445, |
|
"learning_rate": 8.513450158049108e-07, |
|
"loss": 3.5885, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.262809917355372, |
|
"grad_norm": 0.6117539405822754, |
|
"learning_rate": 6.899626323298713e-07, |
|
"loss": 2.8164, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 1.2694214876033059, |
|
"grad_norm": 0.6665360331535339, |
|
"learning_rate": 5.454195814427021e-07, |
|
"loss": 3.8419, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.2760330578512398, |
|
"grad_norm": 0.6839153170585632, |
|
"learning_rate": 4.177652244628627e-07, |
|
"loss": 4.0613, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 1.2826446280991735, |
|
"grad_norm": 0.8262344598770142, |
|
"learning_rate": 3.0704315523631953e-07, |
|
"loss": 3.8355, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.2892561983471074, |
|
"grad_norm": 0.8175384402275085, |
|
"learning_rate": 2.1329118524827662e-07, |
|
"loss": 4.2571, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.2958677685950413, |
|
"grad_norm": 0.8560000658035278, |
|
"learning_rate": 1.3654133071059893e-07, |
|
"loss": 3.9339, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.3024793388429752, |
|
"grad_norm": 0.8736144304275513, |
|
"learning_rate": 7.681980162830282e-08, |
|
"loss": 4.096, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 1.309090909090909, |
|
"grad_norm": 0.9289442300796509, |
|
"learning_rate": 3.4146992848854695e-08, |
|
"loss": 3.9761, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.315702479338843, |
|
"grad_norm": 0.9881563186645508, |
|
"learning_rate": 8.537477097364522e-09, |
|
"loss": 3.987, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 1.322314049586777, |
|
"grad_norm": 1.0067229270935059, |
|
"learning_rate": 0.0, |
|
"loss": 4.4433, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.322314049586777, |
|
"eval_loss": 1.0460100173950195, |
|
"eval_runtime": 14.1299, |
|
"eval_samples_per_second": 18.047, |
|
"eval_steps_per_second": 9.059, |
|
"step": 200 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 200, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.6229100552192e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |