{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "global_step": 37560,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.07,
      "learning_rate": 5e-05,
      "loss": 2.2617,
      "step": 500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.932541824069077e-05,
      "loss": 1.6381,
      "step": 1000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.865083648138155e-05,
      "loss": 1.4975,
      "step": 1500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.7976254722072315e-05,
      "loss": 1.4772,
      "step": 2000
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.730167296276309e-05,
      "loss": 1.4015,
      "step": 2500
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.662709120345386e-05,
      "loss": 1.3626,
      "step": 3000
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.595250944414463e-05,
      "loss": 1.3631,
      "step": 3500
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.5277927684835404e-05,
      "loss": 1.3027,
      "step": 4000
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.460334592552617e-05,
      "loss": 1.2998,
      "step": 4500
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.392876416621695e-05,
      "loss": 1.2911,
      "step": 5000
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.325418240690772e-05,
      "loss": 1.2764,
      "step": 5500
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.2579600647598486e-05,
      "loss": 1.2656,
      "step": 6000
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.190501888828926e-05,
      "loss": 1.2599,
      "step": 6500
    },
    {
      "epoch": 0.93,
      "learning_rate": 4.123043712898003e-05,
      "loss": 1.2488,
      "step": 7000
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.055585536967081e-05,
      "loss": 1.2533,
      "step": 7500
    },
    {
      "epoch": 1.0,
      "eval_bleu1_score": 0.9978,
      "eval_bleu2_score": 0.9978,
      "eval_bleu3_score": 0.9978,
      "eval_bleu4_score": 0.9978,
      "eval_loss": 1.2135246992111206,
      "eval_rougeL": 0.9976,
      "eval_runtime": 348.2564,
      "eval_samples_per_second": 4.795,
      "eval_steps_per_second": 2.398,
      "step": 7512
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.988127361036158e-05,
      "loss": 1.0527,
      "step": 8000
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.920669185105235e-05,
      "loss": 1.0452,
      "step": 8500
    },
    {
      "epoch": 1.2,
      "learning_rate": 3.8532110091743125e-05,
      "loss": 1.0486,
      "step": 9000
    },
    {
      "epoch": 1.26,
      "learning_rate": 3.7857528332433894e-05,
      "loss": 1.0376,
      "step": 9500
    },
    {
      "epoch": 1.33,
      "learning_rate": 3.718294657312467e-05,
      "loss": 1.05,
      "step": 10000
    },
    {
      "epoch": 1.4,
      "learning_rate": 3.650836481381544e-05,
      "loss": 1.0601,
      "step": 10500
    },
    {
      "epoch": 1.46,
      "learning_rate": 3.583378305450621e-05,
      "loss": 1.0451,
      "step": 11000
    },
    {
      "epoch": 1.53,
      "learning_rate": 3.515920129519698e-05,
      "loss": 1.0377,
      "step": 11500
    },
    {
      "epoch": 1.6,
      "learning_rate": 3.448461953588775e-05,
      "loss": 1.0407,
      "step": 12000
    },
    {
      "epoch": 1.66,
      "learning_rate": 3.3810037776578527e-05,
      "loss": 1.0325,
      "step": 12500
    },
    {
      "epoch": 1.73,
      "learning_rate": 3.3135456017269295e-05,
      "loss": 1.0092,
      "step": 13000
    },
    {
      "epoch": 1.8,
      "learning_rate": 3.2460874257960064e-05,
      "loss": 1.049,
      "step": 13500
    },
    {
      "epoch": 1.86,
      "learning_rate": 3.178629249865084e-05,
      "loss": 1.0264,
      "step": 14000
    },
    {
      "epoch": 1.93,
      "learning_rate": 3.111171073934161e-05,
      "loss": 1.0011,
      "step": 14500
    },
    {
      "epoch": 2.0,
      "learning_rate": 3.043712898003238e-05,
      "loss": 1.0329,
      "step": 15000
    },
    {
      "epoch": 2.0,
      "eval_bleu1_score": 0.9978,
      "eval_bleu2_score": 0.9978,
      "eval_bleu3_score": 0.9978,
      "eval_bleu4_score": 0.9978,
      "eval_loss": 1.1185928583145142,
      "eval_rougeL": 0.9976,
      "eval_runtime": 347.235,
      "eval_samples_per_second": 4.809,
      "eval_steps_per_second": 2.405,
      "step": 15024
    },
    {
      "epoch": 2.06,
      "learning_rate": 2.9762547220723153e-05,
      "loss": 0.8112,
      "step": 15500
    },
    {
      "epoch": 2.13,
      "learning_rate": 2.9087965461413925e-05,
      "loss": 0.8095,
      "step": 16000
    },
    {
      "epoch": 2.2,
      "learning_rate": 2.8413383702104697e-05,
      "loss": 0.8018,
      "step": 16500
    },
    {
      "epoch": 2.26,
      "learning_rate": 2.7738801942795465e-05,
      "loss": 0.7999,
      "step": 17000
    },
    {
      "epoch": 2.33,
      "learning_rate": 2.7064220183486238e-05,
      "loss": 0.8078,
      "step": 17500
    },
    {
      "epoch": 2.4,
      "learning_rate": 2.638963842417701e-05,
      "loss": 0.8053,
      "step": 18000
    },
    {
      "epoch": 2.46,
      "learning_rate": 2.5715056664867782e-05,
      "loss": 0.8008,
      "step": 18500
    },
    {
      "epoch": 2.53,
      "learning_rate": 2.5040474905558554e-05,
      "loss": 0.8187,
      "step": 19000
    },
    {
      "epoch": 2.6,
      "learning_rate": 2.4365893146249326e-05,
      "loss": 0.797,
      "step": 19500
    },
    {
      "epoch": 2.66,
      "learning_rate": 2.3691311386940098e-05,
      "loss": 0.7971,
      "step": 20000
    },
    {
      "epoch": 2.73,
      "learning_rate": 2.301672962763087e-05,
      "loss": 0.8096,
      "step": 20500
    },
    {
      "epoch": 2.8,
      "learning_rate": 2.2342147868321642e-05,
      "loss": 0.7909,
      "step": 21000
    },
    {
      "epoch": 2.86,
      "learning_rate": 2.1667566109012415e-05,
      "loss": 0.7939,
      "step": 21500
    },
    {
      "epoch": 2.93,
      "learning_rate": 2.0992984349703183e-05,
      "loss": 0.7963,
      "step": 22000
    },
    {
      "epoch": 3.0,
      "learning_rate": 2.0318402590393955e-05,
      "loss": 0.7886,
      "step": 22500
    },
    {
      "epoch": 3.0,
      "eval_bleu1_score": 0.9978,
      "eval_bleu2_score": 0.9978,
      "eval_bleu3_score": 0.9978,
      "eval_bleu4_score": 0.9978,
      "eval_loss": 1.1093112230300903,
      "eval_rougeL": 0.9976,
      "eval_runtime": 347.4161,
      "eval_samples_per_second": 4.807,
      "eval_steps_per_second": 2.403,
      "step": 22536
    },
    {
      "epoch": 3.06,
      "learning_rate": 1.9643820831084728e-05,
      "loss": 0.576,
      "step": 23000
    },
    {
      "epoch": 3.13,
      "learning_rate": 1.89692390717755e-05,
      "loss": 0.5781,
      "step": 23500
    },
    {
      "epoch": 3.19,
      "learning_rate": 1.8294657312466272e-05,
      "loss": 0.5698,
      "step": 24000
    },
    {
      "epoch": 3.26,
      "learning_rate": 1.762007555315704e-05,
      "loss": 0.5717,
      "step": 24500
    },
    {
      "epoch": 3.33,
      "learning_rate": 1.6945493793847816e-05,
      "loss": 0.584,
      "step": 25000
    },
    {
      "epoch": 3.39,
      "learning_rate": 1.6270912034538588e-05,
      "loss": 0.5758,
      "step": 25500
    },
    {
      "epoch": 3.46,
      "learning_rate": 1.559633027522936e-05,
      "loss": 0.5808,
      "step": 26000
    },
    {
      "epoch": 3.53,
      "learning_rate": 1.492174851592013e-05,
      "loss": 0.5762,
      "step": 26500
    },
    {
      "epoch": 3.59,
      "learning_rate": 1.4247166756610903e-05,
      "loss": 0.5701,
      "step": 27000
    },
    {
      "epoch": 3.66,
      "learning_rate": 1.3572584997301673e-05,
      "loss": 0.58,
      "step": 27500
    },
    {
      "epoch": 3.73,
      "learning_rate": 1.2898003237992445e-05,
      "loss": 0.5683,
      "step": 28000
    },
    {
      "epoch": 3.79,
      "learning_rate": 1.2223421478683218e-05,
      "loss": 0.5589,
      "step": 28500
    },
    {
      "epoch": 3.86,
      "learning_rate": 1.1548839719373988e-05,
      "loss": 0.5842,
      "step": 29000
    },
    {
      "epoch": 3.93,
      "learning_rate": 1.087425796006476e-05,
      "loss": 0.5717,
      "step": 29500
    },
    {
      "epoch": 3.99,
      "learning_rate": 1.0199676200755532e-05,
      "loss": 0.5618,
      "step": 30000
    },
    {
      "epoch": 4.0,
      "eval_bleu1_score": 0.9978,
      "eval_bleu2_score": 0.9978,
      "eval_bleu3_score": 0.9978,
      "eval_bleu4_score": 0.9978,
      "eval_loss": 1.2060836553573608,
      "eval_rougeL": 0.9976,
      "eval_runtime": 347.0486,
      "eval_samples_per_second": 4.812,
      "eval_steps_per_second": 2.406,
      "step": 30048
    },
    {
      "epoch": 4.06,
      "learning_rate": 9.525094441446304e-06,
      "loss": 0.3953,
      "step": 30500
    },
    {
      "epoch": 4.13,
      "learning_rate": 8.850512682137076e-06,
      "loss": 0.3749,
      "step": 31000
    },
    {
      "epoch": 4.19,
      "learning_rate": 8.175930922827847e-06,
      "loss": 0.3731,
      "step": 31500
    },
    {
      "epoch": 4.26,
      "learning_rate": 7.501349163518619e-06,
      "loss": 0.3752,
      "step": 32000
    },
    {
      "epoch": 4.33,
      "learning_rate": 6.82676740420939e-06,
      "loss": 0.3789,
      "step": 32500
    },
    {
      "epoch": 4.39,
      "learning_rate": 6.152185644900162e-06,
      "loss": 0.3768,
      "step": 33000
    },
    {
      "epoch": 4.46,
      "learning_rate": 5.477603885590934e-06,
      "loss": 0.3665,
      "step": 33500
    },
    {
      "epoch": 4.53,
      "learning_rate": 4.803022126281705e-06,
      "loss": 0.3763,
      "step": 34000
    },
    {
      "epoch": 4.59,
      "learning_rate": 4.128440366972477e-06,
      "loss": 0.3715,
      "step": 34500
    },
    {
      "epoch": 4.66,
      "learning_rate": 3.4538586076632488e-06,
      "loss": 0.3763,
      "step": 35000
    },
    {
      "epoch": 4.73,
      "learning_rate": 2.7792768483540205e-06,
      "loss": 0.3727,
      "step": 35500
    },
    {
      "epoch": 4.79,
      "learning_rate": 2.104695089044792e-06,
      "loss": 0.365,
      "step": 36000
    },
    {
      "epoch": 4.86,
      "learning_rate": 1.4301133297355639e-06,
      "loss": 0.3707,
      "step": 36500
    },
    {
      "epoch": 4.93,
      "learning_rate": 7.555315704263357e-07,
      "loss": 0.3691,
      "step": 37000
    },
    {
      "epoch": 4.99,
      "learning_rate": 8.09498111171074e-08,
      "loss": 0.3724,
      "step": 37500
    },
    {
      "epoch": 5.0,
      "eval_bleu1_score": 0.9978,
      "eval_bleu2_score": 0.9978,
      "eval_bleu3_score": 0.9978,
      "eval_bleu4_score": 0.9978,
      "eval_loss": 1.51046884059906,
      "eval_rougeL": 0.9976,
      "eval_runtime": 347.5328,
      "eval_samples_per_second": 4.805,
      "eval_steps_per_second": 2.403,
      "step": 37560
    }
  ],
  "max_steps": 37560,
  "num_train_epochs": 5,
  "total_flos": 1.08999726956544e+17,
  "trial_name": null,
  "trial_params": null
}