{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "global_step": 75115,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 1.9866870798109567e-05,
      "loss": 1.968,
      "step": 500
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.9733741596219132e-05,
      "loss": 1.7712,
      "step": 1000
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9600612394328697e-05,
      "loss": 1.7087,
      "step": 1500
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9467483192438262e-05,
      "loss": 1.6706,
      "step": 2000
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9334353990547827e-05,
      "loss": 1.6431,
      "step": 2500
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9201224788657392e-05,
      "loss": 1.6151,
      "step": 3000
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.906809558676696e-05,
      "loss": 1.6047,
      "step": 3500
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.8934966384876526e-05,
      "loss": 1.5783,
      "step": 4000
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.8801837182986088e-05,
      "loss": 1.5813,
      "step": 4500
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.8668707981095656e-05,
      "loss": 1.5733,
      "step": 5000
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.853557877920522e-05,
      "loss": 1.4983,
      "step": 5500
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.8402449577314786e-05,
      "loss": 1.5354,
      "step": 6000
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.826932037542435e-05,
      "loss": 1.5037,
      "step": 6500
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.8136191173533916e-05,
      "loss": 1.5545,
      "step": 7000
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.800306197164348e-05,
      "loss": 1.5555,
      "step": 7500
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.7869932769753047e-05,
      "loss": 1.5364,
      "step": 8000
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.773680356786261e-05,
      "loss": 1.5221,
      "step": 8500
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.7603674365972177e-05,
      "loss": 1.4612,
      "step": 9000
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.7470545164081742e-05,
      "loss": 1.4768,
      "step": 9500
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.7337415962191307e-05,
      "loss": 1.4906,
      "step": 10000
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.7204286760300872e-05,
      "loss": 1.4838,
      "step": 10500
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.7071157558410437e-05,
      "loss": 1.4675,
      "step": 11000
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.6938028356520006e-05,
      "loss": 1.4579,
      "step": 11500
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.680489915462957e-05,
      "loss": 1.4626,
      "step": 12000
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.6671769952739136e-05,
      "loss": 1.455,
      "step": 12500
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.65386407508487e-05,
      "loss": 1.4327,
      "step": 13000
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.6405511548958266e-05,
      "loss": 1.4114,
      "step": 13500
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.627238234706783e-05,
      "loss": 1.4188,
      "step": 14000
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.6139253145177396e-05,
      "loss": 1.4114,
      "step": 14500
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.600612394328696e-05,
      "loss": 1.4332,
      "step": 15000
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.412462830543518,
      "eval_runtime": 72.1932,
      "eval_samples_per_second": 23.132,
      "eval_steps_per_second": 23.132,
      "step": 15023
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.5872994741396526e-05,
      "loss": 1.1332,
      "step": 15500
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.573986553950609e-05,
      "loss": 1.109,
      "step": 16000
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.5606736337615657e-05,
      "loss": 1.1059,
      "step": 16500
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.547360713572522e-05,
      "loss": 1.1183,
      "step": 17000
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.5340477933834787e-05,
      "loss": 1.1313,
      "step": 17500
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.5207348731944354e-05,
      "loss": 1.1148,
      "step": 18000
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.507421953005392e-05,
      "loss": 1.0957,
      "step": 18500
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.4941090328163484e-05,
      "loss": 1.1238,
      "step": 19000
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.4807961126273049e-05,
      "loss": 1.142,
      "step": 19500
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.4674831924382616e-05,
      "loss": 1.1324,
      "step": 20000
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.4541702722492179e-05,
      "loss": 1.1169,
      "step": 20500
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.4408573520601746e-05,
      "loss": 1.1449,
      "step": 21000
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.4275444318711311e-05,
      "loss": 1.1185,
      "step": 21500
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.4142315116820874e-05,
      "loss": 1.1091,
      "step": 22000
    },
    {
      "epoch": 1.5,
      "learning_rate": 1.4009185914930441e-05,
      "loss": 1.107,
      "step": 22500
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.3876056713040006e-05,
      "loss": 1.1264,
      "step": 23000
    },
    {
      "epoch": 1.56,
      "learning_rate": 1.3742927511149571e-05,
      "loss": 1.1408,
      "step": 23500
    },
    {
      "epoch": 1.6,
      "learning_rate": 1.3609798309259136e-05,
      "loss": 1.1363,
      "step": 24000
    },
    {
      "epoch": 1.63,
      "learning_rate": 1.3476669107368703e-05,
      "loss": 1.1575,
      "step": 24500
    },
    {
      "epoch": 1.66,
      "learning_rate": 1.3343539905478268e-05,
      "loss": 1.1315,
      "step": 25000
    },
    {
      "epoch": 1.7,
      "learning_rate": 1.3210410703587833e-05,
      "loss": 1.1345,
      "step": 25500
    },
    {
      "epoch": 1.73,
      "learning_rate": 1.3077281501697398e-05,
      "loss": 1.1212,
      "step": 26000
    },
    {
      "epoch": 1.76,
      "learning_rate": 1.2944152299806965e-05,
      "loss": 1.1461,
      "step": 26500
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.2811023097916529e-05,
      "loss": 1.1087,
      "step": 27000
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.2677893896026094e-05,
      "loss": 1.1313,
      "step": 27500
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.254476469413566e-05,
      "loss": 1.0995,
      "step": 28000
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.2411635492245224e-05,
      "loss": 1.1351,
      "step": 28500
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.227850629035479e-05,
      "loss": 1.1288,
      "step": 29000
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.2145377088464356e-05,
      "loss": 1.1376,
      "step": 29500
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.201224788657392e-05,
      "loss": 1.1405,
      "step": 30000
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.3536027669906616,
      "eval_runtime": 72.2307,
      "eval_samples_per_second": 23.12,
      "eval_steps_per_second": 23.12,
      "step": 30046
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.1879118684683486e-05,
      "loss": 0.8064,
      "step": 30500
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.1745989482793053e-05,
      "loss": 0.775,
      "step": 31000
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.1612860280902616e-05,
      "loss": 0.7799,
      "step": 31500
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.1479731079012181e-05,
      "loss": 0.775,
      "step": 32000
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.1346601877121748e-05,
      "loss": 0.7728,
      "step": 32500
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.1213472675231313e-05,
      "loss": 0.777,
      "step": 33000
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.1080343473340878e-05,
      "loss": 0.7864,
      "step": 33500
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.0947214271450443e-05,
      "loss": 0.7762,
      "step": 34000
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.081408506956001e-05,
      "loss": 0.7831,
      "step": 34500
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.0680955867669574e-05,
      "loss": 0.8028,
      "step": 35000
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.0547826665779139e-05,
      "loss": 0.7768,
      "step": 35500
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.0414697463888705e-05,
      "loss": 0.7954,
      "step": 36000
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.0281568261998269e-05,
      "loss": 0.784,
      "step": 36500
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.0148439060107836e-05,
      "loss": 0.7873,
      "step": 37000
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.00153098582174e-05,
      "loss": 0.795,
      "step": 37500
    },
    {
      "epoch": 2.53,
      "learning_rate": 9.882180656326966e-06,
      "loss": 0.7861,
      "step": 38000
    },
    {
      "epoch": 2.56,
      "learning_rate": 9.749051454436531e-06,
      "loss": 0.7789,
      "step": 38500
    },
    {
      "epoch": 2.6,
      "learning_rate": 9.615922252546098e-06,
      "loss": 0.7747,
      "step": 39000
    },
    {
      "epoch": 2.63,
      "learning_rate": 9.482793050655663e-06,
      "loss": 0.7668,
      "step": 39500
    },
    {
      "epoch": 2.66,
      "learning_rate": 9.349663848765226e-06,
      "loss": 0.789,
      "step": 40000
    },
    {
      "epoch": 2.7,
      "learning_rate": 9.216534646874793e-06,
      "loss": 0.796,
      "step": 40500
    },
    {
      "epoch": 2.73,
      "learning_rate": 9.083405444984358e-06,
      "loss": 0.7821,
      "step": 41000
    },
    {
      "epoch": 2.76,
      "learning_rate": 8.950276243093923e-06,
      "loss": 0.7798,
      "step": 41500
    },
    {
      "epoch": 2.8,
      "learning_rate": 8.817147041203488e-06,
      "loss": 0.7994,
      "step": 42000
    },
    {
      "epoch": 2.83,
      "learning_rate": 8.684017839313053e-06,
      "loss": 0.8004,
      "step": 42500
    },
    {
      "epoch": 2.86,
      "learning_rate": 8.55088863742262e-06,
      "loss": 0.8001,
      "step": 43000
    },
    {
      "epoch": 2.9,
      "learning_rate": 8.417759435532185e-06,
      "loss": 0.8043,
      "step": 43500
    },
    {
      "epoch": 2.93,
      "learning_rate": 8.284630233641749e-06,
      "loss": 0.8109,
      "step": 44000
    },
    {
      "epoch": 2.96,
      "learning_rate": 8.151501031751316e-06,
      "loss": 0.7883,
      "step": 44500
    },
    {
      "epoch": 3.0,
      "learning_rate": 8.01837182986088e-06,
      "loss": 0.7896,
      "step": 45000
    },
    {
      "epoch": 3.0,
      "eval_loss": 1.4429526329040527,
      "eval_runtime": 72.2054,
      "eval_samples_per_second": 23.128,
      "eval_steps_per_second": 23.128,
      "step": 45069
    },
    {
      "epoch": 3.03,
      "learning_rate": 7.885242627970446e-06,
      "loss": 0.5414,
      "step": 45500
    },
    {
      "epoch": 3.06,
      "learning_rate": 7.75211342608001e-06,
      "loss": 0.5111,
      "step": 46000
    },
    {
      "epoch": 3.1,
      "learning_rate": 7.618984224189576e-06,
      "loss": 0.5074,
      "step": 46500
    },
    {
      "epoch": 3.13,
      "learning_rate": 7.485855022299142e-06,
      "loss": 0.4974,
      "step": 47000
    },
    {
      "epoch": 3.16,
      "learning_rate": 7.352725820408707e-06,
      "loss": 0.5118,
      "step": 47500
    },
    {
      "epoch": 3.2,
      "learning_rate": 7.219596618518273e-06,
      "loss": 0.5211,
      "step": 48000
    },
    {
      "epoch": 3.23,
      "learning_rate": 7.086467416627838e-06,
      "loss": 0.5031,
      "step": 48500
    },
    {
      "epoch": 3.26,
      "learning_rate": 6.953338214737403e-06,
      "loss": 0.5106,
      "step": 49000
    },
    {
      "epoch": 3.29,
      "learning_rate": 6.820209012846969e-06,
      "loss": 0.5179,
      "step": 49500
    },
    {
      "epoch": 3.33,
      "learning_rate": 6.687079810956534e-06,
      "loss": 0.5159,
      "step": 50000
    },
    {
      "epoch": 3.36,
      "learning_rate": 6.553950609066098e-06,
      "loss": 0.5186,
      "step": 50500
    },
    {
      "epoch": 3.39,
      "learning_rate": 6.420821407175664e-06,
      "loss": 0.507,
      "step": 51000
    },
    {
      "epoch": 3.43,
      "learning_rate": 6.287692205285229e-06,
      "loss": 0.5058,
      "step": 51500
    },
    {
      "epoch": 3.46,
      "learning_rate": 6.154563003394795e-06,
      "loss": 0.5236,
      "step": 52000
    },
    {
      "epoch": 3.49,
      "learning_rate": 6.0214338015043604e-06,
      "loss": 0.5067,
      "step": 52500
    },
    {
      "epoch": 3.53,
      "learning_rate": 5.8883045996139255e-06,
      "loss": 0.5169,
      "step": 53000
    },
    {
      "epoch": 3.56,
      "learning_rate": 5.7551753977234915e-06,
      "loss": 0.5207,
      "step": 53500
    },
    {
      "epoch": 3.59,
      "learning_rate": 5.6220461958330566e-06,
      "loss": 0.4919,
      "step": 54000
    },
    {
      "epoch": 3.63,
      "learning_rate": 5.488916993942621e-06,
      "loss": 0.5258,
      "step": 54500
    },
    {
      "epoch": 3.66,
      "learning_rate": 5.355787792052188e-06,
      "loss": 0.5061,
      "step": 55000
    },
    {
      "epoch": 3.69,
      "learning_rate": 5.222658590161752e-06,
      "loss": 0.4913,
      "step": 55500
    },
    {
      "epoch": 3.73,
      "learning_rate": 5.089529388271318e-06,
      "loss": 0.4932,
      "step": 56000
    },
    {
      "epoch": 3.76,
      "learning_rate": 4.956400186380883e-06,
      "loss": 0.4958,
      "step": 56500
    },
    {
      "epoch": 3.79,
      "learning_rate": 4.823270984490449e-06,
      "loss": 0.5071,
      "step": 57000
    },
    {
      "epoch": 3.83,
      "learning_rate": 4.690141782600014e-06,
      "loss": 0.505,
      "step": 57500
    },
    {
      "epoch": 3.86,
      "learning_rate": 4.557012580709579e-06,
      "loss": 0.5174,
      "step": 58000
    },
    {
      "epoch": 3.89,
      "learning_rate": 4.423883378819144e-06,
      "loss": 0.5126,
      "step": 58500
    },
    {
      "epoch": 3.93,
      "learning_rate": 4.29075417692871e-06,
      "loss": 0.4965,
      "step": 59000
    },
    {
      "epoch": 3.96,
      "learning_rate": 4.157624975038275e-06,
      "loss": 0.4889,
      "step": 59500
    },
    {
      "epoch": 3.99,
      "learning_rate": 4.02449577314784e-06,
      "loss": 0.5011,
      "step": 60000
    },
    {
      "epoch": 4.0,
      "eval_loss": 1.6782159805297852,
      "eval_runtime": 72.2227,
      "eval_samples_per_second": 23.123,
      "eval_steps_per_second": 23.123,
      "step": 60092
    },
    {
      "epoch": 4.03,
      "learning_rate": 3.891366571257405e-06,
      "loss": 0.3499,
      "step": 60500
    },
    {
      "epoch": 4.06,
      "learning_rate": 3.758237369366971e-06,
      "loss": 0.3204,
      "step": 61000
    },
    {
      "epoch": 4.09,
      "learning_rate": 3.6251081674765364e-06,
      "loss": 0.313,
      "step": 61500
    },
    {
      "epoch": 4.13,
      "learning_rate": 3.4919789655861015e-06,
      "loss": 0.3279,
      "step": 62000
    },
    {
      "epoch": 4.16,
      "learning_rate": 3.358849763695667e-06,
      "loss": 0.3198,
      "step": 62500
    },
    {
      "epoch": 4.19,
      "learning_rate": 3.225720561805232e-06,
      "loss": 0.3223,
      "step": 63000
    },
    {
      "epoch": 4.23,
      "learning_rate": 3.0925913599147976e-06,
      "loss": 0.3115,
      "step": 63500
    },
    {
      "epoch": 4.26,
      "learning_rate": 2.9594621580243627e-06,
      "loss": 0.3261,
      "step": 64000
    },
    {
      "epoch": 4.29,
      "learning_rate": 2.8263329561339283e-06,
      "loss": 0.3108,
      "step": 64500
    },
    {
      "epoch": 4.33,
      "learning_rate": 2.6932037542434938e-06,
      "loss": 0.3062,
      "step": 65000
    },
    {
      "epoch": 4.36,
      "learning_rate": 2.560074552353059e-06,
      "loss": 0.3057,
      "step": 65500
    },
    {
      "epoch": 4.39,
      "learning_rate": 2.4269453504626244e-06,
      "loss": 0.3065,
      "step": 66000
    },
    {
      "epoch": 4.43,
      "learning_rate": 2.2938161485721895e-06,
      "loss": 0.3097,
      "step": 66500
    },
    {
      "epoch": 4.46,
      "learning_rate": 2.160686946681755e-06,
      "loss": 0.3092,
      "step": 67000
    },
    {
      "epoch": 4.49,
      "learning_rate": 2.02755774479132e-06,
      "loss": 0.3106,
      "step": 67500
    },
    {
      "epoch": 4.53,
      "learning_rate": 1.8944285429008854e-06,
      "loss": 0.3164,
      "step": 68000
    },
    {
      "epoch": 4.56,
      "learning_rate": 1.761299341010451e-06,
      "loss": 0.3091,
      "step": 68500
    },
    {
      "epoch": 4.59,
      "learning_rate": 1.628170139120016e-06,
      "loss": 0.312,
      "step": 69000
    },
    {
      "epoch": 4.63,
      "learning_rate": 1.4950409372295815e-06,
      "loss": 0.3055,
      "step": 69500
    },
    {
      "epoch": 4.66,
      "learning_rate": 1.3619117353391466e-06,
      "loss": 0.3138,
      "step": 70000
    },
    {
      "epoch": 4.69,
      "learning_rate": 1.228782533448712e-06,
      "loss": 0.3078,
      "step": 70500
    },
    {
      "epoch": 4.73,
      "learning_rate": 1.0956533315582773e-06,
      "loss": 0.3088,
      "step": 71000
    },
    {
      "epoch": 4.76,
      "learning_rate": 9.625241296678428e-07,
      "loss": 0.3036,
      "step": 71500
    },
    {
      "epoch": 4.79,
      "learning_rate": 8.29394927777408e-07,
      "loss": 0.3113,
      "step": 72000
    },
    {
      "epoch": 4.83,
      "learning_rate": 6.962657258869733e-07,
      "loss": 0.3,
      "step": 72500
    },
    {
      "epoch": 4.86,
      "learning_rate": 5.631365239965387e-07,
      "loss": 0.3071,
      "step": 73000
    },
    {
      "epoch": 4.89,
      "learning_rate": 4.30007322106104e-07,
      "loss": 0.3062,
      "step": 73500
    },
    {
      "epoch": 4.93,
      "learning_rate": 2.968781202156693e-07,
      "loss": 0.3108,
      "step": 74000
    },
    {
      "epoch": 4.96,
      "learning_rate": 1.6374891832523464e-07,
      "loss": 0.307,
      "step": 74500
    },
    {
      "epoch": 4.99,
      "learning_rate": 3.061971643479997e-08,
      "loss": 0.3092,
      "step": 75000
    },
    {
      "epoch": 5.0,
      "eval_loss": 1.9851311445236206,
      "eval_runtime": 72.2556,
      "eval_samples_per_second": 23.112,
      "eval_steps_per_second": 23.112,
      "step": 75115
    }
  ],
  "max_steps": 75115,
  "num_train_epochs": 5,
  "total_flos": 5.581632376971264e+16,
  "trial_name": null,
  "trial_params": null
}