{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 100,
  "global_step": 1026,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0029239766081871343,
      "grad_norm": 6.1624870962638925,
      "learning_rate": 1.941747572815534e-07,
      "loss": 0.913,
      "step": 1
    },
    {
      "epoch": 0.014619883040935672,
      "grad_norm": 11.812860926129314,
      "learning_rate": 9.70873786407767e-07,
      "loss": 0.8859,
      "step": 5
    },
    {
      "epoch": 0.029239766081871343,
      "grad_norm": 2.6140064947786015,
      "learning_rate": 1.941747572815534e-06,
      "loss": 0.8409,
      "step": 10
    },
    {
      "epoch": 0.043859649122807015,
      "grad_norm": 2.2391899324318154,
      "learning_rate": 2.912621359223301e-06,
      "loss": 0.8325,
      "step": 15
    },
    {
      "epoch": 0.05847953216374269,
      "grad_norm": 2.11742528178852,
      "learning_rate": 3.883495145631068e-06,
      "loss": 0.8169,
      "step": 20
    },
    {
      "epoch": 0.07309941520467836,
      "grad_norm": 2.088530291916816,
      "learning_rate": 4.854368932038836e-06,
      "loss": 0.8168,
      "step": 25
    },
    {
      "epoch": 0.08771929824561403,
      "grad_norm": 2.028716635962563,
      "learning_rate": 5.825242718446602e-06,
      "loss": 0.8075,
      "step": 30
    },
    {
      "epoch": 0.1023391812865497,
      "grad_norm": 2.3256620167562234,
      "learning_rate": 6.79611650485437e-06,
      "loss": 0.7953,
      "step": 35
    },
    {
      "epoch": 0.11695906432748537,
      "grad_norm": 2.349007546868107,
      "learning_rate": 7.766990291262136e-06,
      "loss": 0.7841,
      "step": 40
    },
    {
      "epoch": 0.13157894736842105,
      "grad_norm": 2.2423399474385666,
      "learning_rate": 8.737864077669904e-06,
      "loss": 0.7888,
      "step": 45
    },
    {
      "epoch": 0.14619883040935672,
      "grad_norm": 1.942782247095399,
      "learning_rate": 9.708737864077671e-06,
      "loss": 0.8042,
      "step": 50
    },
    {
      "epoch": 0.1608187134502924,
      "grad_norm": 2.1945226195890033,
      "learning_rate": 1.0679611650485437e-05,
      "loss": 0.8018,
      "step": 55
    },
    {
      "epoch": 0.17543859649122806,
      "grad_norm": 1.8960792380527152,
      "learning_rate": 1.1650485436893204e-05,
      "loss": 0.7832,
      "step": 60
    },
    {
      "epoch": 0.19005847953216373,
      "grad_norm": 2.050931671194075,
      "learning_rate": 1.2621359223300974e-05,
      "loss": 0.8048,
      "step": 65
    },
    {
      "epoch": 0.2046783625730994,
      "grad_norm": 2.0914042301099043,
      "learning_rate": 1.359223300970874e-05,
      "loss": 0.7988,
      "step": 70
    },
    {
      "epoch": 0.21929824561403508,
      "grad_norm": 2.1780402538204804,
      "learning_rate": 1.4563106796116507e-05,
      "loss": 0.8046,
      "step": 75
    },
    {
      "epoch": 0.23391812865497075,
      "grad_norm": 2.2507384711871077,
      "learning_rate": 1.5533980582524273e-05,
      "loss": 0.8101,
      "step": 80
    },
    {
      "epoch": 0.24853801169590642,
      "grad_norm": 2.0740107954310987,
      "learning_rate": 1.650485436893204e-05,
      "loss": 0.7992,
      "step": 85
    },
    {
      "epoch": 0.2631578947368421,
      "grad_norm": 2.1703649782676426,
      "learning_rate": 1.7475728155339808e-05,
      "loss": 0.8168,
      "step": 90
    },
    {
      "epoch": 0.2777777777777778,
      "grad_norm": 2.4978786575354817,
      "learning_rate": 1.8446601941747575e-05,
      "loss": 0.806,
      "step": 95
    },
    {
      "epoch": 0.29239766081871343,
      "grad_norm": 2.0698254347384983,
      "learning_rate": 1.9417475728155343e-05,
      "loss": 0.8286,
      "step": 100
    },
    {
      "epoch": 0.29239766081871343,
      "eval_loss": 0.8580158948898315,
      "eval_runtime": 94.9126,
      "eval_samples_per_second": 22.99,
      "eval_steps_per_second": 0.369,
      "step": 100
    },
    {
      "epoch": 0.30701754385964913,
      "grad_norm": 2.0735037748593723,
      "learning_rate": 1.999976830073192e-05,
      "loss": 0.8281,
      "step": 105
    },
    {
      "epoch": 0.3216374269005848,
      "grad_norm": 2.095073626207817,
      "learning_rate": 1.99971618072711e-05,
      "loss": 0.8174,
      "step": 110
    },
    {
      "epoch": 0.3362573099415205,
      "grad_norm": 2.1603082170396455,
      "learning_rate": 1.9991659953668803e-05,
      "loss": 0.8217,
      "step": 115
    },
    {
      "epoch": 0.3508771929824561,
      "grad_norm": 2.0326092354167637,
      "learning_rate": 1.998326433336204e-05,
      "loss": 0.8381,
      "step": 120
    },
    {
      "epoch": 0.3654970760233918,
      "grad_norm": 2.226630427942925,
      "learning_rate": 1.997197737787537e-05,
      "loss": 0.8261,
      "step": 125
    },
    {
      "epoch": 0.38011695906432746,
      "grad_norm": 2.119406491731454,
      "learning_rate": 1.9957802356116665e-05,
      "loss": 0.8208,
      "step": 130
    },
    {
      "epoch": 0.39473684210526316,
      "grad_norm": 2.8253278464362084,
      "learning_rate": 1.994074337343041e-05,
      "loss": 0.8129,
      "step": 135
    },
    {
      "epoch": 0.4093567251461988,
      "grad_norm": 2.4882232695538464,
      "learning_rate": 1.9920805370408695e-05,
      "loss": 0.8135,
      "step": 140
    },
    {
      "epoch": 0.4239766081871345,
      "grad_norm": 4.780582760365185,
      "learning_rate": 1.9897994121460322e-05,
      "loss": 0.8364,
      "step": 145
    },
    {
      "epoch": 0.43859649122807015,
      "grad_norm": 11.5182408239238,
      "learning_rate": 1.9872316233138463e-05,
      "loss": 0.8277,
      "step": 150
    },
    {
      "epoch": 0.45321637426900585,
      "grad_norm": 12.627096663789404,
      "learning_rate": 1.9843779142227258e-05,
      "loss": 0.8196,
      "step": 155
    },
    {
      "epoch": 0.4678362573099415,
      "grad_norm": 2.740111270404608,
      "learning_rate": 1.981239111358799e-05,
      "loss": 0.8042,
      "step": 160
    },
    {
      "epoch": 0.4824561403508772,
      "grad_norm": 2.174778620165311,
      "learning_rate": 1.9778161237765438e-05,
      "loss": 0.8064,
      "step": 165
    },
    {
      "epoch": 0.49707602339181284,
      "grad_norm": 1.9246861870071321,
      "learning_rate": 1.9741099428355075e-05,
      "loss": 0.8024,
      "step": 170
    },
    {
      "epoch": 0.5116959064327485,
      "grad_norm": 9.798315852556506,
      "learning_rate": 1.9701216419131934e-05,
      "loss": 0.8352,
      "step": 175
    },
    {
      "epoch": 0.5263157894736842,
      "grad_norm": 2.9139360459829082,
      "learning_rate": 1.9658523760941882e-05,
      "loss": 0.8033,
      "step": 180
    },
    {
      "epoch": 0.5409356725146199,
      "grad_norm": 14.543721579870974,
      "learning_rate": 1.9613033818356322e-05,
      "loss": 0.8631,
      "step": 185
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 11.532560091003726,
      "learning_rate": 1.956475976609114e-05,
      "loss": 0.8304,
      "step": 190
    },
    {
      "epoch": 0.5701754385964912,
      "grad_norm": 10.191690209836944,
      "learning_rate": 1.951371558519111e-05,
      "loss": 0.8021,
      "step": 195
    },
    {
      "epoch": 0.5847953216374269,
      "grad_norm": 2.490323350347935,
      "learning_rate": 1.9459916058980707e-05,
      "loss": 0.798,
      "step": 200
    },
    {
      "epoch": 0.5847953216374269,
      "eval_loss": 0.8706345558166504,
      "eval_runtime": 94.8283,
      "eval_samples_per_second": 23.01,
      "eval_steps_per_second": 0.369,
      "step": 200
    },
    {
      "epoch": 0.5994152046783626,
      "grad_norm": 2.025711297854962,
      "learning_rate": 1.9403376768782592e-05,
      "loss": 0.7884,
      "step": 205
    },
    {
      "epoch": 0.6140350877192983,
      "grad_norm": 1.9488358870140705,
      "learning_rate": 1.9344114089404983e-05,
      "loss": 0.7804,
      "step": 210
    },
    {
      "epoch": 0.6286549707602339,
      "grad_norm": 1.8564140890417253,
      "learning_rate": 1.9282145184399197e-05,
      "loss": 0.808,
      "step": 215
    },
    {
      "epoch": 0.6432748538011696,
      "grad_norm": 2.0881170345282727,
      "learning_rate": 1.9217488001088784e-05,
      "loss": 0.799,
      "step": 220
    },
    {
      "epoch": 0.6578947368421053,
      "grad_norm": 1.9678597098298427,
      "learning_rate": 1.9150161265371663e-05,
      "loss": 0.795,
      "step": 225
    },
    {
      "epoch": 0.672514619883041,
      "grad_norm": 2.865460519803586,
      "learning_rate": 1.908018447629674e-05,
      "loss": 0.7915,
      "step": 230
    },
    {
      "epoch": 0.6871345029239766,
      "grad_norm": 2.398699191637217,
      "learning_rate": 1.9007577900416648e-05,
      "loss": 0.7814,
      "step": 235
    },
    {
      "epoch": 0.7017543859649122,
      "grad_norm": 2.1260051093101353,
      "learning_rate": 1.8932362565918167e-05,
      "loss": 0.7983,
      "step": 240
    },
    {
      "epoch": 0.716374269005848,
      "grad_norm": 5.80073712189035,
      "learning_rate": 1.8854560256532098e-05,
      "loss": 0.7691,
      "step": 245
    },
    {
      "epoch": 0.7309941520467836,
      "grad_norm": 2.5341535901655896,
      "learning_rate": 1.877419350522429e-05,
      "loss": 0.8155,
      "step": 250
    },
    {
      "epoch": 0.7456140350877193,
      "grad_norm": 1.95626689403789,
      "learning_rate": 1.869128558766965e-05,
      "loss": 0.7878,
      "step": 255
    },
    {
      "epoch": 0.7602339181286549,
      "grad_norm": 15.120401535093198,
      "learning_rate": 1.8605860515511158e-05,
      "loss": 0.7883,
      "step": 260
    },
    {
      "epoch": 0.7748538011695907,
      "grad_norm": 2.2607309079656135,
      "learning_rate": 1.8517943029405577e-05,
      "loss": 0.7999,
      "step": 265
    },
    {
      "epoch": 0.7894736842105263,
      "grad_norm": 2.295750513860874,
      "learning_rate": 1.8427558591858164e-05,
      "loss": 0.7697,
      "step": 270
    },
    {
      "epoch": 0.804093567251462,
      "grad_norm": 4.1135088544849445,
      "learning_rate": 1.833473337984823e-05,
      "loss": 0.8065,
      "step": 275
    },
    {
      "epoch": 0.8187134502923976,
      "grad_norm": 3.2241188905244895,
      "learning_rate": 1.823949427724785e-05,
      "loss": 0.799,
      "step": 280
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 1.6843445283218286,
      "learning_rate": 1.8141868867035745e-05,
      "loss": 0.7909,
      "step": 285
    },
    {
      "epoch": 0.847953216374269,
      "grad_norm": 1.7626650921963332,
      "learning_rate": 1.8041885423308808e-05,
      "loss": 0.781,
      "step": 290
    },
    {
      "epoch": 0.8625730994152047,
      "grad_norm": 10.38812175281534,
      "learning_rate": 1.7939572903093383e-05,
      "loss": 0.7923,
      "step": 295
    },
    {
      "epoch": 0.8771929824561403,
      "grad_norm": 3.1451964354292596,
      "learning_rate": 1.7834960937958775e-05,
      "loss": 0.7677,
      "step": 300
    },
    {
      "epoch": 0.8771929824561403,
      "eval_loss": 0.8726791739463806,
      "eval_runtime": 93.9416,
      "eval_samples_per_second": 23.227,
      "eval_steps_per_second": 0.373,
      "step": 300
    },
    {
      "epoch": 0.8918128654970761,
      "grad_norm": 1.9970765983454766,
      "learning_rate": 1.7728079825435426e-05,
      "loss": 0.7966,
      "step": 305
    },
    {
      "epoch": 0.9064327485380117,
      "grad_norm": 1.8313570465257998,
      "learning_rate": 1.761896052024019e-05,
      "loss": 0.7784,
      "step": 310
    },
    {
      "epoch": 0.9210526315789473,
      "grad_norm": 2.028945141590537,
      "learning_rate": 1.7507634625311283e-05,
      "loss": 0.7569,
      "step": 315
    },
    {
      "epoch": 0.935672514619883,
      "grad_norm": 3.1385176933971706,
      "learning_rate": 1.7394134382655496e-05,
      "loss": 0.7805,
      "step": 320
    },
    {
      "epoch": 0.9502923976608187,
      "grad_norm": 1.7723196199368927,
      "learning_rate": 1.7278492664010326e-05,
      "loss": 0.7681,
      "step": 325
    },
    {
      "epoch": 0.9649122807017544,
      "grad_norm": 1.7577488585231846,
      "learning_rate": 1.7160742961323714e-05,
      "loss": 0.7672,
      "step": 330
    },
    {
      "epoch": 0.97953216374269,
      "grad_norm": 2.3636063665348757,
      "learning_rate": 1.7040919377054182e-05,
      "loss": 0.7795,
      "step": 335
    },
    {
      "epoch": 0.9941520467836257,
      "grad_norm": 1.7709587208644213,
      "learning_rate": 1.6919056614294133e-05,
      "loss": 0.7624,
      "step": 340
    },
    {
      "epoch": 1.0087719298245614,
      "grad_norm": 3.8073195101773765,
      "learning_rate": 1.6795189966719182e-05,
      "loss": 0.6426,
      "step": 345
    },
    {
      "epoch": 1.023391812865497,
      "grad_norm": 2.787413972058394,
      "learning_rate": 1.666935530836651e-05,
      "loss": 0.5366,
      "step": 350
    },
    {
      "epoch": 1.0380116959064327,
      "grad_norm": 2.362170165993678,
      "learning_rate": 1.654158908324504e-05,
      "loss": 0.5377,
      "step": 355
    },
    {
      "epoch": 1.0526315789473684,
      "grad_norm": 2.311139734884461,
      "learning_rate": 1.6411928294780626e-05,
      "loss": 0.5252,
      "step": 360
    },
    {
      "epoch": 1.067251461988304,
      "grad_norm": 1.886267380269756,
      "learning_rate": 1.6280410495099165e-05,
      "loss": 0.5028,
      "step": 365
    },
    {
      "epoch": 1.0818713450292399,
      "grad_norm": 1.8701236760118256,
      "learning_rate": 1.6147073774150834e-05,
      "loss": 0.5121,
      "step": 370
    },
    {
      "epoch": 1.0964912280701755,
      "grad_norm": 1.7743194110147134,
      "learning_rate": 1.601195674867853e-05,
      "loss": 0.5189,
      "step": 375
    },
    {
      "epoch": 1.1111111111111112,
      "grad_norm": 36.55479744540479,
      "learning_rate": 1.5875098551033765e-05,
      "loss": 0.5071,
      "step": 380
    },
    {
      "epoch": 1.1257309941520468,
      "grad_norm": 2.1840870340337823,
      "learning_rate": 1.5736538817843228e-05,
      "loss": 0.5489,
      "step": 385
    },
    {
      "epoch": 1.1403508771929824,
      "grad_norm": 2.096047543627006,
      "learning_rate": 1.559631767852929e-05,
      "loss": 0.5115,
      "step": 390
    },
    {
      "epoch": 1.154970760233918,
      "grad_norm": 8.700584160621043,
      "learning_rate": 1.54544757436878e-05,
      "loss": 0.5061,
      "step": 395
    },
    {
      "epoch": 1.1695906432748537,
      "grad_norm": 1.7774756036197978,
      "learning_rate": 1.5311054093326508e-05,
      "loss": 0.5584,
      "step": 400
    },
    {
      "epoch": 1.1695906432748537,
      "eval_loss": 0.9159147143363953,
      "eval_runtime": 94.2366,
      "eval_samples_per_second": 23.154,
      "eval_steps_per_second": 0.371,
      "step": 400
    },
    {
      "epoch": 1.1842105263157894,
      "grad_norm": 2.550427212943654,
      "learning_rate": 1.5166094264967553e-05,
      "loss": 0.5166,
      "step": 405
    },
    {
      "epoch": 1.198830409356725,
      "grad_norm": 1.9907997528305226,
      "learning_rate": 1.5019638241617429e-05,
      "loss": 0.5131,
      "step": 410
    },
    {
      "epoch": 1.213450292397661,
      "grad_norm": 1.7648223360572315,
      "learning_rate": 1.4871728439607967e-05,
      "loss": 0.5182,
      "step": 415
    },
    {
      "epoch": 1.2280701754385965,
      "grad_norm": 2.23899567188947,
      "learning_rate": 1.472240769631176e-05,
      "loss": 0.5062,
      "step": 420
    },
    {
      "epoch": 1.2426900584795322,
      "grad_norm": 1.763198949927548,
      "learning_rate": 1.4571719257735702e-05,
      "loss": 0.5203,
      "step": 425
    },
    {
      "epoch": 1.2573099415204678,
      "grad_norm": 4.030641844892038,
      "learning_rate": 1.4419706765996153e-05,
      "loss": 0.5176,
      "step": 430
    },
    {
      "epoch": 1.2719298245614035,
      "grad_norm": 1.8591284929042133,
      "learning_rate": 1.4266414246679379e-05,
      "loss": 0.5099,
      "step": 435
    },
    {
      "epoch": 1.286549707602339,
      "grad_norm": 1.8559628967021533,
      "learning_rate": 1.4111886096090953e-05,
      "loss": 0.5129,
      "step": 440
    },
    {
      "epoch": 1.301169590643275,
      "grad_norm": 2.862306979981545,
      "learning_rate": 1.3956167068397756e-05,
      "loss": 0.5075,
      "step": 445
    },
    {
      "epoch": 1.3157894736842106,
      "grad_norm": 1.8928522544167012,
      "learning_rate": 1.3799302262666388e-05,
      "loss": 0.5177,
      "step": 450
    },
    {
      "epoch": 1.3304093567251463,
      "grad_norm": 1.8170476120999561,
      "learning_rate": 1.364133710980162e-05,
      "loss": 0.516,
      "step": 455
    },
    {
      "epoch": 1.345029239766082,
      "grad_norm": 1.735070100858528,
      "learning_rate": 1.3482317359388806e-05,
      "loss": 0.5086,
      "step": 460
    },
    {
      "epoch": 1.3596491228070176,
      "grad_norm": 2.0140408558653737,
      "learning_rate": 1.3322289066443947e-05,
      "loss": 0.5169,
      "step": 465
    },
    {
      "epoch": 1.3742690058479532,
      "grad_norm": 1.7311234684729015,
      "learning_rate": 1.316129857807534e-05,
      "loss": 0.5077,
      "step": 470
    },
    {
      "epoch": 1.3888888888888888,
      "grad_norm": 1.7834637989309219,
      "learning_rate": 1.2999392520060594e-05,
      "loss": 0.5181,
      "step": 475
    },
    {
      "epoch": 1.4035087719298245,
      "grad_norm": 1.725680916190487,
      "learning_rate": 1.2836617783342968e-05,
      "loss": 0.5102,
      "step": 480
    },
    {
      "epoch": 1.4181286549707601,
      "grad_norm": 1.8726265727670828,
      "learning_rate": 1.2673021510450893e-05,
      "loss": 0.5169,
      "step": 485
    },
    {
      "epoch": 1.4327485380116958,
      "grad_norm": 2.056653138512768,
      "learning_rate": 1.250865108184464e-05,
      "loss": 0.5085,
      "step": 490
    },
    {
      "epoch": 1.4473684210526316,
      "grad_norm": 1.6627221599390933,
      "learning_rate": 1.2343554102194073e-05,
      "loss": 0.4981,
      "step": 495
    },
    {
      "epoch": 1.4619883040935673,
      "grad_norm": 1.7476089665186805,
      "learning_rate": 1.2177778386591475e-05,
      "loss": 0.5186,
      "step": 500
    },
    {
      "epoch": 1.4619883040935673,
      "eval_loss": 0.8920474648475647,
      "eval_runtime": 93.9752,
      "eval_samples_per_second": 23.219,
      "eval_steps_per_second": 0.372,
      "step": 500
    },
    {
      "epoch": 1.476608187134503,
      "grad_norm": 1.6946408040860308,
      "learning_rate": 1.2011371946703416e-05,
      "loss": 0.5197,
      "step": 505
    },
    {
      "epoch": 1.4912280701754386,
      "grad_norm": 1.7569189927952173,
      "learning_rate": 1.1844382976865714e-05,
      "loss": 0.515,
      "step": 510
    },
    {
      "epoch": 1.5058479532163744,
      "grad_norm": 1.9990864745103767,
      "learning_rate": 1.1676859840125468e-05,
      "loss": 0.5188,
      "step": 515
    },
    {
      "epoch": 1.52046783625731,
      "grad_norm": 1.659671819522036,
      "learning_rate": 1.1508851054234236e-05,
      "loss": 0.4952,
      "step": 520
    },
    {
      "epoch": 1.5350877192982457,
      "grad_norm": 1.8125623588878788,
      "learning_rate": 1.1340405277596427e-05,
      "loss": 0.5029,
      "step": 525
    },
    {
      "epoch": 1.5497076023391814,
      "grad_norm": 1.6209359235184773,
      "learning_rate": 1.1171571295176915e-05,
      "loss": 0.5016,
      "step": 530
    },
    {
      "epoch": 1.564327485380117,
      "grad_norm": 1.7381191339168205,
      "learning_rate": 1.1002398004372048e-05,
      "loss": 0.4938,
      "step": 535
    },
    {
      "epoch": 1.5789473684210527,
      "grad_norm": 1.7440328988243268,
      "learning_rate": 1.0832934400848063e-05,
      "loss": 0.508,
      "step": 540
    },
    {
      "epoch": 1.5935672514619883,
      "grad_norm": 1.6873580611700445,
      "learning_rate": 1.066322956435104e-05,
      "loss": 0.4994,
      "step": 545
    },
    {
      "epoch": 1.608187134502924,
      "grad_norm": 1.6955737592784719,
      "learning_rate": 1.0493332644492534e-05,
      "loss": 0.494,
      "step": 550
    },
    {
      "epoch": 1.6228070175438596,
      "grad_norm": 1.8639586363579819,
      "learning_rate": 1.0323292846514927e-05,
      "loss": 0.5086,
      "step": 555
    },
    {
      "epoch": 1.6374269005847952,
      "grad_norm": 1.6602403945581443,
      "learning_rate": 1.015315941704071e-05,
      "loss": 0.4958,
      "step": 560
    },
    {
      "epoch": 1.6520467836257309,
      "grad_norm": 1.6463228018665035,
      "learning_rate": 9.982981629809776e-06,
      "loss": 0.4947,
      "step": 565
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 1.8439520823673021,
      "learning_rate": 9.812808771408804e-06,
      "loss": 0.4943,
      "step": 570
    },
    {
      "epoch": 1.6812865497076024,
      "grad_norm": 1.6567753225002235,
      "learning_rate": 9.64269012699702e-06,
      "loss": 0.4815,
      "step": 575
    },
    {
      "epoch": 1.695906432748538,
      "grad_norm": 1.6625638574779524,
      "learning_rate": 9.472674966032276e-06,
      "loss": 0.5006,
      "step": 580
    },
    {
      "epoch": 1.7105263157894737,
      "grad_norm": 1.898719426059897,
      "learning_rate": 9.302812528001741e-06,
      "loss": 0.5086,
      "step": 585
    },
    {
      "epoch": 1.7251461988304093,
      "grad_norm": 1.8795535435362318,
      "learning_rate": 9.133152008161235e-06,
      "loss": 0.5094,
      "step": 590
    },
    {
      "epoch": 1.7397660818713452,
      "grad_norm": 1.7067878022572074,
      "learning_rate": 8.963742543287397e-06,
      "loss": 0.5001,
      "step": 595
    },
    {
      "epoch": 1.7543859649122808,
      "grad_norm": 1.6684322823190847,
      "learning_rate": 8.79463319744677e-06,
      "loss": 0.5067,
      "step": 600
    },
    {
      "epoch": 1.7543859649122808,
      "eval_loss": 0.8780078291893005,
      "eval_runtime": 94.1156,
      "eval_samples_per_second": 23.184,
      "eval_steps_per_second": 0.372,
      "step": 600
    },
    {
      "epoch": 1.7690058479532165,
      "grad_norm": 1.575226401438412,
      "learning_rate": 8.625872947785968e-06,
      "loss": 0.5163,
      "step": 605
    },
    {
      "epoch": 1.7836257309941521,
      "grad_norm": 1.665839831059325,
      "learning_rate": 8.457510670346976e-06,
      "loss": 0.5185,
      "step": 610
    },
    {
      "epoch": 1.7982456140350878,
      "grad_norm": 1.668476308962127,
      "learning_rate": 8.2895951259118e-06,
      "loss": 0.4891,
      "step": 615
    },
    {
      "epoch": 1.8128654970760234,
      "grad_norm": 1.6099746797995087,
      "learning_rate": 8.122174945880409e-06,
      "loss": 0.4941,
      "step": 620
    },
    {
      "epoch": 1.827485380116959,
      "grad_norm": 1.5972670521003787,
      "learning_rate": 7.955298618186227e-06,
      "loss": 0.4983,
      "step": 625
    },
    {
      "epoch": 1.8421052631578947,
      "grad_norm": 1.6567002288888941,
      "learning_rate": 7.78901447325314e-06,
      "loss": 0.4923,
      "step": 630
    },
    {
      "epoch": 1.8567251461988303,
      "grad_norm": 1.633625117649634,
      "learning_rate": 7.623370669998115e-06,
      "loss": 0.4952,
      "step": 635
    },
    {
      "epoch": 1.871345029239766,
      "grad_norm": 1.6702231621007573,
      "learning_rate": 7.458415181883506e-06,
      "loss": 0.4781,
      "step": 640
    },
    {
      "epoch": 1.8859649122807016,
      "grad_norm": 1.5695315130710974,
      "learning_rate": 7.294195783023072e-06,
      "loss": 0.4772,
      "step": 645
    },
    {
      "epoch": 1.9005847953216373,
      "grad_norm": 1.5932464772328656,
      "learning_rate": 7.130760034345708e-06,
      "loss": 0.4766,
      "step": 650
    },
    {
      "epoch": 1.9152046783625731,
      "grad_norm": 1.5938375889822713,
      "learning_rate": 6.968155269820951e-06,
      "loss": 0.4954,
      "step": 655
    },
    {
      "epoch": 1.9298245614035088,
      "grad_norm": 1.6068162124707588,
      "learning_rate": 6.806428582750191e-06,
      "loss": 0.5031,
      "step": 660
    },
    {
      "epoch": 1.9444444444444444,
      "grad_norm": 1.6369099221197159,
      "learning_rate": 6.645626812127588e-06,
      "loss": 0.4871,
      "step": 665
    },
    {
      "epoch": 1.95906432748538,
      "grad_norm": 1.8488127966750827,
      "learning_rate": 6.485796529074662e-06,
      "loss": 0.4793,
      "step": 670
    },
    {
      "epoch": 1.973684210526316,
      "grad_norm": 1.5795572120040184,
      "learning_rate": 6.326984023352435e-06,
      "loss": 0.4728,
      "step": 675
    },
    {
      "epoch": 1.9883040935672516,
      "grad_norm": 1.8151786682783004,
      "learning_rate": 6.169235289955073e-06,
      "loss": 0.4659,
      "step": 680
    },
    {
      "epoch": 2.002923976608187,
      "grad_norm": 3.8300611957194977,
      "learning_rate": 6.012596015788903e-06,
      "loss": 0.4261,
      "step": 685
    },
    {
      "epoch": 2.017543859649123,
      "grad_norm": 2.621763538813387,
      "learning_rate": 5.8571115664406655e-06,
      "loss": 0.2426,
      "step": 690
    },
    {
      "epoch": 2.0321637426900585,
      "grad_norm": 1.8343232864531938,
      "learning_rate": 5.702826973038776e-06,
      "loss": 0.24,
      "step": 695
    },
    {
      "epoch": 2.046783625730994,
      "grad_norm": 1.9497619965124309,
      "learning_rate": 5.549786919211532e-06,
      "loss": 0.2223,
      "step": 700
    },
    {
      "epoch": 2.046783625730994,
      "eval_loss": 0.951133131980896,
      "eval_runtime": 93.9878,
      "eval_samples_per_second": 23.216,
      "eval_steps_per_second": 0.372,
      "step": 700
    },
    {
      "epoch": 2.06140350877193,
      "grad_norm": 1.7439743078429233,
      "learning_rate": 5.3980357281459e-06,
      "loss": 0.229,
      "step": 705
    },
    {
      "epoch": 2.0760233918128654,
      "grad_norm": 1.5354726993504848,
      "learning_rate": 5.247617349750717e-06,
      "loss": 0.2083,
      "step": 710
    },
    {
      "epoch": 2.090643274853801,
      "grad_norm": 7.788063462658685,
      "learning_rate": 5.0985753479279824e-06,
      "loss": 0.2391,
      "step": 715
    },
    {
      "epoch": 2.1052631578947367,
      "grad_norm": 1.5764797068116867,
      "learning_rate": 4.950952887955992e-06,
      "loss": 0.2146,
      "step": 720
    },
    {
      "epoch": 2.1198830409356724,
      "grad_norm": 1.5338239957853983,
      "learning_rate": 4.80479272398786e-06,
      "loss": 0.2145,
      "step": 725
    },
    {
      "epoch": 2.134502923976608,
      "grad_norm": 1.5400470674701994,
      "learning_rate": 4.660137186669131e-06,
      "loss": 0.2131,
      "step": 730
    },
    {
      "epoch": 2.1491228070175437,
      "grad_norm": 1.5891715585976705,
      "learning_rate": 4.5170281708780865e-06,
      "loss": 0.2273,
      "step": 735
    },
    {
      "epoch": 2.1637426900584797,
      "grad_norm": 1.6323196857228797,
      "learning_rate": 4.375507123592194e-06,
      "loss": 0.2221,
      "step": 740
    },
    {
      "epoch": 2.1783625730994154,
      "grad_norm": 3.4202645186771896,
      "learning_rate": 4.235615031884326e-06,
      "loss": 0.2139,
      "step": 745
    },
    {
      "epoch": 2.192982456140351,
      "grad_norm": 1.6425009664630226,
      "learning_rate": 4.097392411052149e-06,
      "loss": 0.2165,
      "step": 750
    },
    {
      "epoch": 2.2076023391812867,
      "grad_norm": 1.5960953283192305,
      "learning_rate": 3.9608792928841596e-06,
      "loss": 0.201,
      "step": 755
    },
    {
      "epoch": 2.2222222222222223,
      "grad_norm": 1.5527267754127971,
      "learning_rate": 3.826115214065739e-06,
      "loss": 0.2162,
      "step": 760
    },
    {
      "epoch": 2.236842105263158,
      "grad_norm": 1.5345330165293922,
      "learning_rate": 3.693139204728623e-06,
      "loss": 0.2033,
      "step": 765
    },
    {
      "epoch": 2.2514619883040936,
      "grad_norm": 1.5602344960819443,
      "learning_rate": 3.561989777147059e-06,
      "loss": 0.2175,
      "step": 770
    },
    {
      "epoch": 2.2660818713450293,
      "grad_norm": 1.543288744735399,
      "learning_rate": 3.4327049145839496e-06,
      "loss": 0.2044,
      "step": 775
    },
    {
      "epoch": 2.280701754385965,
      "grad_norm": 1.5668680143830462,
      "learning_rate": 3.3053220602902057e-06,
      "loss": 0.2067,
      "step": 780
    },
    {
      "epoch": 2.2953216374269005,
      "grad_norm": 1.4653721970803488,
      "learning_rate": 3.1798781066605076e-06,
      "loss": 0.2072,
      "step": 785
    },
    {
      "epoch": 2.309941520467836,
      "grad_norm": 1.5895314411422212,
      "learning_rate": 3.056409384548575e-06,
      "loss": 0.2077,
      "step": 790
    },
    {
      "epoch": 2.324561403508772,
      "grad_norm": 1.5324432023481178,
      "learning_rate": 2.934951652745123e-06,
      "loss": 0.2067,
      "step": 795
    },
    {
      "epoch": 2.3391812865497075,
      "grad_norm": 1.6877695775778203,
      "learning_rate": 2.8155400876214365e-06,
      "loss": 0.2114,
      "step": 800
    },
    {
      "epoch": 2.3391812865497075,
      "eval_loss": 0.9491869807243347,
      "eval_runtime": 94.0592,
      "eval_samples_per_second": 23.198,
      "eval_steps_per_second": 0.372,
      "step": 800
    },
    {
      "epoch": 2.353801169590643,
      "grad_norm": 1.797333972810049,
      "learning_rate": 2.698209272941659e-06,
      "loss": 0.2066,
      "step": 805
    },
    {
      "epoch": 2.3684210526315788,
      "grad_norm": 1.4817024828655516,
      "learning_rate": 2.5829931898467143e-06,
      "loss": 0.2003,
      "step": 810
    },
    {
      "epoch": 2.383040935672515,
      "grad_norm": 1.5944694035375513,
      "learning_rate": 2.469925207012741e-06,
      "loss": 0.2111,
      "step": 815
    },
    {
      "epoch": 2.39766081871345,
      "grad_norm": 1.5437718528241764,
      "learning_rate": 2.3590380709869175e-06,
      "loss": 0.2036,
      "step": 820
    },
    {
      "epoch": 2.412280701754386,
      "grad_norm": 1.5545155179428847,
      "learning_rate": 2.2503638967034668e-06,
      "loss": 0.2119,
      "step": 825
    },
    {
      "epoch": 2.426900584795322,
      "grad_norm": 1.5090412576695187,
      "learning_rate": 2.1439341581825855e-06,
      "loss": 0.2079,
      "step": 830
    },
    {
      "epoch": 2.4415204678362574,
      "grad_norm": 1.5651792272296057,
      "learning_rate": 2.039779679414996e-06,
      "loss": 0.2049,
      "step": 835
    },
    {
      "epoch": 2.456140350877193,
      "grad_norm": 1.629599771732464,
      "learning_rate": 1.9379306254347487e-06,
      "loss": 0.2063,
      "step": 840
    },
    {
      "epoch": 2.4707602339181287,
      "grad_norm": 1.517424552525727,
      "learning_rate": 1.838416493582893e-06,
      "loss": 0.2054,
      "step": 845
    },
    {
      "epoch": 2.4853801169590644,
      "grad_norm": 1.5665838306665794,
      "learning_rate": 1.7412661049645097e-06,
      "loss": 0.2072,
      "step": 850
    },
    {
      "epoch": 2.5,
      "grad_norm": 1.5709890486248266,
      "learning_rate": 1.6465075961015697e-06,
      "loss": 0.1969,
      "step": 855
    },
    {
      "epoch": 2.5146198830409356,
      "grad_norm": 1.433926747309705,
      "learning_rate": 1.554168410784117e-06,
      "loss": 0.2025,
      "step": 860
    },
    {
      "epoch": 2.5292397660818713,
      "grad_norm": 1.56024876676377,
      "learning_rate": 1.4642752921220272e-06,
      "loss": 0.2019,
      "step": 865
    },
    {
      "epoch": 2.543859649122807,
      "grad_norm": 1.4917168419123434,
      "learning_rate": 1.3768542747997215e-06,
      "loss": 0.2092,
      "step": 870
    },
    {
      "epoch": 2.5584795321637426,
      "grad_norm": 1.5176036989413515,
      "learning_rate": 1.2919306775360495e-06,
      "loss": 0.2053,
      "step": 875
    },
    {
      "epoch": 2.573099415204678,
      "grad_norm": 1.5727527454387282,
      "learning_rate": 1.209529095751527e-06,
      "loss": 0.2014,
      "step": 880
    },
    {
      "epoch": 2.587719298245614,
      "grad_norm": 1.489918411889695,
      "learning_rate": 1.1296733944450445e-06,
      "loss": 0.2012,
      "step": 885
    },
    {
      "epoch": 2.60233918128655,
      "grad_norm": 1.5532738891332698,
      "learning_rate": 1.0523867012821444e-06,
      "loss": 0.2031,
      "step": 890
    },
    {
      "epoch": 2.616959064327485,
      "grad_norm": 1.4483198492727338,
      "learning_rate": 9.776913998968196e-07,
      "loss": 0.2012,
      "step": 895
    },
    {
      "epoch": 2.6315789473684212,
      "grad_norm": 1.5331083693362297,
      "learning_rate": 9.056091234088038e-07,
      "loss": 0.1983,
      "step": 900
    },
    {
      "epoch": 2.6315789473684212,
      "eval_loss": 0.9508408904075623,
      "eval_runtime": 93.8904,
      "eval_samples_per_second": 23.24,
      "eval_steps_per_second": 0.373,
      "step": 900
    },
    {
      "epoch": 2.646198830409357,
      "grad_norm": 1.4959941578534954,
      "learning_rate": 8.361607481582312e-07,
      "loss": 0.2031,
      "step": 905
    },
    {
      "epoch": 2.6608187134502925,
      "grad_norm": 1.533846604598029,
      "learning_rate": 7.693663876594648e-07,
      "loss": 0.2086,
      "step": 910
    },
    {
      "epoch": 2.675438596491228,
      "grad_norm": 1.4882252878887594,
      "learning_rate": 7.052453867758525e-07,
      "loss": 0.2044,
      "step": 915
    },
    {
      "epoch": 2.690058479532164,
      "grad_norm": 1.463561383109587,
      "learning_rate": 6.438163161171096e-07,
      "loss": 0.199,
      "step": 920
    },
    {
      "epoch": 2.7046783625730995,
      "grad_norm": 1.4904086029449222,
      "learning_rate": 5.850969666609363e-07,
      "loss": 0.1949,
      "step": 925
    },
    {
      "epoch": 2.719298245614035,
      "grad_norm": 1.4484608706071034,
      "learning_rate": 5.291043446004074e-07,
      "loss": 0.1941,
      "step": 930
    },
    {
      "epoch": 2.7339181286549707,
      "grad_norm": 1.4693647586602272,
      "learning_rate": 4.7585466641868696e-07,
      "loss": 0.1925,
      "step": 935
    },
    {
      "epoch": 2.7485380116959064,
      "grad_norm": 1.4804496226260073,
      "learning_rate": 4.25363354192434e-07,
      "loss": 0.2027,
      "step": 940
    },
    {
      "epoch": 2.763157894736842,
      "grad_norm": 1.5177590267070082,
      "learning_rate": 3.776450311252866e-07,
      "loss": 0.2068,
      "step": 945
    },
    {
      "epoch": 2.7777777777777777,
      "grad_norm": 1.4962603716375011,
      "learning_rate": 3.3271351731271717e-07,
      "loss": 0.1993,
      "step": 950
    },
    {
      "epoch": 2.7923976608187133,
      "grad_norm": 1.53541672293609,
      "learning_rate": 2.905818257394799e-07,
      "loss": 0.2126,
      "step": 955
    },
    {
      "epoch": 2.807017543859649,
      "grad_norm": 1.4669550373356948,
      "learning_rate": 2.512621585108155e-07,
      "loss": 0.2021,
      "step": 960
    },
    {
      "epoch": 2.821637426900585,
      "grad_norm": 1.4475885818879743,
      "learning_rate": 2.1476590331849566e-07,
      "loss": 0.195,
      "step": 965
    },
    {
      "epoch": 2.8362573099415203,
      "grad_norm": 1.487795902053431,
      "learning_rate": 1.811036301427449e-07,
      "loss": 0.2073,
      "step": 970
    },
    {
      "epoch": 2.8508771929824563,
      "grad_norm": 1.4972431353793751,
      "learning_rate": 1.502850881909801e-07,
      "loss": 0.2017,
      "step": 975
    },
    {
      "epoch": 2.8654970760233915,
      "grad_norm": 1.506977096918749,
      "learning_rate": 1.2231920307425927e-07,
      "loss": 0.2081,
      "step": 980
    },
    {
      "epoch": 2.8801169590643276,
      "grad_norm": 1.5752046829905806,
      "learning_rate": 9.721407422226492e-08,
      "loss": 0.2089,
      "step": 985
    },
    {
      "epoch": 2.8947368421052633,
      "grad_norm": 1.4564645802512894,
      "learning_rate": 7.497697253756265e-08,
      "loss": 0.1933,
      "step": 990
    },
    {
      "epoch": 2.909356725146199,
      "grad_norm": 1.493498674859337,
      "learning_rate": 5.5614338289812216e-08,
      "loss": 0.1974,
      "step": 995
    },
    {
      "epoch": 2.9239766081871346,
      "grad_norm": 1.4762567685805037,
      "learning_rate": 3.913177925055189e-08,
      "loss": 0.2014,
      "step": 1000
    },
    {
      "epoch": 2.9239766081871346,
      "eval_loss": 0.946516215801239,
      "eval_runtime": 94.0428,
      "eval_samples_per_second": 23.202,
      "eval_steps_per_second": 0.372,
      "step": 1000
    },
    {
      "epoch": 2.93859649122807,
      "grad_norm": 1.47647651184245,
      "learning_rate": 2.5534069069081957e-08,
      "loss": 0.2044,
      "step": 1005
    },
    {
      "epoch": 2.953216374269006,
      "grad_norm": 1.4575732503367402,
      "learning_rate": 1.482514588993067e-08,
      "loss": 0.2001,
      "step": 1010
    },
    {
      "epoch": 2.9678362573099415,
      "grad_norm": 1.5453006830413416,
      "learning_rate": 7.0081112122966086e-09,
      "loss": 0.2042,
      "step": 1015
    },
    {
      "epoch": 2.982456140350877,
      "grad_norm": 1.7014593329310401,
      "learning_rate": 2.0852289917971947e-09,
      "loss": 0.1995,
      "step": 1020
    },
    {
      "epoch": 2.997076023391813,
      "grad_norm": 1.5593115324810627,
      "learning_rate": 5.792498478651709e-11,
      "loss": 0.2055,
      "step": 1025
    },
    {
      "epoch": 3.0,
      "step": 1026,
      "total_flos": 429437673799680.0,
      "train_loss": 0.5057662985420134,
      "train_runtime": 26805.8841,
      "train_samples_per_second": 4.896,
      "train_steps_per_second": 0.038
    }
  ],
  "logging_steps": 5,
  "max_steps": 1026,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 429437673799680.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
|