{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.0,
  "eval_steps": 500,
  "global_step": 24996,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008001280204832774,
      "grad_norm": 0.9324016571044922,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 2.0562,
      "step": 50
    },
    {
      "epoch": 0.016002560409665547,
      "grad_norm": 0.4582217037677765,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.9037,
      "step": 100
    },
    {
      "epoch": 0.02400384061449832,
      "grad_norm": 0.6721668243408203,
      "learning_rate": 6e-06,
      "loss": 1.6957,
      "step": 150
    },
    {
      "epoch": 0.032005120819331094,
      "grad_norm": 0.6238133907318115,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.5897,
      "step": 200
    },
    {
      "epoch": 0.04000640102416387,
      "grad_norm": 0.6062737703323364,
      "learning_rate": 1e-05,
      "loss": 1.5664,
      "step": 250
    },
    {
      "epoch": 0.04800768122899664,
      "grad_norm": 0.6185649633407593,
      "learning_rate": 1.2e-05,
      "loss": 1.5315,
      "step": 300
    },
    {
      "epoch": 0.05600896143382941,
      "grad_norm": 0.6351732611656189,
      "learning_rate": 1.4000000000000001e-05,
      "loss": 1.4996,
      "step": 350
    },
    {
      "epoch": 0.06401024163866219,
      "grad_norm": 0.6717058420181274,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.4784,
      "step": 400
    },
    {
      "epoch": 0.07201152184349496,
      "grad_norm": 0.6665578484535217,
      "learning_rate": 1.8e-05,
      "loss": 1.4795,
      "step": 450
    },
    {
      "epoch": 0.08001280204832774,
      "grad_norm": 0.5694233775138855,
      "learning_rate": 2e-05,
      "loss": 1.4583,
      "step": 500
    },
    {
      "epoch": 0.0880140822531605,
      "grad_norm": 0.6044241786003113,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 1.4392,
      "step": 550
    },
    {
      "epoch": 0.09601536245799328,
      "grad_norm": 0.6153579354286194,
      "learning_rate": 2.4e-05,
      "loss": 1.436,
      "step": 600
    },
    {
      "epoch": 0.10401664266282605,
      "grad_norm": 0.5287560224533081,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 1.4244,
      "step": 650
    },
    {
      "epoch": 0.11201792286765883,
      "grad_norm": 0.5597755908966064,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 1.4369,
      "step": 700
    },
    {
      "epoch": 0.1200192030724916,
      "grad_norm": 0.4981745183467865,
      "learning_rate": 3e-05,
      "loss": 1.3981,
      "step": 750
    },
    {
      "epoch": 0.12802048327732438,
      "grad_norm": 0.48743560910224915,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 1.4009,
      "step": 800
    },
    {
      "epoch": 0.13602176348215714,
      "grad_norm": 0.5637604594230652,
      "learning_rate": 3.4000000000000007e-05,
      "loss": 1.4028,
      "step": 850
    },
    {
      "epoch": 0.14402304368698993,
      "grad_norm": 0.470486581325531,
      "learning_rate": 3.6e-05,
      "loss": 1.3938,
      "step": 900
    },
    {
      "epoch": 0.1520243238918227,
      "grad_norm": 0.5075104236602783,
      "learning_rate": 3.8e-05,
      "loss": 1.3859,
      "step": 950
    },
    {
      "epoch": 0.16002560409665548,
      "grad_norm": 0.4708271324634552,
      "learning_rate": 4e-05,
      "loss": 1.3865,
      "step": 1000
    },
    {
      "epoch": 0.16802688430148824,
      "grad_norm": 0.3994545638561249,
      "learning_rate": 4.2e-05,
      "loss": 1.4072,
      "step": 1050
    },
    {
      "epoch": 0.176028164506321,
      "grad_norm": 0.4936317801475525,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 1.3828,
      "step": 1100
    },
    {
      "epoch": 0.1840294447111538,
      "grad_norm": 0.3775925934314728,
      "learning_rate": 4.600000000000001e-05,
      "loss": 1.4046,
      "step": 1150
    },
    {
      "epoch": 0.19203072491598655,
      "grad_norm": 0.45916563272476196,
      "learning_rate": 4.8e-05,
      "loss": 1.3725,
      "step": 1200
    },
    {
      "epoch": 0.20003200512081934,
      "grad_norm": 0.42247775197029114,
      "learning_rate": 5e-05,
      "loss": 1.371,
      "step": 1250
    },
    {
      "epoch": 0.2080332853256521,
      "grad_norm": 0.4264281392097473,
      "learning_rate": 5.2000000000000004e-05,
      "loss": 1.3729,
      "step": 1300
    },
    {
      "epoch": 0.2160345655304849,
      "grad_norm": 0.3741685152053833,
      "learning_rate": 5.4000000000000005e-05,
      "loss": 1.3621,
      "step": 1350
    },
    {
      "epoch": 0.22403584573531765,
      "grad_norm": 0.392640084028244,
      "learning_rate": 5.6000000000000006e-05,
      "loss": 1.3557,
      "step": 1400
    },
    {
      "epoch": 0.2320371259401504,
      "grad_norm": 0.3837718367576599,
      "learning_rate": 5.8e-05,
      "loss": 1.3599,
      "step": 1450
    },
    {
      "epoch": 0.2400384061449832,
      "grad_norm": 0.36931145191192627,
      "learning_rate": 6e-05,
      "loss": 1.3527,
      "step": 1500
    },
    {
      "epoch": 0.24803968634981596,
      "grad_norm": 0.4106380343437195,
      "learning_rate": 6.2e-05,
      "loss": 1.3531,
      "step": 1550
    },
    {
      "epoch": 0.25604096655464875,
      "grad_norm": 0.41261059045791626,
      "learning_rate": 6.400000000000001e-05,
      "loss": 1.3722,
      "step": 1600
    },
    {
      "epoch": 0.2640422467594815,
      "grad_norm": 0.3928689658641815,
      "learning_rate": 6.6e-05,
      "loss": 1.3423,
      "step": 1650
    },
    {
      "epoch": 0.2720435269643143,
      "grad_norm": 0.4072926640510559,
      "learning_rate": 6.800000000000001e-05,
      "loss": 1.3458,
      "step": 1700
    },
    {
      "epoch": 0.28004480716914704,
      "grad_norm": 0.3700864315032959,
      "learning_rate": 7e-05,
      "loss": 1.3566,
      "step": 1750
    },
    {
      "epoch": 0.28804608737397985,
      "grad_norm": 0.36805135011672974,
      "learning_rate": 7.2e-05,
      "loss": 1.3124,
      "step": 1800
    },
    {
      "epoch": 0.2960473675788126,
      "grad_norm": 0.39832988381385803,
      "learning_rate": 7.4e-05,
      "loss": 1.3408,
      "step": 1850
    },
    {
      "epoch": 0.3040486477836454,
      "grad_norm": 0.4116155505180359,
      "learning_rate": 7.6e-05,
      "loss": 1.3259,
      "step": 1900
    },
    {
      "epoch": 0.31204992798847814,
      "grad_norm": 0.3844102919101715,
      "learning_rate": 7.800000000000001e-05,
      "loss": 1.3223,
      "step": 1950
    },
    {
      "epoch": 0.32005120819331095,
      "grad_norm": 0.36435940861701965,
      "learning_rate": 8e-05,
      "loss": 1.3493,
      "step": 2000
    },
    {
      "epoch": 0.3280524883981437,
      "grad_norm": 0.40303346514701843,
      "learning_rate": 8.2e-05,
      "loss": 1.313,
      "step": 2050
    },
    {
      "epoch": 0.3360537686029765,
      "grad_norm": 0.3959302604198456,
      "learning_rate": 8.4e-05,
      "loss": 1.3116,
      "step": 2100
    },
    {
      "epoch": 0.34405504880780924,
      "grad_norm": 0.42148756980895996,
      "learning_rate": 8.6e-05,
      "loss": 1.3172,
      "step": 2150
    },
    {
      "epoch": 0.352056329012642,
      "grad_norm": 0.35398295521736145,
      "learning_rate": 8.800000000000001e-05,
      "loss": 1.3123,
      "step": 2200
    },
    {
      "epoch": 0.3600576092174748,
      "grad_norm": 0.32611459493637085,
      "learning_rate": 9e-05,
      "loss": 1.3291,
      "step": 2250
    },
    {
      "epoch": 0.3680588894223076,
      "grad_norm": 0.3089679777622223,
      "learning_rate": 9.200000000000001e-05,
      "loss": 1.3162,
      "step": 2300
    },
    {
      "epoch": 0.37606016962714034,
      "grad_norm": 0.38052898645401,
      "learning_rate": 9.4e-05,
      "loss": 1.2888,
      "step": 2350
    },
    {
      "epoch": 0.3840614498319731,
      "grad_norm": 0.40810805559158325,
      "learning_rate": 9.6e-05,
      "loss": 1.292,
      "step": 2400
    },
    {
      "epoch": 0.39206273003680586,
      "grad_norm": 0.3573181927204132,
      "learning_rate": 9.8e-05,
      "loss": 1.3094,
      "step": 2450
    },
    {
      "epoch": 0.4000640102416387,
      "grad_norm": 0.3486098349094391,
      "learning_rate": 0.0001,
      "loss": 1.2908,
      "step": 2500
    },
    {
      "epoch": 0.40806529044647144,
      "grad_norm": 0.4237920045852661,
      "learning_rate": 9.999878110192404e-05,
      "loss": 1.2868,
      "step": 2550
    },
    {
      "epoch": 0.4160665706513042,
      "grad_norm": 0.3493655025959015,
      "learning_rate": 9.999512446712467e-05,
      "loss": 1.3221,
      "step": 2600
    },
    {
      "epoch": 0.42406785085613696,
      "grad_norm": 0.3716041147708893,
      "learning_rate": 9.998903027388448e-05,
      "loss": 1.2728,
      "step": 2650
    },
    {
      "epoch": 0.4320691310609698,
      "grad_norm": 0.43094709515571594,
      "learning_rate": 9.998049881933149e-05,
      "loss": 1.2864,
      "step": 2700
    },
    {
      "epoch": 0.44007041126580254,
      "grad_norm": 0.379719078540802,
      "learning_rate": 9.996953051942463e-05,
      "loss": 1.2956,
      "step": 2750
    },
    {
      "epoch": 0.4480716914706353,
      "grad_norm": 0.45424267649650574,
      "learning_rate": 9.995612590893352e-05,
      "loss": 1.2927,
      "step": 2800
    },
    {
      "epoch": 0.45607297167546806,
      "grad_norm": 0.34709858894348145,
      "learning_rate": 9.99402856414123e-05,
      "loss": 1.2801,
      "step": 2850
    },
    {
      "epoch": 0.4640742518803008,
      "grad_norm": 0.3956676423549652,
      "learning_rate": 9.992201048916783e-05,
      "loss": 1.2869,
      "step": 2900
    },
    {
      "epoch": 0.47207553208513364,
      "grad_norm": 0.3462512493133545,
      "learning_rate": 9.990130134322201e-05,
      "loss": 1.268,
      "step": 2950
    },
    {
      "epoch": 0.4800768122899664,
      "grad_norm": 0.3835497796535492,
      "learning_rate": 9.987815921326842e-05,
      "loss": 1.254,
      "step": 3000
    },
    {
      "epoch": 0.48807809249479917,
      "grad_norm": 0.37989285588264465,
      "learning_rate": 9.98525852276229e-05,
      "loss": 1.2869,
      "step": 3050
    },
    {
      "epoch": 0.4960793726996319,
      "grad_norm": 0.3443322777748108,
      "learning_rate": 9.982458063316877e-05,
      "loss": 1.2933,
      "step": 3100
    },
    {
      "epoch": 0.5040806529044647,
      "grad_norm": 0.32426556944847107,
      "learning_rate": 9.979414679529588e-05,
      "loss": 1.2756,
      "step": 3150
    },
    {
      "epoch": 0.5120819331092975,
      "grad_norm": 0.4362022578716278,
      "learning_rate": 9.976128519783405e-05,
      "loss": 1.251,
      "step": 3200
    },
    {
      "epoch": 0.5200832133141302,
      "grad_norm": 0.3969637155532837,
      "learning_rate": 9.972599744298086e-05,
      "loss": 1.2593,
      "step": 3250
    },
    {
      "epoch": 0.528084493518963,
      "grad_norm": 0.41199642419815063,
      "learning_rate": 9.96882852512233e-05,
      "loss": 1.2484,
      "step": 3300
    },
    {
      "epoch": 0.5360857737237958,
      "grad_norm": 0.3282572329044342,
      "learning_rate": 9.964815046125412e-05,
      "loss": 1.2559,
      "step": 3350
    },
    {
      "epoch": 0.5440870539286286,
      "grad_norm": 0.4450869858264923,
      "learning_rate": 9.960559502988208e-05,
      "loss": 1.2488,
      "step": 3400
    },
    {
      "epoch": 0.5520883341334614,
      "grad_norm": 0.356630802154541,
      "learning_rate": 9.956062103193648e-05,
      "loss": 1.2688,
      "step": 3450
    },
    {
      "epoch": 0.5600896143382941,
      "grad_norm": 0.35085442662239075,
      "learning_rate": 9.95132306601661e-05,
      "loss": 1.2398,
      "step": 3500
    },
    {
      "epoch": 0.5680908945431269,
      "grad_norm": 0.4403521716594696,
      "learning_rate": 9.946342622513229e-05,
      "loss": 1.2442,
      "step": 3550
    },
    {
      "epoch": 0.5760921747479597,
      "grad_norm": 0.4456493854522705,
      "learning_rate": 9.941121015509621e-05,
      "loss": 1.2413,
      "step": 3600
    },
    {
      "epoch": 0.5840934549527924,
      "grad_norm": 0.4137299954891205,
      "learning_rate": 9.935658499590059e-05,
      "loss": 1.2546,
      "step": 3650
    },
    {
      "epoch": 0.5920947351576252,
      "grad_norm": 0.3572884798049927,
      "learning_rate": 9.929955341084547e-05,
      "loss": 1.2533,
      "step": 3700
    },
    {
      "epoch": 0.600096015362458,
      "grad_norm": 0.42966127395629883,
      "learning_rate": 9.924011818055843e-05,
      "loss": 1.2389,
      "step": 3750
    },
    {
      "epoch": 0.6080972955672908,
      "grad_norm": 0.4403945803642273,
      "learning_rate": 9.917828220285896e-05,
      "loss": 1.2198,
      "step": 3800
    },
    {
      "epoch": 0.6160985757721236,
      "grad_norm": 0.4667624235153198,
      "learning_rate": 9.911404849261726e-05,
      "loss": 1.2417,
      "step": 3850
    },
    {
      "epoch": 0.6240998559769563,
      "grad_norm": 0.6005256772041321,
      "learning_rate": 9.904742018160715e-05,
      "loss": 1.2006,
      "step": 3900
    },
    {
      "epoch": 0.6321011361817891,
      "grad_norm": 0.3864199221134186,
      "learning_rate": 9.897840051835343e-05,
      "loss": 1.2164,
      "step": 3950
    },
    {
      "epoch": 0.6401024163866219,
      "grad_norm": 0.5411766171455383,
      "learning_rate": 9.89069928679735e-05,
      "loss": 1.2187,
      "step": 4000
    },
    {
      "epoch": 0.6481036965914546,
      "grad_norm": 0.38500306010246277,
      "learning_rate": 9.883320071201325e-05,
      "loss": 1.2363,
      "step": 4050
    },
    {
      "epoch": 0.6561049767962874,
      "grad_norm": 0.4081592559814453,
      "learning_rate": 9.875702764827737e-05,
      "loss": 1.2247,
      "step": 4100
    },
    {
      "epoch": 0.6641062570011201,
      "grad_norm": 0.43678033351898193,
      "learning_rate": 9.86784773906539e-05,
      "loss": 1.2323,
      "step": 4150
    },
    {
      "epoch": 0.672107537205953,
      "grad_norm": 0.4319170117378235,
      "learning_rate": 9.859755376893314e-05,
      "loss": 1.1906,
      "step": 4200
    },
    {
      "epoch": 0.6801088174107858,
      "grad_norm": 0.4729995131492615,
      "learning_rate": 9.851426072862098e-05,
      "loss": 1.2098,
      "step": 4250
    },
    {
      "epoch": 0.6881100976156185,
      "grad_norm": 0.4233373701572418,
      "learning_rate": 9.842860233074645e-05,
      "loss": 1.204,
      "step": 4300
    },
    {
      "epoch": 0.6961113778204513,
      "grad_norm": 0.48369497060775757,
      "learning_rate": 9.834058275166383e-05,
      "loss": 1.2046,
      "step": 4350
    },
    {
      "epoch": 0.704112658025284,
      "grad_norm": 0.36634331941604614,
      "learning_rate": 9.825020628284896e-05,
      "loss": 1.2117,
      "step": 4400
    },
    {
      "epoch": 0.7121139382301168,
      "grad_norm": 0.41213107109069824,
      "learning_rate": 9.815747733068995e-05,
      "loss": 1.1977,
      "step": 4450
    },
    {
      "epoch": 0.7201152184349496,
      "grad_norm": 0.37744736671447754,
      "learning_rate": 9.80624004162725e-05,
      "loss": 1.2372,
      "step": 4500
    },
    {
      "epoch": 0.7281164986397823,
      "grad_norm": 0.49204540252685547,
      "learning_rate": 9.79649801751593e-05,
      "loss": 1.2191,
      "step": 4550
    },
    {
      "epoch": 0.7361177788446152,
      "grad_norm": 0.42844048142433167,
      "learning_rate": 9.786522135716415e-05,
      "loss": 1.2114,
      "step": 4600
    },
    {
      "epoch": 0.7441190590494479,
      "grad_norm": 0.5176280736923218,
      "learning_rate": 9.776312882612028e-05,
      "loss": 1.2041,
      "step": 4650
    },
    {
      "epoch": 0.7521203392542807,
      "grad_norm": 0.44180744886398315,
      "learning_rate": 9.765870755964331e-05,
      "loss": 1.2096,
      "step": 4700
    },
    {
      "epoch": 0.7601216194591135,
      "grad_norm": 0.4751160442829132,
      "learning_rate": 9.755196264888846e-05,
      "loss": 1.1928,
      "step": 4750
    },
    {
      "epoch": 0.7681228996639462,
      "grad_norm": 0.4147936999797821,
      "learning_rate": 9.744289929830236e-05,
      "loss": 1.2183,
      "step": 4800
    },
    {
      "epoch": 0.776124179868779,
      "grad_norm": 0.45143744349479675,
      "learning_rate": 9.733152282536936e-05,
      "loss": 1.1798,
      "step": 4850
    },
    {
      "epoch": 0.7841254600736117,
      "grad_norm": 0.46376484632492065,
      "learning_rate": 9.721783866035218e-05,
      "loss": 1.1871,
      "step": 4900
    },
    {
      "epoch": 0.7921267402784445,
      "grad_norm": 0.4516090154647827,
      "learning_rate": 9.710185234602727e-05,
      "loss": 1.1901,
      "step": 4950
    },
    {
      "epoch": 0.8001280204832774,
      "grad_norm": 0.4624463617801666,
      "learning_rate": 9.698356953741438e-05,
      "loss": 1.1848,
      "step": 5000
    },
    {
      "epoch": 0.8081293006881101,
      "grad_norm": 0.41177043318748474,
      "learning_rate": 9.686299600150108e-05,
      "loss": 1.1934,
      "step": 5050
    },
    {
      "epoch": 0.8161305808929429,
      "grad_norm": 0.46030393242836,
      "learning_rate": 9.674013761696136e-05,
      "loss": 1.1695,
      "step": 5100
    },
    {
      "epoch": 0.8241318610977757,
      "grad_norm": 0.48872360587120056,
      "learning_rate": 9.661500037386921e-05,
      "loss": 1.192,
      "step": 5150
    },
    {
      "epoch": 0.8321331413026084,
      "grad_norm": 0.36913803219795227,
      "learning_rate": 9.648759037340638e-05,
      "loss": 1.1624,
      "step": 5200
    },
    {
      "epoch": 0.8401344215074412,
      "grad_norm": 0.5374457836151123,
      "learning_rate": 9.635791382756508e-05,
      "loss": 1.1847,
      "step": 5250
    },
    {
      "epoch": 0.8481357017122739,
      "grad_norm": 0.43082547187805176,
      "learning_rate": 9.622597705884496e-05,
      "loss": 1.1798,
      "step": 5300
    },
    {
      "epoch": 0.8561369819171067,
      "grad_norm": 0.4757367968559265,
      "learning_rate": 9.609178649994499e-05,
      "loss": 1.185,
      "step": 5350
    },
    {
      "epoch": 0.8641382621219396,
      "grad_norm": 0.41058582067489624,
      "learning_rate": 9.595534869344974e-05,
      "loss": 1.18,
      "step": 5400
    },
    {
      "epoch": 0.8721395423267723,
      "grad_norm": 0.4382982552051544,
      "learning_rate": 9.581667029151039e-05,
      "loss": 1.1977,
      "step": 5450
    },
    {
      "epoch": 0.8801408225316051,
      "grad_norm": 0.44662296772003174,
      "learning_rate": 9.567575805552042e-05,
      "loss": 1.1841,
      "step": 5500
    },
    {
      "epoch": 0.8881421027364378,
      "grad_norm": 0.4811069071292877,
      "learning_rate": 9.553261885578597e-05,
      "loss": 1.1765,
      "step": 5550
    },
    {
      "epoch": 0.8961433829412706,
      "grad_norm": 0.4336834251880646,
      "learning_rate": 9.538725967119087e-05,
      "loss": 1.1879,
      "step": 5600
    },
    {
      "epoch": 0.9041446631461034,
      "grad_norm": 0.44644805788993835,
      "learning_rate": 9.523968758885631e-05,
      "loss": 1.2058,
      "step": 5650
    },
    {
      "epoch": 0.9121459433509361,
      "grad_norm": 0.4499703347682953,
      "learning_rate": 9.508990980379537e-05,
      "loss": 1.1397,
      "step": 5700
    },
    {
      "epoch": 0.920147223555769,
      "grad_norm": 0.4322624206542969,
      "learning_rate": 9.493793361856226e-05,
      "loss": 1.1396,
      "step": 5750
    },
    {
      "epoch": 0.9281485037606017,
      "grad_norm": 0.43696922063827515,
      "learning_rate": 9.478376644289613e-05,
      "loss": 1.1698,
      "step": 5800
    },
    {
      "epoch": 0.9361497839654345,
      "grad_norm": 0.4423019587993622,
      "learning_rate": 9.462741579335992e-05,
      "loss": 1.1365,
      "step": 5850
    },
    {
      "epoch": 0.9441510641702673,
      "grad_norm": 0.4975908696651459,
      "learning_rate": 9.44688892929739e-05,
      "loss": 1.1775,
      "step": 5900
    },
    {
      "epoch": 0.9521523443751,
      "grad_norm": 0.49656620621681213,
      "learning_rate": 9.430819467084392e-05,
      "loss": 1.141,
      "step": 5950
    },
    {
      "epoch": 0.9601536245799328,
      "grad_norm": 0.4862751066684723,
      "learning_rate": 9.414533976178458e-05,
      "loss": 1.144,
      "step": 6000
    },
    {
      "epoch": 0.9681549047847655,
      "grad_norm": 0.5158004760742188,
      "learning_rate": 9.398033250593733e-05,
      "loss": 1.1603,
      "step": 6050
    },
    {
      "epoch": 0.9761561849895983,
      "grad_norm": 0.4406384229660034,
      "learning_rate": 9.381318094838321e-05,
      "loss": 1.1351,
      "step": 6100
    },
    {
      "epoch": 0.9841574651944311,
      "grad_norm": 0.4819541573524475,
      "learning_rate": 9.36438932387507e-05,
      "loss": 1.172,
      "step": 6150
    },
    {
      "epoch": 0.9921587453992639,
      "grad_norm": 0.4627222418785095,
      "learning_rate": 9.347247763081835e-05,
      "loss": 1.1587,
      "step": 6200
    },
    {
      "epoch": 1.0001600256040966,
      "grad_norm": 0.4336191713809967,
      "learning_rate": 9.329894248211233e-05,
      "loss": 1.1324,
      "step": 6250
    },
    {
      "epoch": 1.0081613058089294,
      "grad_norm": 0.5482844114303589,
      "learning_rate": 9.312329625349902e-05,
      "loss": 1.0647,
      "step": 6300
    },
    {
      "epoch": 1.0161625860137622,
      "grad_norm": 0.48142924904823303,
      "learning_rate": 9.294554750877242e-05,
      "loss": 1.0681,
      "step": 6350
    },
    {
      "epoch": 1.024163866218595,
      "grad_norm": 0.45703792572021484,
      "learning_rate": 9.276570491423664e-05,
      "loss": 1.0956,
      "step": 6400
    },
    {
      "epoch": 1.0321651464234278,
      "grad_norm": 0.5305693745613098,
      "learning_rate": 9.258377723828338e-05,
      "loss": 1.0476,
      "step": 6450
    },
    {
      "epoch": 1.0401664266282604,
      "grad_norm": 0.5061540603637695,
      "learning_rate": 9.23997733509644e-05,
      "loss": 1.0613,
      "step": 6500
    },
    {
      "epoch": 1.0481677068330932,
      "grad_norm": 0.4534345865249634,
      "learning_rate": 9.221370222355908e-05,
      "loss": 1.0604,
      "step": 6550
    },
    {
      "epoch": 1.056168987037926,
      "grad_norm": 0.5327995419502258,
      "learning_rate": 9.202557292813697e-05,
      "loss": 1.0396,
      "step": 6600
    },
    {
      "epoch": 1.0641702672427589,
      "grad_norm": 0.5349058508872986,
      "learning_rate": 9.183539463711555e-05,
      "loss": 1.0909,
      "step": 6650
    },
    {
      "epoch": 1.0721715474475917,
      "grad_norm": 0.5274565815925598,
      "learning_rate": 9.16431766228129e-05,
      "loss": 1.0539,
      "step": 6700
    },
    {
      "epoch": 1.0801728276524245,
      "grad_norm": 0.5548765659332275,
      "learning_rate": 9.144892825699577e-05,
      "loss": 1.0438,
      "step": 6750
    },
    {
      "epoch": 1.088174107857257,
      "grad_norm": 0.5511574745178223,
      "learning_rate": 9.125265901042251e-05,
      "loss": 1.091,
      "step": 6800
    },
    {
      "epoch": 1.09617538806209,
      "grad_norm": 0.5760679841041565,
      "learning_rate": 9.105437845238141e-05,
      "loss": 1.0522,
      "step": 6850
    },
    {
      "epoch": 1.1041766682669227,
      "grad_norm": 0.6349498629570007,
      "learning_rate": 9.085409625022411e-05,
      "loss": 1.0592,
      "step": 6900
    },
    {
      "epoch": 1.1121779484717556,
      "grad_norm": 0.6243582963943481,
      "learning_rate": 9.065182216889423e-05,
      "loss": 1.0683,
      "step": 6950
    },
    {
      "epoch": 1.1201792286765881,
      "grad_norm": 0.5389096140861511,
      "learning_rate": 9.04475660704513e-05,
      "loss": 1.0549,
      "step": 7000
    },
    {
      "epoch": 1.128180508881421,
      "grad_norm": 0.4826034605503082,
      "learning_rate": 9.024133791358995e-05,
      "loss": 1.078,
      "step": 7050
    },
    {
      "epoch": 1.1361817890862538,
      "grad_norm": 0.517662525177002,
      "learning_rate": 9.003314775315433e-05,
      "loss": 1.0857,
      "step": 7100
    },
    {
      "epoch": 1.1441830692910866,
      "grad_norm": 0.5285195112228394,
      "learning_rate": 8.982300573964788e-05,
      "loss": 1.0449,
      "step": 7150
    },
    {
      "epoch": 1.1521843494959194,
      "grad_norm": 0.47838062047958374,
      "learning_rate": 8.961092211873841e-05,
      "loss": 1.035,
      "step": 7200
    },
    {
      "epoch": 1.1601856297007522,
      "grad_norm": 0.5411238670349121,
      "learning_rate": 8.939690723075865e-05,
      "loss": 1.058,
      "step": 7250
    },
    {
      "epoch": 1.1681869099055848,
      "grad_norm": 0.49349942803382874,
      "learning_rate": 8.9180971510202e-05,
      "loss": 1.0567,
      "step": 7300
    },
    {
      "epoch": 1.1761881901104176,
      "grad_norm": 0.5397252440452576,
      "learning_rate": 8.896312548521382e-05,
      "loss": 1.0842,
      "step": 7350
    },
    {
      "epoch": 1.1841894703152505,
      "grad_norm": 0.5732976794242859,
      "learning_rate": 8.874337977707817e-05,
      "loss": 1.0581,
      "step": 7400
    },
    {
      "epoch": 1.1921907505200833,
      "grad_norm": 0.4691354036331177,
      "learning_rate": 8.852174509969984e-05,
      "loss": 1.0205,
      "step": 7450
    },
    {
      "epoch": 1.2001920307249159,
      "grad_norm": 0.6526204943656921,
      "learning_rate": 8.829823225908214e-05,
      "loss": 1.0387,
      "step": 7500
    },
    {
      "epoch": 1.2081933109297487,
      "grad_norm": 0.46403875946998596,
      "learning_rate": 8.80728521527999e-05,
      "loss": 1.0554,
      "step": 7550
    },
    {
      "epoch": 1.2161945911345815,
      "grad_norm": 0.462835431098938,
      "learning_rate": 8.784561576946824e-05,
      "loss": 1.0503,
      "step": 7600
    },
    {
      "epoch": 1.2241958713394143,
      "grad_norm": 0.5204878449440002,
      "learning_rate": 8.76165341882068e-05,
      "loss": 1.0815,
      "step": 7650
    },
    {
      "epoch": 1.2321971515442471,
      "grad_norm": 0.5061752796173096,
      "learning_rate": 8.738561857809949e-05,
      "loss": 1.0534,
      "step": 7700
    },
    {
      "epoch": 1.24019843174908,
      "grad_norm": 0.5491166710853577,
      "learning_rate": 8.715288019765006e-05,
      "loss": 1.0625,
      "step": 7750
    },
    {
      "epoch": 1.2481997119539125,
      "grad_norm": 0.4824821650981903,
      "learning_rate": 8.691833039423305e-05,
      "loss": 1.0473,
      "step": 7800
    },
    {
      "epoch": 1.2562009921587454,
      "grad_norm": 0.5545431971549988,
      "learning_rate": 8.668198060354065e-05,
      "loss": 1.0432,
      "step": 7850
    },
    {
      "epoch": 1.2642022723635782,
      "grad_norm": 0.5635075569152832,
      "learning_rate": 8.644384234902503e-05,
      "loss": 1.0473,
      "step": 7900
    },
    {
      "epoch": 1.272203552568411,
      "grad_norm": 0.49760839343070984,
      "learning_rate": 8.620392724133661e-05,
      "loss": 1.0616,
      "step": 7950
    },
    {
      "epoch": 1.2802048327732436,
      "grad_norm": 0.6228436231613159,
      "learning_rate": 8.596224697775795e-05,
      "loss": 1.0722,
      "step": 8000
    },
    {
      "epoch": 1.2882061129780764,
      "grad_norm": 0.5831848978996277,
      "learning_rate": 8.571881334163334e-05,
      "loss": 1.0581,
      "step": 8050
    },
    {
      "epoch": 1.2962073931829092,
      "grad_norm": 0.4925413131713867,
      "learning_rate": 8.547363820179442e-05,
      "loss": 1.0429,
      "step": 8100
    },
    {
      "epoch": 1.304208673387742,
      "grad_norm": 0.5401794910430908,
      "learning_rate": 8.522673351198143e-05,
      "loss": 1.0366,
      "step": 8150
    },
    {
      "epoch": 1.3122099535925749,
      "grad_norm": 0.5081512928009033,
      "learning_rate": 8.497811131026046e-05,
      "loss": 1.0464,
      "step": 8200
    },
    {
      "epoch": 1.3202112337974077,
      "grad_norm": 0.5704318881034851,
      "learning_rate": 8.472778371843641e-05,
      "loss": 1.0509,
      "step": 8250
    },
    {
      "epoch": 1.3282125140022405,
      "grad_norm": 0.505493700504303,
      "learning_rate": 8.447576294146209e-05,
      "loss": 1.0713,
      "step": 8300
    },
    {
      "epoch": 1.336213794207073,
      "grad_norm": 0.5492855906486511,
      "learning_rate": 8.422206126684311e-05,
      "loss": 1.0385,
      "step": 8350
    },
    {
      "epoch": 1.344215074411906,
      "grad_norm": 0.4849604666233063,
      "learning_rate": 8.39666910640388e-05,
      "loss": 1.0422,
      "step": 8400
    },
    {
      "epoch": 1.3522163546167387,
      "grad_norm": 0.5335291624069214,
      "learning_rate": 8.370966478385907e-05,
      "loss": 1.0235,
      "step": 8450
    },
    {
      "epoch": 1.3602176348215713,
      "grad_norm": 0.5804494619369507,
      "learning_rate": 8.345099495785753e-05,
      "loss": 1.0069,
      "step": 8500
    },
    {
      "epoch": 1.3682189150264041,
      "grad_norm": 0.5363398194313049,
      "learning_rate": 8.319069419772023e-05,
      "loss": 1.0402,
      "step": 8550
    },
    {
      "epoch": 1.376220195231237,
      "grad_norm": 0.6018884181976318,
      "learning_rate": 8.292877519465105e-05,
      "loss": 1.031,
      "step": 8600
    },
    {
      "epoch": 1.3842214754360698,
      "grad_norm": 0.5088986158370972,
      "learning_rate": 8.266525071875273e-05,
      "loss": 1.0349,
      "step": 8650
    },
    {
      "epoch": 1.3922227556409026,
      "grad_norm": 0.575141191482544,
      "learning_rate": 8.240013361840436e-05,
      "loss": 1.019,
      "step": 8700
    },
    {
      "epoch": 1.4002240358457354,
      "grad_norm": 0.5023386478424072,
      "learning_rate": 8.21334368196348e-05,
      "loss": 1.0428,
      "step": 8750
    },
    {
      "epoch": 1.4082253160505682,
      "grad_norm": 0.5338187217712402,
      "learning_rate": 8.186517332549276e-05,
      "loss": 1.0188,
      "step": 8800
    },
    {
      "epoch": 1.4162265962554008,
      "grad_norm": 0.5415542721748352,
      "learning_rate": 8.159535621541243e-05,
      "loss": 1.0172,
      "step": 8850
    },
    {
      "epoch": 1.4242278764602336,
      "grad_norm": 0.4934610426425934,
      "learning_rate": 8.13239986445761e-05,
      "loss": 1.0249,
      "step": 8900
    },
    {
      "epoch": 1.4322291566650664,
      "grad_norm": 0.5592052936553955,
      "learning_rate": 8.10511138432726e-05,
      "loss": 1.0295,
      "step": 8950
    },
    {
      "epoch": 1.4402304368698993,
      "grad_norm": 0.5904792547225952,
      "learning_rate": 8.07767151162523e-05,
      "loss": 1.0427,
      "step": 9000
    },
    {
      "epoch": 1.4482317170747319,
      "grad_norm": 0.5952058434486389,
      "learning_rate": 8.050081584207843e-05,
      "loss": 1.0504,
      "step": 9050
    },
    {
      "epoch": 1.4562329972795647,
      "grad_norm": 0.49933668971061707,
      "learning_rate": 8.022342947247474e-05,
      "loss": 1.0391,
      "step": 9100
    },
    {
      "epoch": 1.4642342774843975,
      "grad_norm": 0.5853399634361267,
      "learning_rate": 7.994456953166973e-05,
      "loss": 1.0013,
      "step": 9150
    },
    {
      "epoch": 1.4722355576892303,
      "grad_norm": 0.6290839910507202,
      "learning_rate": 7.966424961573722e-05,
      "loss": 1.0159,
      "step": 9200
    },
    {
      "epoch": 1.4802368378940631,
      "grad_norm": 0.6153242588043213,
      "learning_rate": 7.938248339193346e-05,
      "loss": 1.0312,
      "step": 9250
    },
    {
      "epoch": 1.488238118098896,
      "grad_norm": 0.5723234415054321,
      "learning_rate": 7.909928459803077e-05,
      "loss": 1.0198,
      "step": 9300
    },
    {
      "epoch": 1.4962393983037285,
      "grad_norm": 0.5766580700874329,
      "learning_rate": 7.881466704164774e-05,
      "loss": 0.9915,
      "step": 9350
    },
    {
      "epoch": 1.5042406785085614,
      "grad_norm": 0.6211101412773132,
      "learning_rate": 7.852864459957605e-05,
      "loss": 1.0324,
      "step": 9400
    },
    {
      "epoch": 1.5122419587133942,
      "grad_norm": 0.5500127673149109,
      "learning_rate": 7.824123121710387e-05,
      "loss": 1.0316,
      "step": 9450
    },
    {
      "epoch": 1.5202432389182268,
      "grad_norm": 0.6087145209312439,
      "learning_rate": 7.795244090733597e-05,
      "loss": 1.0334,
      "step": 9500
    },
    {
      "epoch": 1.5282445191230596,
      "grad_norm": 0.5456790924072266,
      "learning_rate": 7.766228775051045e-05,
      "loss": 1.0243,
      "step": 9550
    },
    {
      "epoch": 1.5362457993278924,
      "grad_norm": 0.613003671169281,
      "learning_rate": 7.737078589331229e-05,
      "loss": 0.9954,
      "step": 9600
    },
    {
      "epoch": 1.5442470795327252,
      "grad_norm": 0.5732637047767639,
      "learning_rate": 7.707794954818363e-05,
      "loss": 1.0172,
      "step": 9650
    },
    {
      "epoch": 1.552248359737558,
      "grad_norm": 0.5933035612106323,
      "learning_rate": 7.678379299263076e-05,
      "loss": 1.0049,
      "step": 9700
    },
    {
      "epoch": 1.5602496399423909,
      "grad_norm": 0.5576400756835938,
      "learning_rate": 7.648833056852807e-05,
      "loss": 0.9918,
      "step": 9750
    },
    {
      "epoch": 1.5682509201472237,
      "grad_norm": 0.4915982782840729,
      "learning_rate": 7.619157668141877e-05,
      "loss": 0.9949,
      "step": 9800
    },
    {
      "epoch": 1.5762522003520565,
      "grad_norm": 0.5492275357246399,
      "learning_rate": 7.58935457998125e-05,
      "loss": 1.0233,
      "step": 9850
    },
    {
      "epoch": 1.584253480556889,
      "grad_norm": 0.5720072984695435,
      "learning_rate": 7.559425245448006e-05,
      "loss": 1.053,
      "step": 9900
    },
    {
      "epoch": 1.592254760761722,
      "grad_norm": 0.5385164022445679,
      "learning_rate": 7.529371123774471e-05,
      "loss": 1.0329,
      "step": 9950
    },
    {
      "epoch": 1.6002560409665545,
      "grad_norm": 0.5508076548576355,
      "learning_rate": 7.49919368027709e-05,
      "loss": 1.0156,
      "step": 10000
    },
    {
      "epoch": 1.6082573211713873,
      "grad_norm": 0.5111932754516602,
      "learning_rate": 7.468894386284976e-05,
      "loss": 1.0061,
      "step": 10050
    },
    {
      "epoch": 1.6162586013762201,
      "grad_norm": 0.5652381777763367,
      "learning_rate": 7.438474719068173e-05,
      "loss": 0.999,
      "step": 10100
    },
    {
      "epoch": 1.624259881581053,
      "grad_norm": 0.5519747138023376,
      "learning_rate": 7.407936161765637e-05,
      "loss": 1.0129,
      "step": 10150
    },
    {
      "epoch": 1.6322611617858858,
      "grad_norm": 0.5380906462669373,
      "learning_rate": 7.377280203312917e-05,
      "loss": 0.9991,
      "step": 10200
    },
    {
      "epoch": 1.6402624419907186,
      "grad_norm": 0.635221540927887,
      "learning_rate": 7.346508338369563e-05,
      "loss": 0.9889,
      "step": 10250
    },
    {
      "epoch": 1.6482637221955514,
      "grad_norm": 0.4963364005088806,
      "learning_rate": 7.315622067246254e-05,
      "loss": 0.9808,
      "step": 10300
    },
    {
      "epoch": 1.6562650024003842,
      "grad_norm": 0.5576931238174438,
      "learning_rate": 7.28462289583165e-05,
      "loss": 1.0122,
      "step": 10350
    },
    {
      "epoch": 1.6642662826052168,
      "grad_norm": 0.5357556343078613,
      "learning_rate": 7.253512335518964e-05,
      "loss": 1.0193,
      "step": 10400
    },
    {
      "epoch": 1.6722675628100496,
      "grad_norm": 0.5757668018341064,
      "learning_rate": 7.22229190313228e-05,
      "loss": 1.0032,
      "step": 10450
    },
    {
      "epoch": 1.6802688430148822,
      "grad_norm": 0.5368041396141052,
      "learning_rate": 7.1909631208526e-05,
      "loss": 1.0159,
      "step": 10500
    },
    {
      "epoch": 1.688270123219715,
      "grad_norm": 0.5617921948432922,
      "learning_rate": 7.15952751614362e-05,
      "loss": 1.0178,
      "step": 10550
    },
    {
      "epoch": 1.6962714034245479,
      "grad_norm": 0.6092725396156311,
      "learning_rate": 7.127986621677263e-05,
      "loss": 1.0112,
      "step": 10600
    },
    {
      "epoch": 1.7042726836293807,
      "grad_norm": 0.6592617034912109,
      "learning_rate": 7.096341975258953e-05,
      "loss": 0.9944,
      "step": 10650
    },
    {
      "epoch": 1.7122739638342135,
      "grad_norm": 0.5574333667755127,
      "learning_rate": 7.064595119752636e-05,
      "loss": 0.9896,
      "step": 10700
    },
    {
      "epoch": 1.7202752440390463,
      "grad_norm": 0.5796050429344177,
      "learning_rate": 7.032747603005552e-05,
      "loss": 1.0099,
      "step": 10750
    },
    {
      "epoch": 1.7282765242438791,
      "grad_norm": 0.5359869599342346,
      "learning_rate": 7.00080097777278e-05,
      "loss": 0.976,
      "step": 10800
    },
    {
      "epoch": 1.736277804448712,
      "grad_norm": 0.533762514591217,
      "learning_rate": 6.968756801641523e-05,
      "loss": 1.011,
      "step": 10850
    },
    {
      "epoch": 1.7442790846535445,
      "grad_norm": 0.6239184141159058,
      "learning_rate": 6.936616636955164e-05,
      "loss": 0.9971,
      "step": 10900
    },
    {
      "epoch": 1.7522803648583773,
      "grad_norm": 0.6219179034233093,
      "learning_rate": 6.904382050737096e-05,
      "loss": 1.0195,
      "step": 10950
    },
    {
      "epoch": 1.7602816450632102,
      "grad_norm": 0.6290085315704346,
      "learning_rate": 6.872054614614328e-05,
      "loss": 1.0393,
      "step": 11000
    },
    {
      "epoch": 1.7682829252680428,
      "grad_norm": 0.6031250953674316,
      "learning_rate": 6.839635904740846e-05,
      "loss": 1.013,
      "step": 11050
    },
    {
      "epoch": 1.7762842054728756,
      "grad_norm": 0.5307977199554443,
      "learning_rate": 6.807127501720773e-05,
      "loss": 0.9924,
      "step": 11100
    },
    {
      "epoch": 1.7842854856777084,
      "grad_norm": 0.593413233757019,
      "learning_rate": 6.774530990531308e-05,
      "loss": 0.9848,
      "step": 11150
    },
    {
      "epoch": 1.7922867658825412,
      "grad_norm": 0.5930994749069214,
      "learning_rate": 6.741847960445437e-05,
      "loss": 0.9891,
      "step": 11200
    },
    {
      "epoch": 1.800288046087374,
      "grad_norm": 0.5843376517295837,
      "learning_rate": 6.709080004954465e-05,
      "loss": 1.0075,
      "step": 11250
    },
    {
      "epoch": 1.8082893262922068,
      "grad_norm": 0.6507195830345154,
      "learning_rate": 6.676228721690301e-05,
      "loss": 0.9872,
      "step": 11300
    },
    {
      "epoch": 1.8162906064970397,
      "grad_norm": 0.502108097076416,
      "learning_rate": 6.64329571234759e-05,
      "loss": 0.9898,
      "step": 11350
    },
    {
      "epoch": 1.8242918867018723,
      "grad_norm": 0.6289730072021484,
      "learning_rate": 6.610282582605597e-05,
      "loss": 0.9886,
      "step": 11400
    },
    {
      "epoch": 1.832293166906705,
      "grad_norm": 0.5781500935554504,
      "learning_rate": 6.577190942049937e-05,
      "loss": 1.0211,
      "step": 11450
    },
    {
      "epoch": 1.840294447111538,
      "grad_norm": 0.6279985308647156,
      "learning_rate": 6.544022404094092e-05,
      "loss": 1.0123,
      "step": 11500
    },
    {
      "epoch": 1.8482957273163705,
      "grad_norm": 0.6612405180931091,
      "learning_rate": 6.51077858590074e-05,
      "loss": 0.9978,
      "step": 11550
    },
    {
      "epoch": 1.8562970075212033,
      "grad_norm": 0.6027297973632812,
      "learning_rate": 6.477461108302927e-05,
      "loss": 0.9786,
      "step": 11600
    },
    {
      "epoch": 1.8642982877260361,
      "grad_norm": 0.6454290151596069,
      "learning_rate": 6.444071595725026e-05,
      "loss": 0.9692,
      "step": 11650
    },
    {
      "epoch": 1.872299567930869,
      "grad_norm": 0.5385527014732361,
      "learning_rate": 6.410611676103542e-05,
      "loss": 0.9789,
      "step": 11700
    },
    {
      "epoch": 1.8803008481357018,
      "grad_norm": 0.5324643850326538,
      "learning_rate": 6.37708298080774e-05,
      "loss": 0.9892,
      "step": 11750
    },
    {
      "epoch": 1.8883021283405346,
      "grad_norm": 0.5693587064743042,
      "learning_rate": 6.343487144560108e-05,
      "loss": 0.9881,
      "step": 11800
    },
    {
      "epoch": 1.8963034085453674,
      "grad_norm": 0.6578769683837891,
      "learning_rate": 6.309825805356654e-05,
      "loss": 0.9871,
      "step": 11850
    },
    {
      "epoch": 1.9043046887502,
      "grad_norm": 0.5471733212471008,
      "learning_rate": 6.276100604387039e-05,
      "loss": 1.0013,
      "step": 11900
    },
    {
      "epoch": 1.9123059689550328,
      "grad_norm": 0.6353397369384766,
      "learning_rate": 6.24231318595457e-05,
      "loss": 0.9653,
      "step": 11950
    },
    {
      "epoch": 1.9203072491598656,
      "grad_norm": 0.5183307528495789,
      "learning_rate": 6.208465197396013e-05,
      "loss": 0.9833,
      "step": 12000
    },
    {
      "epoch": 1.9283085293646982,
      "grad_norm": 0.539452850818634,
      "learning_rate": 6.1745582890013e-05,
      "loss": 0.9686,
      "step": 12050
    },
    {
      "epoch": 1.936309809569531,
      "grad_norm": 0.681061863899231,
      "learning_rate": 6.140594113933042e-05,
      "loss": 0.9814,
      "step": 12100
    },
    {
      "epoch": 1.9443110897743638,
      "grad_norm": 0.6232041716575623,
      "learning_rate": 6.106574328145949e-05,
      "loss": 0.9858,
      "step": 12150
    },
    {
      "epoch": 1.9523123699791967,
      "grad_norm": 0.5435238480567932,
      "learning_rate": 6.072500590306076e-05,
      "loss": 0.9921,
      "step": 12200
    },
    {
      "epoch": 1.9603136501840295,
      "grad_norm": 0.6599912047386169,
      "learning_rate": 6.038374561709964e-05,
      "loss": 0.9627,
      "step": 12250
    },
    {
      "epoch": 1.9683149303888623,
      "grad_norm": 0.6491255760192871,
      "learning_rate": 6.0041979062036356e-05,
      "loss": 1.0074,
      "step": 12300
    },
    {
      "epoch": 1.976316210593695,
      "grad_norm": 0.545872688293457,
      "learning_rate": 5.969972290101478e-05,
      "loss": 0.9769,
      "step": 12350
    },
    {
      "epoch": 1.984317490798528,
      "grad_norm": 0.5526396632194519,
      "learning_rate": 5.935699382104994e-05,
      "loss": 0.9853,
      "step": 12400
    },
    {
      "epoch": 1.9923187710033605,
      "grad_norm": 0.5161740183830261,
      "learning_rate": 5.901380853221448e-05,
      "loss": 1.0122,
      "step": 12450
    },
    {
      "epoch": 2.000320051208193,
      "grad_norm": 0.501007080078125,
      "learning_rate": 5.867018376682396e-05,
      "loss": 1.0062,
      "step": 12500
    },
    {
      "epoch": 2.008321331413026,
      "grad_norm": 0.6925072073936462,
      "learning_rate": 5.832613627862097e-05,
      "loss": 0.8524,
      "step": 12550
    },
    {
      "epoch": 2.0163226116178588,
      "grad_norm": 0.6264834403991699,
      "learning_rate": 5.798168284195837e-05,
      "loss": 0.8474,
      "step": 12600
    },
    {
      "epoch": 2.0243238918226916,
      "grad_norm": 0.543258786201477,
      "learning_rate": 5.7636840250981405e-05,
      "loss": 0.819,
      "step": 12650
    },
    {
      "epoch": 2.0323251720275244,
      "grad_norm": 0.5597353577613831,
      "learning_rate": 5.729162531880892e-05,
      "loss": 0.8635,
      "step": 12700
    },
    {
      "epoch": 2.040326452232357,
      "grad_norm": 0.8554184436798096,
      "learning_rate": 5.694605487671357e-05,
      "loss": 0.8283,
      "step": 12750
    },
    {
      "epoch": 2.04832773243719,
      "grad_norm": 0.5572032332420349,
      "learning_rate": 5.6600145773301206e-05,
      "loss": 0.8744,
      "step": 12800
    },
    {
      "epoch": 2.056329012642023,
      "grad_norm": 0.6553796529769897,
      "learning_rate": 5.625391487368949e-05,
      "loss": 0.8572,
      "step": 12850
    },
    {
      "epoch": 2.0643302928468557,
      "grad_norm": 0.6666269302368164,
      "learning_rate": 5.590737905868551e-05,
      "loss": 0.8625,
      "step": 12900
    },
    {
      "epoch": 2.0723315730516885,
      "grad_norm": 0.6420098543167114,
      "learning_rate": 5.5560555223962784e-05,
      "loss": 0.8571,
      "step": 12950
    },
    {
      "epoch": 2.080332853256521,
      "grad_norm": 0.6100978255271912,
      "learning_rate": 5.5213460279237496e-05,
      "loss": 0.8579,
      "step": 13000
    },
    {
      "epoch": 2.0883341334613537,
      "grad_norm": 0.5606010556221008,
      "learning_rate": 5.4866111147444085e-05,
      "loss": 0.8715,
      "step": 13050
    },
    {
      "epoch": 2.0963354136661865,
      "grad_norm": 0.6860626339912415,
      "learning_rate": 5.4518524763910076e-05,
      "loss": 0.8513,
      "step": 13100
    },
    {
      "epoch": 2.1043366938710193,
      "grad_norm": 0.7012873291969299,
      "learning_rate": 5.4170718075530404e-05,
      "loss": 0.8454,
      "step": 13150
    },
    {
      "epoch": 2.112337974075852,
      "grad_norm": 0.6773386597633362,
      "learning_rate": 5.382270803994126e-05,
      "loss": 0.881,
      "step": 13200
    },
    {
      "epoch": 2.120339254280685,
      "grad_norm": 0.7174808382987976,
      "learning_rate": 5.347451162469309e-05,
      "loss": 0.858,
      "step": 13250
    },
    {
      "epoch": 2.1283405344855177,
      "grad_norm": 0.6617708206176758,
      "learning_rate": 5.3126145806423575e-05,
      "loss": 0.8531,
      "step": 13300
    },
    {
      "epoch": 2.1363418146903506,
      "grad_norm": 0.7019938826560974,
      "learning_rate": 5.277762757002971e-05,
      "loss": 0.8665,
      "step": 13350
    },
    {
      "epoch": 2.1443430948951834,
      "grad_norm": 0.6727155447006226,
      "learning_rate": 5.2428973907839804e-05,
      "loss": 0.8528,
      "step": 13400
    },
    {
      "epoch": 2.152344375100016,
      "grad_norm": 0.6648008227348328,
      "learning_rate": 5.208020181878499e-05,
      "loss": 0.8399,
      "step": 13450
    },
    {
      "epoch": 2.160345655304849,
      "grad_norm": 0.6274258494377136,
      "learning_rate": 5.173132830757039e-05,
      "loss": 0.8662,
      "step": 13500
    },
    {
      "epoch": 2.1683469355096814,
      "grad_norm": 0.6117835640907288,
      "learning_rate": 5.138237038384608e-05,
      "loss": 0.8503,
      "step": 13550
    },
    {
      "epoch": 2.176348215714514,
      "grad_norm": 0.6691561341285706,
      "learning_rate": 5.103334506137772e-05,
      "loss": 0.8645,
      "step": 13600
    },
    {
      "epoch": 2.184349495919347,
      "grad_norm": 0.5877872109413147,
      "learning_rate": 5.0684269357217084e-05,
      "loss": 0.8555,
      "step": 13650
    },
    {
      "epoch": 2.19235077612418,
      "grad_norm": 0.717145562171936,
      "learning_rate": 5.033516029087231e-05,
      "loss": 0.8799,
      "step": 13700
    },
    {
      "epoch": 2.2003520563290127,
      "grad_norm": 0.6587306261062622,
      "learning_rate": 4.99860348834782e-05,
      "loss": 0.88,
      "step": 13750
    },
    {
      "epoch": 2.2083533365338455,
      "grad_norm": 0.6128435134887695,
      "learning_rate": 4.963691015696624e-05,
      "loss": 0.853,
      "step": 13800
    },
    {
      "epoch": 2.2163546167386783,
      "grad_norm": 0.5783178210258484,
      "learning_rate": 4.928780313323473e-05,
      "loss": 0.8863,
      "step": 13850
    },
    {
      "epoch": 2.224355896943511,
      "grad_norm": 0.6364635825157166,
      "learning_rate": 4.893873083331882e-05,
      "loss": 0.8378,
      "step": 13900
    },
    {
      "epoch": 2.232357177148344,
      "grad_norm": 0.7266676425933838,
      "learning_rate": 4.8589710276560744e-05,
      "loss": 0.8647,
      "step": 13950
    },
    {
      "epoch": 2.2403584573531763,
      "grad_norm": 0.6661261916160583,
      "learning_rate": 4.8240758479779884e-05,
      "loss": 0.835,
      "step": 14000
    },
    {
      "epoch": 2.248359737558009,
      "grad_norm": 0.7814577221870422,
      "learning_rate": 4.789189245644318e-05,
      "loss": 0.8379,
      "step": 14050
    },
    {
      "epoch": 2.256361017762842,
      "grad_norm": 0.6463228464126587,
      "learning_rate": 4.754312921583564e-05,
      "loss": 0.8628,
      "step": 14100
    },
    {
      "epoch": 2.2643622979676747,
      "grad_norm": 0.7221981883049011,
      "learning_rate": 4.719448576223096e-05,
      "loss": 0.8565,
      "step": 14150
    },
    {
      "epoch": 2.2723635781725076,
      "grad_norm": 0.6834704279899597,
      "learning_rate": 4.684597909406253e-05,
      "loss": 0.8776,
      "step": 14200
    },
    {
      "epoch": 2.2803648583773404,
      "grad_norm": 0.6285929083824158,
      "learning_rate": 4.649762620309466e-05,
      "loss": 0.8356,
      "step": 14250
    },
    {
      "epoch": 2.288366138582173,
      "grad_norm": 0.6223785281181335,
      "learning_rate": 4.61494440735941e-05,
      "loss": 0.834,
      "step": 14300
    },
    {
      "epoch": 2.296367418787006,
      "grad_norm": 0.7635972499847412,
      "learning_rate": 4.580144968150192e-05,
      "loss": 0.8603,
      "step": 14350
    },
    {
      "epoch": 2.304368698991839,
      "grad_norm": 0.6658472418785095,
      "learning_rate": 4.5453659993605944e-05,
      "loss": 0.8393,
      "step": 14400
    },
    {
      "epoch": 2.3123699791966716,
      "grad_norm": 0.6321608424186707,
      "learning_rate": 4.510609196671345e-05,
      "loss": 0.8416,
      "step": 14450
    },
    {
      "epoch": 2.3203712594015045,
      "grad_norm": 0.6526052355766296,
      "learning_rate": 4.4758762546824364e-05,
      "loss": 0.8696,
      "step": 14500
    },
    {
      "epoch": 2.328372539606337,
      "grad_norm": 0.6651396751403809,
      "learning_rate": 4.441168866830516e-05,
      "loss": 0.8619,
      "step": 14550
    },
    {
      "epoch": 2.3363738198111696,
      "grad_norm": 0.6737195253372192,
      "learning_rate": 4.406488725306317e-05,
      "loss": 0.8447,
      "step": 14600
    },
    {
      "epoch": 2.3443751000160025,
      "grad_norm": 0.5813534259796143,
      "learning_rate": 4.3718375209721505e-05,
      "loss": 0.8468,
      "step": 14650
    },
    {
      "epoch": 2.3523763802208353,
      "grad_norm": 0.7838544249534607,
      "learning_rate": 4.337216943279464e-05,
      "loss": 0.8344,
      "step": 14700
    },
    {
      "epoch": 2.360377660425668,
      "grad_norm": 0.6356272101402283,
      "learning_rate": 4.3026286801864854e-05,
      "loss": 0.8699,
      "step": 14750
    },
    {
      "epoch": 2.368378940630501,
      "grad_norm": 0.6201728582382202,
      "learning_rate": 4.268074418075903e-05,
      "loss": 0.8515,
      "step": 14800
    },
    {
      "epoch": 2.3763802208353337,
      "grad_norm": 0.6424387693405151,
      "learning_rate": 4.233555841672663e-05,
      "loss": 0.8429,
      "step": 14850
    },
    {
      "epoch": 2.3843815010401666,
      "grad_norm": 0.6518192291259766,
      "learning_rate": 4.1990746339618216e-05,
      "loss": 0.8666,
      "step": 14900
    },
    {
      "epoch": 2.3923827812449994,
      "grad_norm": 0.7236935496330261,
      "learning_rate": 4.164632476106484e-05,
      "loss": 0.8478,
      "step": 14950
    },
    {
      "epoch": 2.4003840614498317,
      "grad_norm": 0.692077100276947,
      "learning_rate": 4.13023104736585e-05,
      "loss": 0.8756,
      "step": 15000
    },
    {
      "epoch": 2.4083853416546646,
      "grad_norm": 0.6063551306724548,
      "learning_rate": 4.095872025013333e-05,
      "loss": 0.8221,
      "step": 15050
    },
    {
      "epoch": 2.4163866218594974,
      "grad_norm": 0.5967947840690613,
      "learning_rate": 4.0615570842547815e-05,
      "loss": 0.8731,
      "step": 15100
    },
    {
      "epoch": 2.42438790206433,
      "grad_norm": 0.7256248593330383,
      "learning_rate": 4.0272878981468045e-05,
      "loss": 0.8614,
      "step": 15150
    },
    {
      "epoch": 2.432389182269163,
      "grad_norm": 0.6148253083229065,
      "learning_rate": 3.9930661375152045e-05,
      "loss": 0.8337,
      "step": 15200
    },
    {
      "epoch": 2.440390462473996,
      "grad_norm": 0.6977733373641968,
      "learning_rate": 3.958893470873509e-05,
      "loss": 0.8235,
      "step": 15250
    },
    {
      "epoch": 2.4483917426788286,
      "grad_norm": 0.6713569164276123,
      "learning_rate": 3.924771564341621e-05,
      "loss": 0.8301,
      "step": 15300
    },
    {
      "epoch": 2.4563930228836615,
      "grad_norm": 0.637150228023529,
      "learning_rate": 3.890702081564593e-05,
      "loss": 0.8355,
      "step": 15350
    },
    {
      "epoch": 2.4643943030884943,
      "grad_norm": 0.6515012383460999,
      "learning_rate": 3.856686683631502e-05,
      "loss": 0.8547,
      "step": 15400
    },
    {
      "epoch": 2.472395583293327,
      "grad_norm": 0.6041115522384644,
      "learning_rate": 3.822727028994471e-05,
      "loss": 0.8421,
      "step": 15450
    },
    {
      "epoch": 2.48039686349816,
      "grad_norm": 0.6472122073173523,
      "learning_rate": 3.788824773387811e-05,
      "loss": 0.838,
      "step": 15500
    },
    {
      "epoch": 2.4883981437029927,
      "grad_norm": 0.7198679447174072,
      "learning_rate": 3.754981569747287e-05,
      "loss": 0.8213,
      "step": 15550
    },
    {
      "epoch": 2.496399423907825,
      "grad_norm": 0.6885024309158325,
      "learning_rate": 3.721199068129527e-05,
      "loss": 0.8354,
      "step": 15600
    },
    {
      "epoch": 2.504400704112658,
      "grad_norm": 0.6485710740089417,
      "learning_rate": 3.6874789156315835e-05,
      "loss": 0.8633,
      "step": 15650
    },
    {
      "epoch": 2.5124019843174907,
      "grad_norm": 0.6844043135643005,
      "learning_rate": 3.6538227563106164e-05,
      "loss": 0.8105,
      "step": 15700
    },
    {
      "epoch": 2.5204032645223235,
      "grad_norm": 0.8702924847602844,
      "learning_rate": 3.6202322311037374e-05,
      "loss": 0.8121,
      "step": 15750
    },
    {
      "epoch": 2.5284045447271564,
      "grad_norm": 0.7209028005599976,
      "learning_rate": 3.586708977748012e-05,
      "loss": 0.8758,
      "step": 15800
    },
    {
      "epoch": 2.536405824931989,
      "grad_norm": 0.6202672123908997,
      "learning_rate": 3.553254630700597e-05,
      "loss": 0.8474,
      "step": 15850
    },
    {
      "epoch": 2.544407105136822,
      "grad_norm": 0.6669834852218628,
      "learning_rate": 3.5198708210590626e-05,
      "loss": 0.8482,
      "step": 15900
    },
    {
      "epoch": 2.552408385341655,
      "grad_norm": 0.6584005951881409,
      "learning_rate": 3.486559176481865e-05,
      "loss": 0.8416,
      "step": 15950
    },
    {
      "epoch": 2.560409665546487,
      "grad_norm": 0.5252745747566223,
      "learning_rate": 3.4533213211089825e-05,
      "loss": 0.8401,
      "step": 16000
    },
    {
      "epoch": 2.56841094575132,
      "grad_norm": 0.6504681706428528,
      "learning_rate": 3.4201588754827314e-05,
      "loss": 0.8486,
      "step": 16050
    },
    {
      "epoch": 2.576412225956153,
      "grad_norm": 0.7693225741386414,
      "learning_rate": 3.387073456468761e-05,
      "loss": 0.845,
      "step": 16100
    },
    {
      "epoch": 2.5844135061609856,
      "grad_norm": 0.6965412497520447,
      "learning_rate": 3.354066677177214e-05,
      "loss": 0.8568,
      "step": 16150
    },
    {
      "epoch": 2.5924147863658185,
      "grad_norm": 0.6364871859550476,
      "learning_rate": 3.321140146884081e-05,
      "loss": 0.8693,
      "step": 16200
    },
    {
      "epoch": 2.6004160665706513,
      "grad_norm": 0.7464070916175842,
      "learning_rate": 3.288295470952737e-05,
      "loss": 0.8566,
      "step": 16250
    },
    {
      "epoch": 2.608417346775484,
      "grad_norm": 0.6804706454277039,
      "learning_rate": 3.255534250755678e-05,
      "loss": 0.8708,
      "step": 16300
    },
    {
      "epoch": 2.616418626980317,
      "grad_norm": 0.6775168776512146,
      "learning_rate": 3.222858083596431e-05,
      "loss": 0.8558,
      "step": 16350
    },
    {
      "epoch": 2.6244199071851497,
      "grad_norm": 0.7037110924720764,
      "learning_rate": 3.190268562631688e-05,
      "loss": 0.8313,
      "step": 16400
    },
    {
      "epoch": 2.6324211873899825,
      "grad_norm": 0.5500233173370361,
      "learning_rate": 3.157767276793626e-05,
      "loss": 0.7998,
      "step": 16450
    },
    {
      "epoch": 2.6404224675948154,
      "grad_norm": 0.7002319097518921,
      "learning_rate": 3.125355810712435e-05,
      "loss": 0.8247,
      "step": 16500
    },
    {
      "epoch": 2.648423747799648,
      "grad_norm": 0.7857663631439209,
      "learning_rate": 3.093035744639061e-05,
      "loss": 0.8318,
      "step": 16550
    },
    {
      "epoch": 2.656425028004481,
      "grad_norm": 0.7866588830947876,
      "learning_rate": 3.06080865436816e-05,
      "loss": 0.814,
      "step": 16600
    },
    {
      "epoch": 2.6644263082093134,
      "grad_norm": 0.6732119917869568,
      "learning_rate": 3.0286761111612626e-05,
      "loss": 0.8527,
      "step": 16650
    },
    {
      "epoch": 2.672427588414146,
      "grad_norm": 0.6847143769264221,
      "learning_rate": 2.9966396816701725e-05,
      "loss": 0.8012,
      "step": 16700
    },
    {
      "epoch": 2.680428868618979,
      "grad_norm": 0.6862397193908691,
      "learning_rate": 2.964700927860581e-05,
      "loss": 0.817,
      "step": 16750
    },
    {
      "epoch": 2.688430148823812,
      "grad_norm": 0.6373656988143921,
      "learning_rate": 2.9328614069359128e-05,
      "loss": 0.8625,
      "step": 16800
    },
    {
      "epoch": 2.6964314290286446,
      "grad_norm": 0.7099259495735168,
      "learning_rate": 2.9011226712613937e-05,
      "loss": 0.8445,
      "step": 16850
    },
    {
      "epoch": 2.7044327092334774,
      "grad_norm": 0.6120832562446594,
      "learning_rate": 2.8694862682883866e-05,
      "loss": 0.7982,
      "step": 16900
    },
    {
      "epoch": 2.7124339894383103,
      "grad_norm": 0.8442961573600769,
      "learning_rate": 2.8379537404789124e-05,
      "loss": 0.8307,
      "step": 16950
    },
    {
      "epoch": 2.7204352696431426,
      "grad_norm": 0.6093111634254456,
      "learning_rate": 2.8065266252304712e-05,
      "loss": 0.7811,
      "step": 17000
    },
    {
      "epoch": 2.7284365498479755,
      "grad_norm": 0.6981213092803955,
      "learning_rate": 2.775206454801079e-05,
      "loss": 0.8533,
      "step": 17050
    },
    {
      "epoch": 2.7364378300528083,
      "grad_norm": 0.696796178817749,
      "learning_rate": 2.7439947562345546e-05,
      "loss": 0.8182,
      "step": 17100
    },
    {
      "epoch": 2.744439110257641,
      "grad_norm": 0.6803203225135803,
      "learning_rate": 2.7128930512860658e-05,
      "loss": 0.8391,
      "step": 17150
    },
    {
      "epoch": 2.752440390462474,
      "grad_norm": 0.6710325479507446,
      "learning_rate": 2.6819028563479505e-05,
      "loss": 0.8229,
      "step": 17200
    },
    {
      "epoch": 2.7604416706673067,
      "grad_norm": 0.7354364395141602,
      "learning_rate": 2.6510256823757667e-05,
      "loss": 0.8121,
      "step": 17250
    },
    {
      "epoch": 2.7684429508721395,
      "grad_norm": 0.7637900710105896,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 0.84,
      "step": 17300
    },
    {
      "epoch": 2.7764442310769724,
      "grad_norm": 0.587841808795929,
      "learning_rate": 2.589616413525824e-05,
      "loss": 0.8117,
      "step": 17350
    },
    {
      "epoch": 2.784445511281805,
      "grad_norm": 0.667797327041626,
      "learning_rate": 2.559087312713651e-05,
      "loss": 0.8131,
      "step": 17400
    },
    {
      "epoch": 2.792446791486638,
      "grad_norm": 0.5983754992485046,
      "learning_rate": 2.5286772208526027e-05,
      "loss": 0.8309,
      "step": 17450
    },
    {
      "epoch": 2.800448071691471,
      "grad_norm": 0.7884683012962341,
      "learning_rate": 2.4983876206147776e-05,
      "loss": 0.8208,
      "step": 17500
    },
|
{ |
|
"epoch": 2.8084493518963036, |
|
"grad_norm": 0.6957002878189087, |
|
"learning_rate": 2.4682199887975938e-05, |
|
"loss": 0.8699, |
|
"step": 17550 |
|
}, |
|
{ |
|
"epoch": 2.8164506321011364, |
|
"grad_norm": 0.5889870524406433, |
|
"learning_rate": 2.438175796251786e-05, |
|
"loss": 0.8282, |
|
"step": 17600 |
|
}, |
|
{ |
|
"epoch": 2.824451912305969, |
|
"grad_norm": 0.7357444167137146, |
|
"learning_rate": 2.4082565078096935e-05, |
|
"loss": 0.8025, |
|
"step": 17650 |
|
}, |
|
{ |
|
"epoch": 2.8324531925108016, |
|
"grad_norm": 0.8791075348854065, |
|
"learning_rate": 2.3784635822138424e-05, |
|
"loss": 0.8338, |
|
"step": 17700 |
|
}, |
|
{ |
|
"epoch": 2.8404544727156344, |
|
"grad_norm": 0.6486666202545166, |
|
"learning_rate": 2.348798472045819e-05, |
|
"loss": 0.8221, |
|
"step": 17750 |
|
}, |
|
{ |
|
"epoch": 2.8484557529204673, |
|
"grad_norm": 0.6927144527435303, |
|
"learning_rate": 2.3192626236554516e-05, |
|
"loss": 0.8249, |
|
"step": 17800 |
|
}, |
|
{ |
|
"epoch": 2.8564570331253, |
|
"grad_norm": 0.7874731421470642, |
|
"learning_rate": 2.2898574770902914e-05, |
|
"loss": 0.8267, |
|
"step": 17850 |
|
}, |
|
{ |
|
"epoch": 2.864458313330133, |
|
"grad_norm": 0.6948744654655457, |
|
"learning_rate": 2.260584466025401e-05, |
|
"loss": 0.8139, |
|
"step": 17900 |
|
}, |
|
{ |
|
"epoch": 2.8724595935349657, |
|
"grad_norm": 0.6847252249717712, |
|
"learning_rate": 2.231445017693454e-05, |
|
"loss": 0.8432, |
|
"step": 17950 |
|
}, |
|
{ |
|
"epoch": 2.8804608737397985, |
|
"grad_norm": 0.6781948804855347, |
|
"learning_rate": 2.2024405528151547e-05, |
|
"loss": 0.8298, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 2.888462153944631, |
|
"grad_norm": 0.6336878538131714, |
|
"learning_rate": 2.1735724855299567e-05, |
|
"loss": 0.81, |
|
"step": 18050 |
|
}, |
|
{ |
|
"epoch": 2.8964634341494637, |
|
"grad_norm": 0.6111786365509033, |
|
"learning_rate": 2.1448422233271254e-05, |
|
"loss": 0.7977, |
|
"step": 18100 |
|
}, |
|
{ |
|
"epoch": 2.9044647143542965, |
|
"grad_norm": 0.7334992289543152, |
|
"learning_rate": 2.116251166977118e-05, |
|
"loss": 0.8221, |
|
"step": 18150 |
|
}, |
|
{ |
|
"epoch": 2.9124659945591294, |
|
"grad_norm": 0.8408239483833313, |
|
"learning_rate": 2.0878007104632775e-05, |
|
"loss": 0.8597, |
|
"step": 18200 |
|
}, |
|
{ |
|
"epoch": 2.920467274763962, |
|
"grad_norm": 0.6892300248146057, |
|
"learning_rate": 2.059492240913866e-05, |
|
"loss": 0.8088, |
|
"step": 18250 |
|
}, |
|
{ |
|
"epoch": 2.928468554968795, |
|
"grad_norm": 0.5887216925621033, |
|
"learning_rate": 2.0313271385344522e-05, |
|
"loss": 0.8207, |
|
"step": 18300 |
|
}, |
|
{ |
|
"epoch": 2.936469835173628, |
|
"grad_norm": 0.6477887630462646, |
|
"learning_rate": 2.0033067765406004e-05, |
|
"loss": 0.839, |
|
"step": 18350 |
|
}, |
|
{ |
|
"epoch": 2.9444711153784606, |
|
"grad_norm": 0.7348781228065491, |
|
"learning_rate": 1.9754325210909174e-05, |
|
"loss": 0.8412, |
|
"step": 18400 |
|
}, |
|
{ |
|
"epoch": 2.9524723955832934, |
|
"grad_norm": 0.6994746923446655, |
|
"learning_rate": 1.947705731220462e-05, |
|
"loss": 0.8069, |
|
"step": 18450 |
|
}, |
|
{ |
|
"epoch": 2.9604736757881263, |
|
"grad_norm": 0.6778689026832581, |
|
"learning_rate": 1.920127758774466e-05, |
|
"loss": 0.8119, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 2.968474955992959, |
|
"grad_norm": 0.7225350737571716, |
|
"learning_rate": 1.8926999483424313e-05, |
|
"loss": 0.8504, |
|
"step": 18550 |
|
}, |
|
{ |
|
"epoch": 2.976476236197792, |
|
"grad_norm": 0.8543927073478699, |
|
"learning_rate": 1.8654236371925727e-05, |
|
"loss": 0.8124, |
|
"step": 18600 |
|
}, |
|
{ |
|
"epoch": 2.9844775164026243, |
|
"grad_norm": 0.6733004450798035, |
|
"learning_rate": 1.8383001552066164e-05, |
|
"loss": 0.8299, |
|
"step": 18650 |
|
}, |
|
{ |
|
"epoch": 2.992478796607457, |
|
"grad_norm": 0.7595524787902832, |
|
"learning_rate": 1.8113308248149635e-05, |
|
"loss": 0.8177, |
|
"step": 18700 |
|
}, |
|
{ |
|
"epoch": 3.00048007681229, |
|
"grad_norm": 0.6430355310440063, |
|
"learning_rate": 1.784516960932211e-05, |
|
"loss": 0.8311, |
|
"step": 18750 |
|
}, |
|
{ |
|
"epoch": 3.0084813570171227, |
|
"grad_norm": 0.6314593553543091, |
|
"learning_rate": 1.7578598708930428e-05, |
|
"loss": 0.7241, |
|
"step": 18800 |
|
}, |
|
{ |
|
"epoch": 3.0164826372219555, |
|
"grad_norm": 0.6709548234939575, |
|
"learning_rate": 1.7313608543884868e-05, |
|
"loss": 0.7459, |
|
"step": 18850 |
|
}, |
|
{ |
|
"epoch": 3.0244839174267883, |
|
"grad_norm": 0.6981055736541748, |
|
"learning_rate": 1.7050212034025575e-05, |
|
"loss": 0.7399, |
|
"step": 18900 |
|
}, |
|
{ |
|
"epoch": 3.032485197631621, |
|
"grad_norm": 0.7344752550125122, |
|
"learning_rate": 1.6788422021492472e-05, |
|
"loss": 0.7197, |
|
"step": 18950 |
|
}, |
|
{ |
|
"epoch": 3.040486477836454, |
|
"grad_norm": 0.7181177139282227, |
|
"learning_rate": 1.6528251270099256e-05, |
|
"loss": 0.7161, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 3.048487758041287, |
|
"grad_norm": 0.5588538646697998, |
|
"learning_rate": 1.6269712464711105e-05, |
|
"loss": 0.7488, |
|
"step": 19050 |
|
}, |
|
{ |
|
"epoch": 3.056489038246119, |
|
"grad_norm": 0.7480596303939819, |
|
"learning_rate": 1.6012818210626075e-05, |
|
"loss": 0.712, |
|
"step": 19100 |
|
}, |
|
{ |
|
"epoch": 3.064490318450952, |
|
"grad_norm": 0.5872476696968079, |
|
"learning_rate": 1.5757581032960638e-05, |
|
"loss": 0.7154, |
|
"step": 19150 |
|
}, |
|
{ |
|
"epoch": 3.072491598655785, |
|
"grad_norm": 0.6517592072486877, |
|
"learning_rate": 1.550401337603902e-05, |
|
"loss": 0.7134, |
|
"step": 19200 |
|
}, |
|
{ |
|
"epoch": 3.0804928788606176, |
|
"grad_norm": 0.6740624308586121, |
|
"learning_rate": 1.5252127602786397e-05, |
|
"loss": 0.7446, |
|
"step": 19250 |
|
}, |
|
{ |
|
"epoch": 3.0884941590654504, |
|
"grad_norm": 0.6558243632316589, |
|
"learning_rate": 1.5001935994126104e-05, |
|
"loss": 0.7354, |
|
"step": 19300 |
|
}, |
|
{ |
|
"epoch": 3.0964954392702833, |
|
"grad_norm": 0.6562448740005493, |
|
"learning_rate": 1.4753450748380953e-05, |
|
"loss": 0.7035, |
|
"step": 19350 |
|
}, |
|
{ |
|
"epoch": 3.104496719475116, |
|
"grad_norm": 0.7197704315185547, |
|
"learning_rate": 1.4506683980678503e-05, |
|
"loss": 0.7313, |
|
"step": 19400 |
|
}, |
|
{ |
|
"epoch": 3.112497999679949, |
|
"grad_norm": 0.7322149872779846, |
|
"learning_rate": 1.4261647722360278e-05, |
|
"loss": 0.7357, |
|
"step": 19450 |
|
}, |
|
{ |
|
"epoch": 3.1204992798847817, |
|
"grad_norm": 0.7477754354476929, |
|
"learning_rate": 1.4018353920395193e-05, |
|
"loss": 0.7334, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 3.1285005600896145, |
|
"grad_norm": 0.5789663791656494, |
|
"learning_rate": 1.3776814436797176e-05, |
|
"loss": 0.7435, |
|
"step": 19550 |
|
}, |
|
{ |
|
"epoch": 3.1365018402944473, |
|
"grad_norm": 0.6711790561676025, |
|
"learning_rate": 1.3537041048046695e-05, |
|
"loss": 0.713, |
|
"step": 19600 |
|
}, |
|
{ |
|
"epoch": 3.1445031204992797, |
|
"grad_norm": 0.6365526914596558, |
|
"learning_rate": 1.32990454445166e-05, |
|
"loss": 0.7679, |
|
"step": 19650 |
|
}, |
|
{ |
|
"epoch": 3.1525044007041125, |
|
"grad_norm": 0.6711440682411194, |
|
"learning_rate": 1.3062839229902263e-05, |
|
"loss": 0.7199, |
|
"step": 19700 |
|
}, |
|
{ |
|
"epoch": 3.1605056809089453, |
|
"grad_norm": 0.7575156688690186, |
|
"learning_rate": 1.2828433920655687e-05, |
|
"loss": 0.7052, |
|
"step": 19750 |
|
}, |
|
{ |
|
"epoch": 3.168506961113778, |
|
"grad_norm": 0.6248161792755127, |
|
"learning_rate": 1.2595840945424093e-05, |
|
"loss": 0.7129, |
|
"step": 19800 |
|
}, |
|
{ |
|
"epoch": 3.176508241318611, |
|
"grad_norm": 0.7342621684074402, |
|
"learning_rate": 1.2365071644492682e-05, |
|
"loss": 0.7187, |
|
"step": 19850 |
|
}, |
|
{ |
|
"epoch": 3.184509521523444, |
|
"grad_norm": 0.6555837988853455, |
|
"learning_rate": 1.2136137269231723e-05, |
|
"loss": 0.7378, |
|
"step": 19900 |
|
}, |
|
{ |
|
"epoch": 3.1925108017282766, |
|
"grad_norm": 0.7092923521995544, |
|
"learning_rate": 1.1909048981547998e-05, |
|
"loss": 0.7259, |
|
"step": 19950 |
|
}, |
|
{ |
|
"epoch": 3.2005120819331094, |
|
"grad_norm": 0.6612015962600708, |
|
"learning_rate": 1.1683817853340595e-05, |
|
"loss": 0.7168, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 3.2085133621379422, |
|
"grad_norm": 0.7098072171211243, |
|
"learning_rate": 1.1460454865961056e-05, |
|
"loss": 0.7226, |
|
"step": 20050 |
|
}, |
|
{ |
|
"epoch": 3.2165146423427746, |
|
"grad_norm": 0.7153401970863342, |
|
"learning_rate": 1.1238970909677993e-05, |
|
"loss": 0.7304, |
|
"step": 20100 |
|
}, |
|
{ |
|
"epoch": 3.2245159225476074, |
|
"grad_norm": 0.6184985637664795, |
|
"learning_rate": 1.1019376783146174e-05, |
|
"loss": 0.7113, |
|
"step": 20150 |
|
}, |
|
{ |
|
"epoch": 3.2325172027524403, |
|
"grad_norm": 0.658746063709259, |
|
"learning_rate": 1.080168319287989e-05, |
|
"loss": 0.7545, |
|
"step": 20200 |
|
}, |
|
{ |
|
"epoch": 3.240518482957273, |
|
"grad_norm": 0.6574704647064209, |
|
"learning_rate": 1.0585900752731077e-05, |
|
"loss": 0.7103, |
|
"step": 20250 |
|
}, |
|
{ |
|
"epoch": 3.248519763162106, |
|
"grad_norm": 0.7436890006065369, |
|
"learning_rate": 1.0372039983371818e-05, |
|
"loss": 0.7335, |
|
"step": 20300 |
|
}, |
|
{ |
|
"epoch": 3.2565210433669387, |
|
"grad_norm": 0.7010487914085388, |
|
"learning_rate": 1.0160111311781284e-05, |
|
"loss": 0.7463, |
|
"step": 20350 |
|
}, |
|
{ |
|
"epoch": 3.2645223235717715, |
|
"grad_norm": 0.7422741651535034, |
|
"learning_rate": 9.950125070737476e-06, |
|
"loss": 0.7253, |
|
"step": 20400 |
|
}, |
|
{ |
|
"epoch": 3.2725236037766043, |
|
"grad_norm": 0.6463279724121094, |
|
"learning_rate": 9.742091498313426e-06, |
|
"loss": 0.7334, |
|
"step": 20450 |
|
}, |
|
{ |
|
"epoch": 3.280524883981437, |
|
"grad_norm": 0.7407013177871704, |
|
"learning_rate": 9.536020737377993e-06, |
|
"loss": 0.7104, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 3.28852616418627, |
|
"grad_norm": 0.6367645263671875, |
|
"learning_rate": 9.331922835101282e-06, |
|
"loss": 0.7105, |
|
"step": 20550 |
|
}, |
|
{ |
|
"epoch": 3.296527444391103, |
|
"grad_norm": 0.7848844528198242, |
|
"learning_rate": 9.129807742464957e-06, |
|
"loss": 0.7155, |
|
"step": 20600 |
|
}, |
|
{ |
|
"epoch": 3.304528724595935, |
|
"grad_norm": 0.6976688504219055, |
|
"learning_rate": 8.929685313776903e-06, |
|
"loss": 0.7363, |
|
"step": 20650 |
|
}, |
|
{ |
|
"epoch": 3.312530004800768, |
|
"grad_norm": 0.7478333711624146, |
|
"learning_rate": 8.731565306190852e-06, |
|
"loss": 0.7612, |
|
"step": 20700 |
|
}, |
|
{ |
|
"epoch": 3.320531285005601, |
|
"grad_norm": 0.6891793012619019, |
|
"learning_rate": 8.535457379230649e-06, |
|
"loss": 0.7221, |
|
"step": 20750 |
|
}, |
|
{ |
|
"epoch": 3.3285325652104336, |
|
"grad_norm": 0.6285429000854492, |
|
"learning_rate": 8.341371094319289e-06, |
|
"loss": 0.7554, |
|
"step": 20800 |
|
}, |
|
{ |
|
"epoch": 3.3365338454152664, |
|
"grad_norm": 0.7216114401817322, |
|
"learning_rate": 8.149315914312733e-06, |
|
"loss": 0.7297, |
|
"step": 20850 |
|
}, |
|
{ |
|
"epoch": 3.3445351256200992, |
|
"grad_norm": 0.6681531071662903, |
|
"learning_rate": 7.959301203038566e-06, |
|
"loss": 0.7283, |
|
"step": 20900 |
|
}, |
|
{ |
|
"epoch": 3.352536405824932, |
|
"grad_norm": 0.6501981019973755, |
|
"learning_rate": 7.771336224839425e-06, |
|
"loss": 0.723, |
|
"step": 20950 |
|
}, |
|
{ |
|
"epoch": 3.360537686029765, |
|
"grad_norm": 0.7048876285552979, |
|
"learning_rate": 7.585430144121319e-06, |
|
"loss": 0.7239, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 3.3685389662345977, |
|
"grad_norm": 0.6659078001976013, |
|
"learning_rate": 7.401592024906812e-06, |
|
"loss": 0.7417, |
|
"step": 21050 |
|
}, |
|
{ |
|
"epoch": 3.37654024643943, |
|
"grad_norm": 0.632502019405365, |
|
"learning_rate": 7.219830830393093e-06, |
|
"loss": 0.7115, |
|
"step": 21100 |
|
}, |
|
{ |
|
"epoch": 3.384541526644263, |
|
"grad_norm": 0.6977764964103699, |
|
"learning_rate": 7.040155422514977e-06, |
|
"loss": 0.6854, |
|
"step": 21150 |
|
}, |
|
{ |
|
"epoch": 3.3925428068490957, |
|
"grad_norm": 0.6409167051315308, |
|
"learning_rate": 6.862574561512825e-06, |
|
"loss": 0.7016, |
|
"step": 21200 |
|
}, |
|
{ |
|
"epoch": 3.4005440870539285, |
|
"grad_norm": 0.6645964980125427, |
|
"learning_rate": 6.6870969055054246e-06, |
|
"loss": 0.7224, |
|
"step": 21250 |
|
}, |
|
{ |
|
"epoch": 3.4085453672587613, |
|
"grad_norm": 0.6525962352752686, |
|
"learning_rate": 6.51373101006787e-06, |
|
"loss": 0.7248, |
|
"step": 21300 |
|
}, |
|
{ |
|
"epoch": 3.416546647463594, |
|
"grad_norm": 0.7221346497535706, |
|
"learning_rate": 6.3424853278144015e-06, |
|
"loss": 0.7159, |
|
"step": 21350 |
|
}, |
|
{ |
|
"epoch": 3.424547927668427, |
|
"grad_norm": 0.7742597460746765, |
|
"learning_rate": 6.173368207986358e-06, |
|
"loss": 0.7158, |
|
"step": 21400 |
|
}, |
|
{ |
|
"epoch": 3.43254920787326, |
|
"grad_norm": 0.7201706171035767, |
|
"learning_rate": 6.00638789604499e-06, |
|
"loss": 0.7351, |
|
"step": 21450 |
|
}, |
|
{ |
|
"epoch": 3.4405504880780926, |
|
"grad_norm": 0.6694920063018799, |
|
"learning_rate": 5.841552533269534e-06, |
|
"loss": 0.7313, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 3.4485517682829254, |
|
"grad_norm": 0.773014485836029, |
|
"learning_rate": 5.6788701563602665e-06, |
|
"loss": 0.7526, |
|
"step": 21550 |
|
}, |
|
{ |
|
"epoch": 3.4565530484877582, |
|
"grad_norm": 0.602942168712616, |
|
"learning_rate": 5.518348697046644e-06, |
|
"loss": 0.7304, |
|
"step": 21600 |
|
}, |
|
{ |
|
"epoch": 3.464554328692591, |
|
"grad_norm": 0.7288883924484253, |
|
"learning_rate": 5.359995981700544e-06, |
|
"loss": 0.7086, |
|
"step": 21650 |
|
}, |
|
{ |
|
"epoch": 3.4725556088974234, |
|
"grad_norm": 0.8319910168647766, |
|
"learning_rate": 5.203819730954806e-06, |
|
"loss": 0.7356, |
|
"step": 21700 |
|
}, |
|
{ |
|
"epoch": 3.4805568891022562, |
|
"grad_norm": 0.7911546230316162, |
|
"learning_rate": 5.049827559326686e-06, |
|
"loss": 0.7192, |
|
"step": 21750 |
|
}, |
|
{ |
|
"epoch": 3.488558169307089, |
|
"grad_norm": 0.704937756061554, |
|
"learning_rate": 4.898026974846631e-06, |
|
"loss": 0.7209, |
|
"step": 21800 |
|
}, |
|
{ |
|
"epoch": 3.496559449511922, |
|
"grad_norm": 0.7878553867340088, |
|
"learning_rate": 4.748425378692278e-06, |
|
"loss": 0.7449, |
|
"step": 21850 |
|
}, |
|
{ |
|
"epoch": 3.5045607297167547, |
|
"grad_norm": 0.7807219624519348, |
|
"learning_rate": 4.601030064827527e-06, |
|
"loss": 0.722, |
|
"step": 21900 |
|
}, |
|
{ |
|
"epoch": 3.5125620099215875, |
|
"grad_norm": 0.7362565994262695, |
|
"learning_rate": 4.455848219646957e-06, |
|
"loss": 0.7069, |
|
"step": 21950 |
|
}, |
|
{ |
|
"epoch": 3.5205632901264203, |
|
"grad_norm": 0.6918688416481018, |
|
"learning_rate": 4.3128869216254366e-06, |
|
"loss": 0.7198, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 3.528564570331253, |
|
"grad_norm": 0.6748279929161072, |
|
"learning_rate": 4.172153140973012e-06, |
|
"loss": 0.7429, |
|
"step": 22050 |
|
}, |
|
{ |
|
"epoch": 3.5365658505360855, |
|
"grad_norm": 0.7261555194854736, |
|
"learning_rate": 4.033653739295062e-06, |
|
"loss": 0.7453, |
|
"step": 22100 |
|
}, |
|
{ |
|
"epoch": 3.5445671307409183, |
|
"grad_norm": 0.7590264081954956, |
|
"learning_rate": 3.897395469257759e-06, |
|
"loss": 0.7435, |
|
"step": 22150 |
|
}, |
|
{ |
|
"epoch": 3.552568410945751, |
|
"grad_norm": 0.6835177540779114, |
|
"learning_rate": 3.7633849742588213e-06, |
|
"loss": 0.7305, |
|
"step": 22200 |
|
}, |
|
{ |
|
"epoch": 3.560569691150584, |
|
"grad_norm": 0.7218915224075317, |
|
"learning_rate": 3.6316287881036306e-06, |
|
"loss": 0.7357, |
|
"step": 22250 |
|
}, |
|
{ |
|
"epoch": 3.568570971355417, |
|
"grad_norm": 0.7100145220756531, |
|
"learning_rate": 3.502133334686669e-06, |
|
"loss": 0.7244, |
|
"step": 22300 |
|
}, |
|
{ |
|
"epoch": 3.5765722515602496, |
|
"grad_norm": 0.6982618570327759, |
|
"learning_rate": 3.374904927678285e-06, |
|
"loss": 0.7172, |
|
"step": 22350 |
|
}, |
|
{ |
|
"epoch": 3.5845735317650824, |
|
"grad_norm": 0.6808996200561523, |
|
"learning_rate": 3.2499497702169035e-06, |
|
"loss": 0.7112, |
|
"step": 22400 |
|
}, |
|
{ |
|
"epoch": 3.5925748119699152, |
|
"grad_norm": 0.6580341458320618, |
|
"learning_rate": 3.1272739546065746e-06, |
|
"loss": 0.7375, |
|
"step": 22450 |
|
}, |
|
{ |
|
"epoch": 3.600576092174748, |
|
"grad_norm": 0.7925476431846619, |
|
"learning_rate": 3.0068834620199103e-06, |
|
"loss": 0.7351, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 3.608577372379581, |
|
"grad_norm": 0.6711053848266602, |
|
"learning_rate": 2.888784162206504e-06, |
|
"loss": 0.7281, |
|
"step": 22550 |
|
}, |
|
{ |
|
"epoch": 3.6165786525844137, |
|
"grad_norm": 0.6397190690040588, |
|
"learning_rate": 2.7729818132067264e-06, |
|
"loss": 0.7277, |
|
"step": 22600 |
|
}, |
|
{ |
|
"epoch": 3.6245799327892465, |
|
"grad_norm": 0.6673882007598877, |
|
"learning_rate": 2.6594820610710057e-06, |
|
"loss": 0.7045, |
|
"step": 22650 |
|
}, |
|
{ |
|
"epoch": 3.6325812129940793, |
|
"grad_norm": 0.6923602223396301, |
|
"learning_rate": 2.5482904395845107e-06, |
|
"loss": 0.7435, |
|
"step": 22700 |
|
}, |
|
{ |
|
"epoch": 3.6405824931989117, |
|
"grad_norm": 0.7044751048088074, |
|
"learning_rate": 2.439412369997374e-06, |
|
"loss": 0.7135, |
|
"step": 22750 |
|
}, |
|
{ |
|
"epoch": 3.6485837734037445, |
|
"grad_norm": 0.7120568156242371, |
|
"learning_rate": 2.33285316076039e-06, |
|
"loss": 0.7115, |
|
"step": 22800 |
|
}, |
|
{ |
|
"epoch": 3.6565850536085773, |
|
"grad_norm": 0.6994568705558777, |
|
"learning_rate": 2.22861800726617e-06, |
|
"loss": 0.7212, |
|
"step": 22850 |
|
}, |
|
{ |
|
"epoch": 3.66458633381341, |
|
"grad_norm": 0.7410824298858643, |
|
"learning_rate": 2.1267119915958088e-06, |
|
"loss": 0.6973, |
|
"step": 22900 |
|
}, |
|
{ |
|
"epoch": 3.672587614018243, |
|
"grad_norm": 0.7019298076629639, |
|
"learning_rate": 2.0271400822711894e-06, |
|
"loss": 0.724, |
|
"step": 22950 |
|
}, |
|
{ |
|
"epoch": 3.680588894223076, |
|
"grad_norm": 0.7187603116035461, |
|
"learning_rate": 1.929907134012654e-06, |
|
"loss": 0.699, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 3.6885901744279086, |
|
"grad_norm": 0.7453562617301941, |
|
"learning_rate": 1.8350178875023272e-06, |
|
"loss": 0.7243, |
|
"step": 23050 |
|
}, |
|
{ |
|
"epoch": 3.696591454632741, |
|
"grad_norm": 0.8272750973701477, |
|
"learning_rate": 1.7424769691530284e-06, |
|
"loss": 0.7178, |
|
"step": 23100 |
|
}, |
|
{ |
|
"epoch": 3.704592734837574, |
|
"grad_norm": 0.6875506043434143, |
|
"learning_rate": 1.6522888908826473e-06, |
|
"loss": 0.7328, |
|
"step": 23150 |
|
}, |
|
{ |
|
"epoch": 3.7125940150424066, |
|
"grad_norm": 0.6446670889854431, |
|
"learning_rate": 1.5644580498941851e-06, |
|
"loss": 0.7121, |
|
"step": 23200 |
|
}, |
|
{ |
|
"epoch": 3.7205952952472394, |
|
"grad_norm": 0.6092432141304016, |
|
"learning_rate": 1.478988728461367e-06, |
|
"loss": 0.7045, |
|
"step": 23250 |
|
}, |
|
{ |
|
"epoch": 3.7285965754520722, |
|
"grad_norm": 0.6800952553749084, |
|
"learning_rate": 1.3958850937198453e-06, |
|
"loss": 0.7275, |
|
"step": 23300 |
|
}, |
|
{ |
|
"epoch": 3.736597855656905, |
|
"grad_norm": 0.684956431388855, |
|
"learning_rate": 1.315151197464043e-06, |
|
"loss": 0.7272, |
|
"step": 23350 |
|
}, |
|
{ |
|
"epoch": 3.744599135861738, |
|
"grad_norm": 0.7142546772956848, |
|
"learning_rate": 1.236790975949592e-06, |
|
"loss": 0.7213, |
|
"step": 23400 |
|
}, |
|
{ |
|
"epoch": 3.7526004160665707, |
|
"grad_norm": 0.7309266328811646, |
|
"learning_rate": 1.1608082497014228e-06, |
|
"loss": 0.7212, |
|
"step": 23450 |
|
}, |
|
{ |
|
"epoch": 3.7606016962714035, |
|
"grad_norm": 0.7835919260978699, |
|
"learning_rate": 1.087206723327483e-06, |
|
"loss": 0.7411, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 3.7686029764762363, |
|
"grad_norm": 0.6577751040458679, |
|
"learning_rate": 1.0159899853381394e-06, |
|
"loss": 0.7104, |
|
"step": 23550 |
|
}, |
|
{ |
|
"epoch": 3.776604256681069, |
|
"grad_norm": 0.7286819219589233, |
|
"learning_rate": 9.471615079711838e-07, |
|
"loss": 0.7284, |
|
"step": 23600 |
|
}, |
|
{ |
|
"epoch": 3.784605536885902, |
|
"grad_norm": 0.7102928757667542, |
|
"learning_rate": 8.807246470225517e-07, |
|
"loss": 0.7415, |
|
"step": 23650 |
|
}, |
|
{ |
|
"epoch": 3.7926068170907348, |
|
"grad_norm": 0.6017107963562012, |
|
"learning_rate": 8.166826416827422e-07, |
|
"loss": 0.7063, |
|
"step": 23700 |
|
}, |
|
{ |
|
"epoch": 3.800608097295567, |
|
"grad_norm": 0.7728732824325562, |
|
"learning_rate": 7.550386143788224e-07, |
|
"loss": 0.6959, |
|
"step": 23750 |
|
}, |
|
{ |
|
"epoch": 3.8086093775004, |
|
"grad_norm": 0.7075885534286499, |
|
"learning_rate": 6.95795570622243e-07, |
|
"loss": 0.6977, |
|
"step": 23800 |
|
}, |
|
{ |
|
"epoch": 3.816610657705233, |
|
"grad_norm": 0.7628594636917114, |
|
"learning_rate": 6.389563988622948e-07, |
|
"loss": 0.7189, |
|
"step": 23850 |
|
}, |
|
{ |
|
"epoch": 3.8246119379100656, |
|
"grad_norm": 0.6857520341873169, |
|
"learning_rate": 5.845238703452604e-07, |
|
"loss": 0.7281, |
|
"step": 23900 |
|
}, |
|
{ |
|
"epoch": 3.8326132181148984, |
|
"grad_norm": 0.676717221736908, |
|
"learning_rate": 5.325006389793053e-07, |
|
"loss": 0.7275, |
|
"step": 23950 |
|
}, |
|
{ |
|
"epoch": 3.8406144983197312, |
|
"grad_norm": 0.6261276602745056, |
|
"learning_rate": 4.828892412050978e-07, |
|
"loss": 0.7162, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 3.848615778524564, |
|
"grad_norm": 0.761638343334198, |
|
"learning_rate": 4.356920958721311e-07, |
|
"loss": 0.7405, |
|
"step": 24050 |
|
}, |
|
{ |
|
"epoch": 3.856617058729397, |
|
"grad_norm": 0.697512686252594, |
|
"learning_rate": 3.909115041207889e-07, |
|
"loss": 0.7156, |
|
"step": 24100 |
|
}, |
|
{ |
|
"epoch": 3.8646183389342292, |
|
"grad_norm": 0.6379271149635315, |
|
"learning_rate": 3.4854964927015235e-07, |
|
"loss": 0.7443, |
|
"step": 24150 |
|
}, |
|
{ |
|
"epoch": 3.872619619139062, |
|
"grad_norm": 0.7762777209281921, |
|
"learning_rate": 3.086085967115626e-07, |
|
"loss": 0.7186, |
|
"step": 24200 |
|
}, |
|
{ |
|
"epoch": 3.880620899343895, |
|
"grad_norm": 0.6799700260162354, |
|
"learning_rate": 2.7109029380790186e-07, |
|
"loss": 0.6928, |
|
"step": 24250 |
|
}, |
|
{ |
|
"epoch": 3.8886221795487277, |
|
"grad_norm": 0.8152979016304016, |
|
"learning_rate": 2.3599656979866325e-07, |
|
"loss": 0.7145, |
|
"step": 24300 |
|
}, |
|
{ |
|
"epoch": 3.8966234597535605, |
|
"grad_norm": 0.6055078506469727, |
|
"learning_rate": 2.0332913571074476e-07, |
|
"loss": 0.7105, |
|
"step": 24350 |
|
}, |
|
{ |
|
"epoch": 3.9046247399583933, |
|
"grad_norm": 0.7122170329093933, |
|
"learning_rate": 1.7308958427505462e-07, |
|
"loss": 0.7322, |
|
"step": 24400 |
|
}, |
|
{ |
|
"epoch": 3.912626020163226, |
|
"grad_norm": 0.7174406051635742, |
|
"learning_rate": 1.4527938984883471e-07, |
|
"loss": 0.7294, |
|
"step": 24450 |
|
}, |
|
{ |
|
"epoch": 3.920627300368059, |
|
"grad_norm": 0.7419302463531494, |
|
"learning_rate": 1.1989990834378462e-07, |
|
"loss": 0.7272, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 3.9286285805728918, |
|
"grad_norm": 0.7208146452903748, |
|
"learning_rate": 9.695237715994788e-08, |
|
"loss": 0.7066, |
|
"step": 24550 |
|
}, |
|
{ |
|
"epoch": 3.9366298607777246, |
|
"grad_norm": 0.8183807730674744, |
|
"learning_rate": 7.6437915125388e-08, |
|
"loss": 0.721, |
|
"step": 24600 |
|
}, |
|
{ |
|
"epoch": 3.9446311409825574, |
|
"grad_norm": 0.7065560817718506, |
|
"learning_rate": 5.835752244164883e-08, |
|
"loss": 0.737, |
|
"step": 24650 |
|
}, |
|
{ |
|
"epoch": 3.95263242118739, |
|
"grad_norm": 0.7247999310493469, |
|
"learning_rate": 4.2712080634949024e-08, |
|
"loss": 0.7346, |
|
"step": 24700 |
|
}, |
|
{ |
|
"epoch": 3.9606337013922226, |
|
"grad_norm": 0.6564909219741821, |
|
"learning_rate": 2.9502352513255394e-08, |
|
"loss": 0.7305, |
|
"step": 24750 |
|
}, |
|
{ |
|
"epoch": 3.9686349815970554, |
|
"grad_norm": 0.7844890356063843, |
|
"learning_rate": 1.8728982129051497e-08, |
|
"loss": 0.7076, |
|
"step": 24800 |
|
}, |
|
{ |
|
"epoch": 3.9766362618018882, |
|
"grad_norm": 0.7181993722915649, |
|
"learning_rate": 1.0392494747957227e-08, |
|
"loss": 0.7439, |
|
"step": 24850 |
|
}, |
|
{ |
|
"epoch": 3.984637542006721, |
|
"grad_norm": 0.6785070300102234, |
|
"learning_rate": 4.493296823104842e-09, |
|
"loss": 0.7222, |
|
"step": 24900 |
|
}, |
|
{ |
|
"epoch": 3.992638822211554, |
|
"grad_norm": 0.6972596645355225, |
|
"learning_rate": 1.0316759753381534e-09, |
|
"loss": 0.7258, |
|
"step": 24950 |
|
} |
|
], |
|
"logging_steps": 50, |
|
"max_steps": 24996, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 4, |
|
"save_steps": 2000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 5.928976828545164e+19, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|