|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 500, |
|
"global_step": 1563, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0006397952655150352, |
|
"grad_norm": 8.45120858638771, |
|
"learning_rate": 1.2738853503184715e-07, |
|
"loss": 2.1311, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.003198976327575176, |
|
"grad_norm": 8.544993499312284, |
|
"learning_rate": 6.369426751592357e-07, |
|
"loss": 2.1114, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.006397952655150352, |
|
"grad_norm": 8.772571070364279, |
|
"learning_rate": 1.2738853503184715e-06, |
|
"loss": 2.1017, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.009596928982725527, |
|
"grad_norm": 4.897458678497011, |
|
"learning_rate": 1.9108280254777074e-06, |
|
"loss": 1.961, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.012795905310300703, |
|
"grad_norm": 4.075186495410978, |
|
"learning_rate": 2.547770700636943e-06, |
|
"loss": 1.8269, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01599488163787588, |
|
"grad_norm": 3.503726866717514, |
|
"learning_rate": 3.1847133757961785e-06, |
|
"loss": 1.7622, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.019193857965451054, |
|
"grad_norm": 3.027293734155194, |
|
"learning_rate": 3.821656050955415e-06, |
|
"loss": 1.6355, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.022392834293026232, |
|
"grad_norm": 2.8891766987266796, |
|
"learning_rate": 4.45859872611465e-06, |
|
"loss": 1.5967, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.025591810620601407, |
|
"grad_norm": 2.9905733143061117, |
|
"learning_rate": 5.095541401273886e-06, |
|
"loss": 1.6482, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.028790786948176585, |
|
"grad_norm": 3.0173836321259175, |
|
"learning_rate": 5.732484076433121e-06, |
|
"loss": 1.6306, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.03198976327575176, |
|
"grad_norm": 2.9409129691288483, |
|
"learning_rate": 6.369426751592357e-06, |
|
"loss": 1.6179, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.035188739603326934, |
|
"grad_norm": 2.8249107707715346, |
|
"learning_rate": 7.006369426751593e-06, |
|
"loss": 1.5905, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.03838771593090211, |
|
"grad_norm": 3.2414392670664114, |
|
"learning_rate": 7.64331210191083e-06, |
|
"loss": 1.618, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04158669225847729, |
|
"grad_norm": 3.0059278893643753, |
|
"learning_rate": 8.280254777070064e-06, |
|
"loss": 1.6345, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.044785668586052464, |
|
"grad_norm": 3.000977330200516, |
|
"learning_rate": 8.9171974522293e-06, |
|
"loss": 1.6036, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.04798464491362764, |
|
"grad_norm": 2.7742457498827395, |
|
"learning_rate": 9.554140127388536e-06, |
|
"loss": 1.6228, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.05118362124120281, |
|
"grad_norm": 2.9247171067005846, |
|
"learning_rate": 1.0191082802547772e-05, |
|
"loss": 1.5814, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.05438259756877799, |
|
"grad_norm": 2.707250155470774, |
|
"learning_rate": 1.0828025477707008e-05, |
|
"loss": 1.5989, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.05758157389635317, |
|
"grad_norm": 2.777451110804097, |
|
"learning_rate": 1.1464968152866242e-05, |
|
"loss": 1.6046, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.060780550223928344, |
|
"grad_norm": 2.750734662498446, |
|
"learning_rate": 1.2101910828025478e-05, |
|
"loss": 1.5866, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.06397952655150352, |
|
"grad_norm": 3.231815816845173, |
|
"learning_rate": 1.2738853503184714e-05, |
|
"loss": 1.5973, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.0671785028790787, |
|
"grad_norm": 2.8990703853478856, |
|
"learning_rate": 1.337579617834395e-05, |
|
"loss": 1.5865, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.07037747920665387, |
|
"grad_norm": 2.5849093940529175, |
|
"learning_rate": 1.4012738853503186e-05, |
|
"loss": 1.6027, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.07357645553422905, |
|
"grad_norm": 2.6093741257086256, |
|
"learning_rate": 1.464968152866242e-05, |
|
"loss": 1.5806, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.07677543186180422, |
|
"grad_norm": 2.6475786335268516, |
|
"learning_rate": 1.528662420382166e-05, |
|
"loss": 1.6015, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.0799744081893794, |
|
"grad_norm": 2.5612093515246905, |
|
"learning_rate": 1.5923566878980894e-05, |
|
"loss": 1.6285, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.08317338451695458, |
|
"grad_norm": 2.635291993226663, |
|
"learning_rate": 1.6560509554140128e-05, |
|
"loss": 1.6063, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.08637236084452975, |
|
"grad_norm": 2.3771393339595597, |
|
"learning_rate": 1.7197452229299365e-05, |
|
"loss": 1.6329, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.08957133717210493, |
|
"grad_norm": 2.935313002912278, |
|
"learning_rate": 1.78343949044586e-05, |
|
"loss": 1.6015, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.0927703134996801, |
|
"grad_norm": 2.5747148263761708, |
|
"learning_rate": 1.8471337579617837e-05, |
|
"loss": 1.5831, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.09596928982725528, |
|
"grad_norm": 2.8025363824063576, |
|
"learning_rate": 1.910828025477707e-05, |
|
"loss": 1.5832, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.09916826615483046, |
|
"grad_norm": 2.5997598240468474, |
|
"learning_rate": 1.9745222929936306e-05, |
|
"loss": 1.6228, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.10236724248240563, |
|
"grad_norm": 2.6087028065146067, |
|
"learning_rate": 1.9999775332635076e-05, |
|
"loss": 1.5726, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.10556621880998081, |
|
"grad_norm": 2.6832519430043043, |
|
"learning_rate": 1.999840240196313e-05, |
|
"loss": 1.5822, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.10876519513755598, |
|
"grad_norm": 2.5312510284895575, |
|
"learning_rate": 1.9995781526975738e-05, |
|
"loss": 1.5902, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.11196417146513116, |
|
"grad_norm": 2.552059625982884, |
|
"learning_rate": 1.9991913034795768e-05, |
|
"loss": 1.6468, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.11516314779270634, |
|
"grad_norm": 2.737621660353208, |
|
"learning_rate": 1.9986797408266636e-05, |
|
"loss": 1.5825, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.1183621241202815, |
|
"grad_norm": 2.3504295992454693, |
|
"learning_rate": 1.9980435285892056e-05, |
|
"loss": 1.6176, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.12156110044785669, |
|
"grad_norm": 2.605194872517274, |
|
"learning_rate": 1.9972827461756335e-05, |
|
"loss": 1.6186, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.12476007677543186, |
|
"grad_norm": 2.545693165994165, |
|
"learning_rate": 1.9963974885425267e-05, |
|
"loss": 1.6284, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.12795905310300704, |
|
"grad_norm": 2.4582821508493535, |
|
"learning_rate": 1.9953878661827603e-05, |
|
"loss": 1.5967, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.13115802943058222, |
|
"grad_norm": 2.294144210629627, |
|
"learning_rate": 1.994254005111715e-05, |
|
"loss": 1.5754, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.1343570057581574, |
|
"grad_norm": 2.455611945909697, |
|
"learning_rate": 1.992996046851548e-05, |
|
"loss": 1.6293, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.13755598208573255, |
|
"grad_norm": 2.5851678984397037, |
|
"learning_rate": 1.9916141484135297e-05, |
|
"loss": 1.5629, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.14075495841330773, |
|
"grad_norm": 2.42361611389672, |
|
"learning_rate": 1.990108482278446e-05, |
|
"loss": 1.6161, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.14395393474088292, |
|
"grad_norm": 2.3505427050524585, |
|
"learning_rate": 1.9884792363750684e-05, |
|
"loss": 1.6044, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.1471529110684581, |
|
"grad_norm": 2.388109504758734, |
|
"learning_rate": 1.9867266140567024e-05, |
|
"loss": 1.6312, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.15035188739603328, |
|
"grad_norm": 2.4772439223936313, |
|
"learning_rate": 1.9848508340758014e-05, |
|
"loss": 1.6011, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.15355086372360843, |
|
"grad_norm": 2.684448860110458, |
|
"learning_rate": 1.9828521305566647e-05, |
|
"loss": 1.6624, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.15674984005118361, |
|
"grad_norm": 2.373116924740462, |
|
"learning_rate": 1.9807307529662175e-05, |
|
"loss": 1.6455, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.1599488163787588, |
|
"grad_norm": 2.3871214467638127, |
|
"learning_rate": 1.9784869660828708e-05, |
|
"loss": 1.6597, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.16314779270633398, |
|
"grad_norm": 2.553594848899839, |
|
"learning_rate": 1.9761210499634754e-05, |
|
"loss": 1.5698, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.16634676903390916, |
|
"grad_norm": 2.5770365342618318, |
|
"learning_rate": 1.973633299908365e-05, |
|
"loss": 1.6073, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.1695457453614843, |
|
"grad_norm": 2.5069237836308176, |
|
"learning_rate": 1.9710240264245005e-05, |
|
"loss": 1.5877, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.1727447216890595, |
|
"grad_norm": 2.4165608550489197, |
|
"learning_rate": 1.9682935551867132e-05, |
|
"loss": 1.6266, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.17594369801663468, |
|
"grad_norm": 2.5380847108727744, |
|
"learning_rate": 1.9654422269970545e-05, |
|
"loss": 1.5476, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.17914267434420986, |
|
"grad_norm": 2.385470144460665, |
|
"learning_rate": 1.9624703977422624e-05, |
|
"loss": 1.5896, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.18234165067178504, |
|
"grad_norm": 2.457845761779252, |
|
"learning_rate": 1.959378438349338e-05, |
|
"loss": 1.5958, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.1855406269993602, |
|
"grad_norm": 2.4062711922420092, |
|
"learning_rate": 1.956166734739251e-05, |
|
"loss": 1.6074, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.18873960332693537, |
|
"grad_norm": 2.281520144278105, |
|
"learning_rate": 1.95283568777877e-05, |
|
"loss": 1.6134, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.19193857965451055, |
|
"grad_norm": 2.5102158955477956, |
|
"learning_rate": 1.9493857132304295e-05, |
|
"loss": 1.5395, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.19513755598208574, |
|
"grad_norm": 2.415759547538384, |
|
"learning_rate": 1.9458172417006347e-05, |
|
"loss": 1.5943, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.19833653230966092, |
|
"grad_norm": 2.3874207818256084, |
|
"learning_rate": 1.942130718585919e-05, |
|
"loss": 1.5539, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.20153550863723607, |
|
"grad_norm": 2.3291818511607625, |
|
"learning_rate": 1.938326604017349e-05, |
|
"loss": 1.5951, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.20473448496481125, |
|
"grad_norm": 2.3503279349130612, |
|
"learning_rate": 1.9344053728030952e-05, |
|
"loss": 1.5543, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.20793346129238643, |
|
"grad_norm": 2.367606596377197, |
|
"learning_rate": 1.9303675143691683e-05, |
|
"loss": 1.5767, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.21113243761996162, |
|
"grad_norm": 2.3105097918946624, |
|
"learning_rate": 1.9262135326983326e-05, |
|
"loss": 1.5641, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.2143314139475368, |
|
"grad_norm": 2.3186275383828057, |
|
"learning_rate": 1.921943946267201e-05, |
|
"loss": 1.6119, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.21753039027511195, |
|
"grad_norm": 2.3623182317692635, |
|
"learning_rate": 1.9175592879815217e-05, |
|
"loss": 1.5995, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.22072936660268713, |
|
"grad_norm": 2.3234032719145383, |
|
"learning_rate": 1.9130601051096655e-05, |
|
"loss": 1.5399, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.22392834293026231, |
|
"grad_norm": 2.2619822250922534, |
|
"learning_rate": 1.9084469592143154e-05, |
|
"loss": 1.5924, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.2271273192578375, |
|
"grad_norm": 2.614900556151945, |
|
"learning_rate": 1.9037204260823788e-05, |
|
"loss": 1.5542, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.23032629558541268, |
|
"grad_norm": 2.4810633670440088, |
|
"learning_rate": 1.89888109565312e-05, |
|
"loss": 1.5974, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.23352527191298783, |
|
"grad_norm": 2.2469047163095337, |
|
"learning_rate": 1.893929571944527e-05, |
|
"loss": 1.5219, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.236724248240563, |
|
"grad_norm": 2.4290730257347395, |
|
"learning_rate": 1.8888664729779205e-05, |
|
"loss": 1.596, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.2399232245681382, |
|
"grad_norm": 2.486005754480233, |
|
"learning_rate": 1.883692430700818e-05, |
|
"loss": 1.5715, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.24312220089571338, |
|
"grad_norm": 2.3308186805659727, |
|
"learning_rate": 1.8784080909080568e-05, |
|
"loss": 1.49, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.24632117722328856, |
|
"grad_norm": 2.2000178419437275, |
|
"learning_rate": 1.8730141131611882e-05, |
|
"loss": 1.5831, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.2495201535508637, |
|
"grad_norm": 2.2521923554518763, |
|
"learning_rate": 1.867511170706157e-05, |
|
"loss": 1.523, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.2527191298784389, |
|
"grad_norm": 2.4262324307986596, |
|
"learning_rate": 1.861899950389269e-05, |
|
"loss": 1.5276, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.2559181062060141, |
|
"grad_norm": 2.247962152190113, |
|
"learning_rate": 1.856181152571463e-05, |
|
"loss": 1.6288, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.2591170825335892, |
|
"grad_norm": 2.3784897154509914, |
|
"learning_rate": 1.850355491040897e-05, |
|
"loss": 1.6227, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.26231605886116444, |
|
"grad_norm": 2.4615020230179723, |
|
"learning_rate": 1.8444236929238556e-05, |
|
"loss": 1.6161, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.2655150351887396, |
|
"grad_norm": 2.553794750075187, |
|
"learning_rate": 1.8383864985939944e-05, |
|
"loss": 1.5419, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.2687140115163148, |
|
"grad_norm": 2.2750920584095073, |
|
"learning_rate": 1.8322446615799317e-05, |
|
"loss": 1.6044, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.27191298784388995, |
|
"grad_norm": 2.309582764312615, |
|
"learning_rate": 1.825998948471197e-05, |
|
"loss": 1.5686, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.2751119641714651, |
|
"grad_norm": 2.563992220208821, |
|
"learning_rate": 1.819650138822548e-05, |
|
"loss": 1.5716, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.2783109404990403, |
|
"grad_norm": 2.3463821157538463, |
|
"learning_rate": 1.8131990250566733e-05, |
|
"loss": 1.488, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.28150991682661547, |
|
"grad_norm": 2.4105968132480564, |
|
"learning_rate": 1.8066464123652857e-05, |
|
"loss": 1.4974, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.2847088931541907, |
|
"grad_norm": 2.3911862739855487, |
|
"learning_rate": 1.7999931186086225e-05, |
|
"loss": 1.5246, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.28790786948176583, |
|
"grad_norm": 2.521273873337624, |
|
"learning_rate": 1.793239974213364e-05, |
|
"loss": 1.5443, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.291106845809341, |
|
"grad_norm": 2.1371439492853987, |
|
"learning_rate": 1.786387822068987e-05, |
|
"loss": 1.5025, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.2943058221369162, |
|
"grad_norm": 2.3936981590007766, |
|
"learning_rate": 1.7794375174225565e-05, |
|
"loss": 1.6025, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.29750479846449135, |
|
"grad_norm": 2.353176497945942, |
|
"learning_rate": 1.7723899277719815e-05, |
|
"loss": 1.585, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.30070377479206656, |
|
"grad_norm": 2.430530486800993, |
|
"learning_rate": 1.7652459327577377e-05, |
|
"loss": 1.5918, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.3039027511196417, |
|
"grad_norm": 2.173964316210541, |
|
"learning_rate": 1.7580064240530746e-05, |
|
"loss": 1.5182, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.30710172744721687, |
|
"grad_norm": 2.5772299950374418, |
|
"learning_rate": 1.7506723052527243e-05, |
|
"loss": 1.5798, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.3103007037747921, |
|
"grad_norm": 2.4991529420639234, |
|
"learning_rate": 1.7432444917601183e-05, |
|
"loss": 1.5206, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.31349968010236723, |
|
"grad_norm": 2.3323983247114857, |
|
"learning_rate": 1.735723910673132e-05, |
|
"loss": 1.5071, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.31669865642994244, |
|
"grad_norm": 2.3103580608228227, |
|
"learning_rate": 1.7281115006683687e-05, |
|
"loss": 1.5202, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.3198976327575176, |
|
"grad_norm": 2.4434232834822955, |
|
"learning_rate": 1.7204082118840035e-05, |
|
"loss": 1.5238, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.32309660908509275, |
|
"grad_norm": 2.2314302298797566, |
|
"learning_rate": 1.712615005801185e-05, |
|
"loss": 1.5228, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.32629558541266795, |
|
"grad_norm": 2.2505362844790686, |
|
"learning_rate": 1.704732855124037e-05, |
|
"loss": 1.5464, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.3294945617402431, |
|
"grad_norm": 2.335383707358806, |
|
"learning_rate": 1.6967627436582445e-05, |
|
"loss": 1.4524, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.3326935380678183, |
|
"grad_norm": 2.243258802450963, |
|
"learning_rate": 1.6887056661882644e-05, |
|
"loss": 1.5224, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.33589251439539347, |
|
"grad_norm": 2.39793844391146, |
|
"learning_rate": 1.6805626283531592e-05, |
|
"loss": 1.5283, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.3390914907229686, |
|
"grad_norm": 2.5442566113713445, |
|
"learning_rate": 1.6723346465210815e-05, |
|
"loss": 1.5731, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.34229046705054383, |
|
"grad_norm": 2.2816938873410115, |
|
"learning_rate": 1.6640227476624163e-05, |
|
"loss": 1.5324, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.345489443378119, |
|
"grad_norm": 2.34576166973507, |
|
"learning_rate": 1.655627969221598e-05, |
|
"loss": 1.5316, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.3486884197056942, |
|
"grad_norm": 2.4410014523875287, |
|
"learning_rate": 1.6471513589876247e-05, |
|
"loss": 1.5316, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.35188739603326935, |
|
"grad_norm": 2.2216487614131126, |
|
"learning_rate": 1.638593974963278e-05, |
|
"loss": 1.466, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.3550863723608445, |
|
"grad_norm": 2.4275497144081086, |
|
"learning_rate": 1.6299568852330703e-05, |
|
"loss": 1.5016, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.3582853486884197, |
|
"grad_norm": 2.236501922766204, |
|
"learning_rate": 1.6212411678299306e-05, |
|
"loss": 1.5561, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.36148432501599487, |
|
"grad_norm": 2.27631043483282, |
|
"learning_rate": 1.612447910600652e-05, |
|
"loss": 1.5141, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.3646833013435701, |
|
"grad_norm": 2.331933284658563, |
|
"learning_rate": 1.6035782110701125e-05, |
|
"loss": 1.5075, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.36788227767114523, |
|
"grad_norm": 2.248455320446842, |
|
"learning_rate": 1.594633176304287e-05, |
|
"loss": 1.5531, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.3710812539987204, |
|
"grad_norm": 2.3056857138761173, |
|
"learning_rate": 1.5856139227720714e-05, |
|
"loss": 1.5867, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.3742802303262956, |
|
"grad_norm": 2.2615976412633962, |
|
"learning_rate": 1.5765215762059304e-05, |
|
"loss": 1.5137, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.37747920665387075, |
|
"grad_norm": 2.2246230951068404, |
|
"learning_rate": 1.5673572714613886e-05, |
|
"loss": 1.4676, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.38067818298144596, |
|
"grad_norm": 2.3714970265633224, |
|
"learning_rate": 1.558122152375387e-05, |
|
"loss": 1.5649, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.3838771593090211, |
|
"grad_norm": 2.4424554877615505, |
|
"learning_rate": 1.548817371623513e-05, |
|
"loss": 1.5203, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.38707613563659626, |
|
"grad_norm": 2.3241867748802862, |
|
"learning_rate": 1.539444090576132e-05, |
|
"loss": 1.532, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.3902751119641715, |
|
"grad_norm": 2.2841806700533454, |
|
"learning_rate": 1.5300034791534297e-05, |
|
"loss": 1.5341, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.3934740882917466, |
|
"grad_norm": 2.3823095399745, |
|
"learning_rate": 1.520496715679391e-05, |
|
"loss": 1.5606, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.39667306461932184, |
|
"grad_norm": 2.404156549397838, |
|
"learning_rate": 1.5109249867347276e-05, |
|
"loss": 1.4964, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.399872040946897, |
|
"grad_norm": 2.3045879883870226, |
|
"learning_rate": 1.5012894870087751e-05, |
|
"loss": 1.4785, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.40307101727447214, |
|
"grad_norm": 2.2564282819228376, |
|
"learning_rate": 1.4915914191503792e-05, |
|
"loss": 1.5593, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.40626999360204735, |
|
"grad_norm": 2.1899628345006508, |
|
"learning_rate": 1.4818319936177885e-05, |
|
"loss": 1.5385, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.4094689699296225, |
|
"grad_norm": 2.6345048580064003, |
|
"learning_rate": 1.4720124285275703e-05, |
|
"loss": 1.5198, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.4126679462571977, |
|
"grad_norm": 2.4733399052284613, |
|
"learning_rate": 1.4621339495025731e-05, |
|
"loss": 1.5123, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.41586692258477287, |
|
"grad_norm": 2.2853139912229663, |
|
"learning_rate": 1.4521977895189518e-05, |
|
"loss": 1.586, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.419065898912348, |
|
"grad_norm": 2.348285993958346, |
|
"learning_rate": 1.4422051887522735e-05, |
|
"loss": 1.4982, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.42226487523992323, |
|
"grad_norm": 2.48181732428636, |
|
"learning_rate": 1.4321573944227254e-05, |
|
"loss": 1.4932, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.4254638515674984, |
|
"grad_norm": 2.2700624107334146, |
|
"learning_rate": 1.4220556606394465e-05, |
|
"loss": 1.477, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.4286628278950736, |
|
"grad_norm": 2.3121958973335763, |
|
"learning_rate": 1.4119012482439929e-05, |
|
"loss": 1.5362, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.43186180422264875, |
|
"grad_norm": 2.9408602035120657, |
|
"learning_rate": 1.4016954246529697e-05, |
|
"loss": 1.5094, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.4350607805502239, |
|
"grad_norm": 2.4183707899529074, |
|
"learning_rate": 1.3914394636998374e-05, |
|
"loss": 1.5137, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.4382597568777991, |
|
"grad_norm": 2.3165836903959365, |
|
"learning_rate": 1.3811346454759211e-05, |
|
"loss": 1.4942, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.44145873320537427, |
|
"grad_norm": 2.280419639027186, |
|
"learning_rate": 1.3707822561706336e-05, |
|
"loss": 1.5327, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.4446577095329495, |
|
"grad_norm": 2.152749313872907, |
|
"learning_rate": 1.3603835879109442e-05, |
|
"loss": 1.4839, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.44785668586052463, |
|
"grad_norm": 2.3172798106180155, |
|
"learning_rate": 1.349939938600099e-05, |
|
"loss": 1.4732, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.4510556621880998, |
|
"grad_norm": 2.328153860662641, |
|
"learning_rate": 1.3394526117556277e-05, |
|
"loss": 1.4322, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.454254638515675, |
|
"grad_norm": 2.3538091734432243, |
|
"learning_rate": 1.3289229163466421e-05, |
|
"loss": 1.4408, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.45745361484325014, |
|
"grad_norm": 2.4036873663190295, |
|
"learning_rate": 1.3183521666304611e-05, |
|
"loss": 1.4778, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.46065259117082535, |
|
"grad_norm": 2.3520908941332785, |
|
"learning_rate": 1.3077416819885707e-05, |
|
"loss": 1.4713, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.4638515674984005, |
|
"grad_norm": 2.3375152975193783, |
|
"learning_rate": 1.297092786761946e-05, |
|
"loss": 1.4567, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.46705054382597566, |
|
"grad_norm": 2.2072268508544655, |
|
"learning_rate": 1.2864068100857565e-05, |
|
"loss": 1.5374, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.47024952015355087, |
|
"grad_norm": 2.283182102299493, |
|
"learning_rate": 1.2756850857234686e-05, |
|
"loss": 1.4176, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.473448496481126, |
|
"grad_norm": 2.1958718266912096, |
|
"learning_rate": 1.2649289519003739e-05, |
|
"loss": 1.4892, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.47664747280870123, |
|
"grad_norm": 2.2999635338111064, |
|
"learning_rate": 1.2541397511365584e-05, |
|
"loss": 1.4835, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.4798464491362764, |
|
"grad_norm": 2.3494145089078597, |
|
"learning_rate": 1.2433188300793399e-05, |
|
"loss": 1.4686, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.48304542546385154, |
|
"grad_norm": 2.355567026544742, |
|
"learning_rate": 1.2324675393351818e-05, |
|
"loss": 1.4766, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.48624440179142675, |
|
"grad_norm": 2.527624790839194, |
|
"learning_rate": 1.221587233301123e-05, |
|
"loss": 1.4599, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.4894433781190019, |
|
"grad_norm": 2.0567117909106547, |
|
"learning_rate": 1.2106792699957264e-05, |
|
"loss": 1.4048, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.4926423544465771, |
|
"grad_norm": 2.3023755472980056, |
|
"learning_rate": 1.1997450108895807e-05, |
|
"loss": 1.4321, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.49584133077415227, |
|
"grad_norm": 2.5825599532144987, |
|
"learning_rate": 1.1887858207353678e-05, |
|
"loss": 1.5275, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.4990403071017274, |
|
"grad_norm": 2.6267220106883284, |
|
"learning_rate": 1.1778030673975227e-05, |
|
"loss": 1.4779, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.5022392834293026, |
|
"grad_norm": 2.470151589555483, |
|
"learning_rate": 1.166798121681505e-05, |
|
"loss": 1.408, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.5054382597568778, |
|
"grad_norm": 2.3968962282049255, |
|
"learning_rate": 1.1557723571627016e-05, |
|
"loss": 1.475, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.508637236084453, |
|
"grad_norm": 2.3568776865369583, |
|
"learning_rate": 1.1447271500149849e-05, |
|
"loss": 1.4876, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.5118362124120281, |
|
"grad_norm": 2.2087366194914435, |
|
"learning_rate": 1.1336638788389473e-05, |
|
"loss": 1.4905, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.5150351887396033, |
|
"grad_norm": 2.1827765863333073, |
|
"learning_rate": 1.122583924489832e-05, |
|
"loss": 1.4486, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.5182341650671785, |
|
"grad_norm": 2.1973594046250753, |
|
"learning_rate": 1.1114886699051803e-05, |
|
"loss": 1.426, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.5214331413947537, |
|
"grad_norm": 2.34468852888554, |
|
"learning_rate": 1.1003794999322246e-05, |
|
"loss": 1.4977, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.5246321177223289, |
|
"grad_norm": 2.4597383453672723, |
|
"learning_rate": 1.089257801155037e-05, |
|
"loss": 1.3699, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.527831094049904, |
|
"grad_norm": 2.4169566923172936, |
|
"learning_rate": 1.0781249617214642e-05, |
|
"loss": 1.4719, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.5310300703774792, |
|
"grad_norm": 2.4012841984365316, |
|
"learning_rate": 1.0669823711698668e-05, |
|
"loss": 1.4233, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.5342290467050543, |
|
"grad_norm": 2.6293301110509524, |
|
"learning_rate": 1.0558314202556866e-05, |
|
"loss": 1.4097, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.5374280230326296, |
|
"grad_norm": 2.31281873866612, |
|
"learning_rate": 1.0446735007778563e-05, |
|
"loss": 1.4582, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.5406269993602048, |
|
"grad_norm": 2.4551815500859777, |
|
"learning_rate": 1.0335100054050877e-05, |
|
"loss": 1.4573, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.5438259756877799, |
|
"grad_norm": 2.1492777427961123, |
|
"learning_rate": 1.0223423275020431e-05, |
|
"loss": 1.4539, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.5470249520153551, |
|
"grad_norm": 2.3528387051422706, |
|
"learning_rate": 1.0111718609554254e-05, |
|
"loss": 1.429, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.5502239283429302, |
|
"grad_norm": 2.4037082928038083, |
|
"learning_rate": 1e-05, |
|
"loss": 1.4023, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.5534229046705055, |
|
"grad_norm": 2.4169078651865123, |
|
"learning_rate": 9.888281390445747e-06, |
|
"loss": 1.4645, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.5566218809980806, |
|
"grad_norm": 2.3509642991185946, |
|
"learning_rate": 9.776576724979572e-06, |
|
"loss": 1.4286, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.5598208573256558, |
|
"grad_norm": 2.457238153216398, |
|
"learning_rate": 9.664899945949128e-06, |
|
"loss": 1.4427, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.5630198336532309, |
|
"grad_norm": 2.371528253431419, |
|
"learning_rate": 9.553264992221442e-06, |
|
"loss": 1.4258, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.5662188099808061, |
|
"grad_norm": 2.335210876008984, |
|
"learning_rate": 9.441685797443138e-06, |
|
"loss": 1.3789, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.5694177863083814, |
|
"grad_norm": 2.295598759088956, |
|
"learning_rate": 9.330176288301332e-06, |
|
"loss": 1.4145, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.5726167626359565, |
|
"grad_norm": 2.2663716208289317, |
|
"learning_rate": 9.21875038278536e-06, |
|
"loss": 1.4256, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.5758157389635317, |
|
"grad_norm": 2.223770397466441, |
|
"learning_rate": 9.107421988449632e-06, |
|
"loss": 1.4026, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.5790147152911068, |
|
"grad_norm": 2.2184853669651057, |
|
"learning_rate": 8.996205000677758e-06, |
|
"loss": 1.4725, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.582213691618682, |
|
"grad_norm": 2.8341820881260724, |
|
"learning_rate": 8.885113300948199e-06, |
|
"loss": 1.3573, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.5854126679462572, |
|
"grad_norm": 2.365757078522109, |
|
"learning_rate": 8.774160755101685e-06, |
|
"loss": 1.3802, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.5886116442738324, |
|
"grad_norm": 2.8561602749028694, |
|
"learning_rate": 8.663361211610529e-06, |
|
"loss": 1.4659, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.5918106206014075, |
|
"grad_norm": 2.4443049468820925, |
|
"learning_rate": 8.552728499850153e-06, |
|
"loss": 1.4615, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.5950095969289827, |
|
"grad_norm": 2.3972164664499944, |
|
"learning_rate": 8.442276428372986e-06, |
|
"loss": 1.4139, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.5982085732565579, |
|
"grad_norm": 2.4891884762897907, |
|
"learning_rate": 8.332018783184952e-06, |
|
"loss": 1.4309, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.6014075495841331, |
|
"grad_norm": 2.3294746323733686, |
|
"learning_rate": 8.221969326024776e-06, |
|
"loss": 1.39, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.6046065259117083, |
|
"grad_norm": 2.238699313962783, |
|
"learning_rate": 8.112141792646326e-06, |
|
"loss": 1.3905, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.6078055022392834, |
|
"grad_norm": 2.3435384537017003, |
|
"learning_rate": 8.002549891104196e-06, |
|
"loss": 1.5152, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.6110044785668586, |
|
"grad_norm": 2.3600066440243377, |
|
"learning_rate": 7.89320730004274e-06, |
|
"loss": 1.4606, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.6142034548944337, |
|
"grad_norm": 2.2541559813958467, |
|
"learning_rate": 7.784127666988774e-06, |
|
"loss": 1.4048, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.617402431222009, |
|
"grad_norm": 2.4458519460871693, |
|
"learning_rate": 7.675324606648187e-06, |
|
"loss": 1.4341, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.6206014075495841, |
|
"grad_norm": 2.4908912138238044, |
|
"learning_rate": 7.566811699206604e-06, |
|
"loss": 1.3651, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.6238003838771593, |
|
"grad_norm": 2.4994047187151875, |
|
"learning_rate": 7.458602488634416e-06, |
|
"loss": 1.4161, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.6269993602047345, |
|
"grad_norm": 2.6056798824189706, |
|
"learning_rate": 7.350710480996266e-06, |
|
"loss": 1.4776, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.6301983365323096, |
|
"grad_norm": 2.2341977902816996, |
|
"learning_rate": 7.243149142765317e-06, |
|
"loss": 1.4339, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.6333973128598849, |
|
"grad_norm": 2.4523191182695783, |
|
"learning_rate": 7.135931899142438e-06, |
|
"loss": 1.3499, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.63659628918746, |
|
"grad_norm": 3.348154064863444, |
|
"learning_rate": 7.029072132380543e-06, |
|
"loss": 1.3616, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.6397952655150352, |
|
"grad_norm": 2.6387667521173874, |
|
"learning_rate": 6.922583180114299e-06, |
|
"loss": 1.4326, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.6429942418426103, |
|
"grad_norm": 2.5816480070087633, |
|
"learning_rate": 6.816478333695392e-06, |
|
"loss": 1.4003, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.6461932181701855, |
|
"grad_norm": 2.460254187908023, |
|
"learning_rate": 6.710770836533584e-06, |
|
"loss": 1.4448, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.6493921944977608, |
|
"grad_norm": 2.308012258918398, |
|
"learning_rate": 6.605473882443725e-06, |
|
"loss": 1.4194, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.6525911708253359, |
|
"grad_norm": 2.2607156095613195, |
|
"learning_rate": 6.5006006139990115e-06, |
|
"loss": 1.3956, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.6557901471529111, |
|
"grad_norm": 2.578914066440304, |
|
"learning_rate": 6.396164120890562e-06, |
|
"loss": 1.4555, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.6589891234804862, |
|
"grad_norm": 2.3468191923371746, |
|
"learning_rate": 6.292177438293665e-06, |
|
"loss": 1.373, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.6621880998080614, |
|
"grad_norm": 2.3299643839647395, |
|
"learning_rate": 6.188653545240793e-06, |
|
"loss": 1.4025, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.6653870761356366, |
|
"grad_norm": 2.5769450903031017, |
|
"learning_rate": 6.085605363001628e-06, |
|
"loss": 1.3718, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.6685860524632118, |
|
"grad_norm": 2.5207006902093894, |
|
"learning_rate": 5.983045753470308e-06, |
|
"loss": 1.3983, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.6717850287907869, |
|
"grad_norm": 2.2867546268489725, |
|
"learning_rate": 5.880987517560075e-06, |
|
"loss": 1.4102, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.6749840051183621, |
|
"grad_norm": 2.5843293143331936, |
|
"learning_rate": 5.779443393605536e-06, |
|
"loss": 1.3837, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.6781829814459372, |
|
"grad_norm": 2.466294876267873, |
|
"learning_rate": 5.678426055772746e-06, |
|
"loss": 1.4049, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.6813819577735125, |
|
"grad_norm": 2.3003489185145205, |
|
"learning_rate": 5.577948112477271e-06, |
|
"loss": 1.37, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.6845809341010877, |
|
"grad_norm": 2.4936655314476495, |
|
"learning_rate": 5.478022104810483e-06, |
|
"loss": 1.4096, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.6877799104286628, |
|
"grad_norm": 2.47082758044549, |
|
"learning_rate": 5.378660504974271e-06, |
|
"loss": 1.3426, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.690978886756238, |
|
"grad_norm": 2.3960847639394673, |
|
"learning_rate": 5.2798757147242986e-06, |
|
"loss": 1.3714, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.6941778630838131, |
|
"grad_norm": 2.4530021700307993, |
|
"learning_rate": 5.1816800638221176e-06, |
|
"loss": 1.414, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.6973768394113884, |
|
"grad_norm": 2.2603715033506453, |
|
"learning_rate": 5.084085808496211e-06, |
|
"loss": 1.3491, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.7005758157389635, |
|
"grad_norm": 2.608868595746518, |
|
"learning_rate": 4.987105129912252e-06, |
|
"loss": 1.3972, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.7037747920665387, |
|
"grad_norm": 2.297568946845784, |
|
"learning_rate": 4.890750132652724e-06, |
|
"loss": 1.4044, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.7069737683941139, |
|
"grad_norm": 2.90064685740098, |
|
"learning_rate": 4.795032843206091e-06, |
|
"loss": 1.4374, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.710172744721689, |
|
"grad_norm": 2.443681227970343, |
|
"learning_rate": 4.699965208465702e-06, |
|
"loss": 1.4167, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.7133717210492643, |
|
"grad_norm": 2.425095972610202, |
|
"learning_rate": 4.605559094238681e-06, |
|
"loss": 1.4242, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.7165706973768394, |
|
"grad_norm": 2.488579755452615, |
|
"learning_rate": 4.511826283764872e-06, |
|
"loss": 1.3888, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.7197696737044146, |
|
"grad_norm": 2.550407448039906, |
|
"learning_rate": 4.418778476246132e-06, |
|
"loss": 1.4245, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.7229686500319897, |
|
"grad_norm": 2.3572217906367974, |
|
"learning_rate": 4.326427285386118e-06, |
|
"loss": 1.3536, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.7261676263595649, |
|
"grad_norm": 2.898475510497588, |
|
"learning_rate": 4.234784237940705e-06, |
|
"loss": 1.3971, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.7293666026871402, |
|
"grad_norm": 2.5246296926850937, |
|
"learning_rate": 4.143860772279287e-06, |
|
"loss": 1.3661, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.7325655790147153, |
|
"grad_norm": 2.453168323628695, |
|
"learning_rate": 4.053668236957135e-06, |
|
"loss": 1.3489, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.7357645553422905, |
|
"grad_norm": 2.648255030215048, |
|
"learning_rate": 3.964217889298882e-06, |
|
"loss": 1.3665, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.7389635316698656, |
|
"grad_norm": 2.4691460031589014, |
|
"learning_rate": 3.875520893993482e-06, |
|
"loss": 1.3954, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.7421625079974408, |
|
"grad_norm": 2.578218572991363, |
|
"learning_rate": 3.787588321700697e-06, |
|
"loss": 1.3518, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.745361484325016, |
|
"grad_norm": 2.2329429977704245, |
|
"learning_rate": 3.7004311476692977e-06, |
|
"loss": 1.4178, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.7485604606525912, |
|
"grad_norm": 2.352446489946909, |
|
"learning_rate": 3.6140602503672217e-06, |
|
"loss": 1.3538, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.7517594369801663, |
|
"grad_norm": 2.7561385282190076, |
|
"learning_rate": 3.528486410123758e-06, |
|
"loss": 1.4613, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.7549584133077415, |
|
"grad_norm": 2.138602055751757, |
|
"learning_rate": 3.443720307784022e-06, |
|
"loss": 1.3133, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.7581573896353166, |
|
"grad_norm": 2.3177590399711643, |
|
"learning_rate": 3.359772523375837e-06, |
|
"loss": 1.297, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.7613563659628919, |
|
"grad_norm": 2.3451441921767064, |
|
"learning_rate": 3.276653534789185e-06, |
|
"loss": 1.2972, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.7645553422904671, |
|
"grad_norm": 2.313314457358321, |
|
"learning_rate": 3.1943737164684094e-06, |
|
"loss": 1.367, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.7677543186180422, |
|
"grad_norm": 2.3536589940255364, |
|
"learning_rate": 3.11294333811736e-06, |
|
"loss": 1.3865, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.7709532949456174, |
|
"grad_norm": 2.359964164926833, |
|
"learning_rate": 3.032372563417556e-06, |
|
"loss": 1.3596, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.7741522712731925, |
|
"grad_norm": 2.2343745022509185, |
|
"learning_rate": 2.952671448759631e-06, |
|
"loss": 1.4056, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.7773512476007678, |
|
"grad_norm": 2.5543445735600305, |
|
"learning_rate": 2.8738499419881517e-06, |
|
"loss": 1.3209, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.780550223928343, |
|
"grad_norm": 2.2930909232447445, |
|
"learning_rate": 2.795917881159973e-06, |
|
"loss": 1.3144, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.7837492002559181, |
|
"grad_norm": 2.455120126910525, |
|
"learning_rate": 2.718884993316311e-06, |
|
"loss": 1.3333, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.7869481765834933, |
|
"grad_norm": 2.4966852667217934, |
|
"learning_rate": 2.642760893268684e-06, |
|
"loss": 1.3509, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.7901471529110684, |
|
"grad_norm": 2.4028166421914867, |
|
"learning_rate": 2.5675550823988184e-06, |
|
"loss": 1.3491, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.7933461292386437, |
|
"grad_norm": 2.6710950106791205, |
|
"learning_rate": 2.493276947472756e-06, |
|
"loss": 1.3498, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.7965451055662188, |
|
"grad_norm": 2.426831641425111, |
|
"learning_rate": 2.4199357594692564e-06, |
|
"loss": 1.2967, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.799744081893794, |
|
"grad_norm": 2.4719107581073527, |
|
"learning_rate": 2.347540672422627e-06, |
|
"loss": 1.3236, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.8029430582213691, |
|
"grad_norm": 2.516296634331389, |
|
"learning_rate": 2.2761007222801866e-06, |
|
"loss": 1.3533, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.8061420345489443, |
|
"grad_norm": 2.332883844489818, |
|
"learning_rate": 2.2056248257744383e-06, |
|
"loss": 1.3402, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.8093410108765196, |
|
"grad_norm": 2.470627298913124, |
|
"learning_rate": 2.136121779310132e-06, |
|
"loss": 1.4162, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.8125399872040947, |
|
"grad_norm": 2.5704350069752833, |
|
"learning_rate": 2.067600257866358e-06, |
|
"loss": 1.3798, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.8157389635316699, |
|
"grad_norm": 2.3710654190200855, |
|
"learning_rate": 2.000068813913777e-06, |
|
"loss": 1.2724, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.818937939859245, |
|
"grad_norm": 2.558360997816421, |
|
"learning_rate": 1.933535876347141e-06, |
|
"loss": 1.3785, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.8221369161868202, |
|
"grad_norm": 2.5100998715469354, |
|
"learning_rate": 1.8680097494332682e-06, |
|
"loss": 1.2916, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.8253358925143954, |
|
"grad_norm": 2.6532733728905207, |
|
"learning_rate": 1.8034986117745245e-06, |
|
"loss": 1.3417, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.8285348688419706, |
|
"grad_norm": 2.3261659285032663, |
|
"learning_rate": 1.7400105152880331e-06, |
|
"loss": 1.3738, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.8317338451695457, |
|
"grad_norm": 2.4853808407220725, |
|
"learning_rate": 1.6775533842006853e-06, |
|
"loss": 1.2736, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.8349328214971209, |
|
"grad_norm": 2.697298725951212, |
|
"learning_rate": 1.6161350140600606e-06, |
|
"loss": 1.2781, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.838131797824696, |
|
"grad_norm": 2.355635659400595, |
|
"learning_rate": 1.555763070761448e-06, |
|
"loss": 1.361, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.8413307741522713, |
|
"grad_norm": 2.6842436040456534, |
|
"learning_rate": 1.496445089591031e-06, |
|
"loss": 1.4244, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.8445297504798465, |
|
"grad_norm": 2.4556961691024055, |
|
"learning_rate": 1.4381884742853723e-06, |
|
"loss": 1.3541, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.8477287268074216, |
|
"grad_norm": 2.444589907894017, |
|
"learning_rate": 1.381000496107313e-06, |
|
"loss": 1.3687, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.8509277031349968, |
|
"grad_norm": 2.4233465965102803, |
|
"learning_rate": 1.3248882929384321e-06, |
|
"loss": 1.3331, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.8541266794625719, |
|
"grad_norm": 2.824638190223306, |
|
"learning_rate": 1.2698588683881185e-06, |
|
"loss": 1.3307, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.8573256557901472, |
|
"grad_norm": 2.38649028106469, |
|
"learning_rate": 1.215919090919434e-06, |
|
"loss": 1.3524, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.8605246321177223, |
|
"grad_norm": 2.450733408516033, |
|
"learning_rate": 1.1630756929918218e-06, |
|
"loss": 1.347, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.8637236084452975, |
|
"grad_norm": 2.4002381429681616, |
|
"learning_rate": 1.111335270220798e-06, |
|
"loss": 1.39, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.8669225847728727, |
|
"grad_norm": 2.700669673337657, |
|
"learning_rate": 1.060704280554733e-06, |
|
"loss": 1.297, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.8701215611004478, |
|
"grad_norm": 2.6066489722882475, |
|
"learning_rate": 1.0111890434688009e-06, |
|
"loss": 1.3923, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.8733205374280231, |
|
"grad_norm": 2.572737149493473, |
|
"learning_rate": 9.62795739176212e-07, |
|
"loss": 1.369, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.8765195137555982, |
|
"grad_norm": 2.2233501199427397, |
|
"learning_rate": 9.155304078568495e-07, |
|
"loss": 1.3645, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.8797184900831734, |
|
"grad_norm": 2.6489650845893786, |
|
"learning_rate": 8.693989489033494e-07, |
|
"loss": 1.3737, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.8829174664107485, |
|
"grad_norm": 2.5371100403473608, |
|
"learning_rate": 8.244071201847826e-07, |
|
"loss": 1.3034, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.8861164427383237, |
|
"grad_norm": 2.432385883291253, |
|
"learning_rate": 7.805605373279934e-07, |
|
"loss": 1.3882, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.889315419065899, |
|
"grad_norm": 2.4449203754420465, |
|
"learning_rate": 7.378646730166783e-07, |
|
"loss": 1.3167, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.8925143953934741, |
|
"grad_norm": 2.441267548142986, |
|
"learning_rate": 6.963248563083203e-07, |
|
"loss": 1.3306, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.8957133717210493, |
|
"grad_norm": 2.498359509259829, |
|
"learning_rate": 6.559462719690501e-07, |
|
"loss": 1.3204, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.8989123480486244, |
|
"grad_norm": 2.4688012231508276, |
|
"learning_rate": 6.167339598265109e-07, |
|
"loss": 1.3334, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.9021113243761996, |
|
"grad_norm": 2.2093568500449785, |
|
"learning_rate": 5.78692814140811e-07, |
|
"loss": 1.3192, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.9053103007037748, |
|
"grad_norm": 2.4077314191659234, |
|
"learning_rate": 5.418275829936537e-07, |
|
"loss": 1.2858, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.90850927703135, |
|
"grad_norm": 2.4379295125735045, |
|
"learning_rate": 5.06142867695708e-07, |
|
"loss": 1.3432, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.9117082533589251, |
|
"grad_norm": 2.4162225057600475, |
|
"learning_rate": 4.716431222122997e-07, |
|
"loss": 1.4049, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.9149072296865003, |
|
"grad_norm": 2.383814444310597, |
|
"learning_rate": 4.3833265260749157e-07, |
|
"loss": 1.3355, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.9181062060140754, |
|
"grad_norm": 2.427977187387928, |
|
"learning_rate": 4.062156165066211e-07, |
|
"loss": 1.3702, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.9213051823416507, |
|
"grad_norm": 2.256695950593894, |
|
"learning_rate": 3.752960225773772e-07, |
|
"loss": 1.3701, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.9245041586692259, |
|
"grad_norm": 2.447985505592198, |
|
"learning_rate": 3.4557773002945607e-07, |
|
"loss": 1.3774, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.927703134996801, |
|
"grad_norm": 2.403393034990299, |
|
"learning_rate": 3.170644481328711e-07, |
|
"loss": 1.348, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.9309021113243762, |
|
"grad_norm": 2.213446206865036, |
|
"learning_rate": 2.8975973575499526e-07, |
|
"loss": 1.3011, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.9341010876519513, |
|
"grad_norm": 2.310065753213684, |
|
"learning_rate": 2.636670009163522e-07, |
|
"loss": 1.3022, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.9373000639795266, |
|
"grad_norm": 2.3987119937006676, |
|
"learning_rate": 2.3878950036524963e-07, |
|
"loss": 1.2379, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.9404990403071017, |
|
"grad_norm": 2.2743696194136493, |
|
"learning_rate": 2.1513033917129334e-07, |
|
"loss": 1.2969, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.9436980166346769, |
|
"grad_norm": 2.656197726258722, |
|
"learning_rate": 1.9269247033782744e-07, |
|
"loss": 1.2934, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.946896992962252, |
|
"grad_norm": 2.5089508490923897, |
|
"learning_rate": 1.7147869443335463e-07, |
|
"loss": 1.3208, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.9500959692898272, |
|
"grad_norm": 2.742202995722091, |
|
"learning_rate": 1.5149165924199016e-07, |
|
"loss": 1.3615, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.9532949456174025, |
|
"grad_norm": 2.5187291446173066, |
|
"learning_rate": 1.3273385943297746e-07, |
|
"loss": 1.3065, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.9564939219449776, |
|
"grad_norm": 2.604371951037358, |
|
"learning_rate": 1.1520763624931597e-07, |
|
"loss": 1.3141, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.9596928982725528, |
|
"grad_norm": 2.6802125882941485, |
|
"learning_rate": 9.891517721554499e-08, |
|
"loss": 1.4077, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.9628918746001279, |
|
"grad_norm": 2.706788189883898, |
|
"learning_rate": 8.385851586470318e-08, |
|
"loss": 1.3212, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.9660908509277031, |
|
"grad_norm": 2.4793431373091352, |
|
"learning_rate": 7.003953148452036e-08, |
|
"loss": 1.3821, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.9692898272552783, |
|
"grad_norm": 2.278314797678949, |
|
"learning_rate": 5.745994888285311e-08, |
|
"loss": 1.2863, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.9724888035828535, |
|
"grad_norm": 2.559732108312943, |
|
"learning_rate": 4.612133817239905e-08, |
|
"loss": 1.3566, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.9756877799104287, |
|
"grad_norm": 2.2885476469371833, |
|
"learning_rate": 3.602511457473479e-08, |
|
"loss": 1.3009, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.9788867562380038, |
|
"grad_norm": 2.4890698617978617, |
|
"learning_rate": 2.7172538243666057e-08, |
|
"loss": 1.4347, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.982085732565579, |
|
"grad_norm": 2.45559913282601, |
|
"learning_rate": 1.9564714107945804e-08, |
|
"loss": 1.3811, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.9852847088931542, |
|
"grad_norm": 2.4370971366390863, |
|
"learning_rate": 1.3202591733365577e-08, |
|
"loss": 1.3086, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.9884836852207294, |
|
"grad_norm": 2.6654429719413097, |
|
"learning_rate": 8.086965204233688e-09, |
|
"loss": 1.3538, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.9916826615483045, |
|
"grad_norm": 2.5063252266533773, |
|
"learning_rate": 4.218473024261149e-09, |
|
"loss": 1.3368, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.9948816378758797, |
|
"grad_norm": 2.5804016285503906, |
|
"learning_rate": 1.5975980368709843e-09, |
|
"loss": 1.3791, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.9980806142034548, |
|
"grad_norm": 2.5363071477182544, |
|
"learning_rate": 2.2466736492643416e-10, |
|
"loss": 1.275, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 1.3733937740325928, |
|
"eval_runtime": 17.2957, |
|
"eval_samples_per_second": 28.909, |
|
"eval_steps_per_second": 0.925, |
|
"step": 1563 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 1563, |
|
"total_flos": 45448595030016.0, |
|
"train_loss": 1.473227332512385, |
|
"train_runtime": 5771.4551, |
|
"train_samples_per_second": 8.663, |
|
"train_steps_per_second": 0.271 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 1563, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 45448595030016.0, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |