|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.8313847752663029,
  "eval_steps": 500,
  "global_step": 4000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "grad_norm": 122.31757354736328, "learning_rate": 5e-06, "loss": 12.8799, "step": 5 },
    { "epoch": 0.0, "grad_norm": 52.391082763671875, "learning_rate": 1e-05, "loss": 12.6172, "step": 10 },
    { "epoch": 0.0, "grad_norm": 29.490076065063477, "learning_rate": 1.5e-05, "loss": 11.8088, "step": 15 },
    { "epoch": 0.0, "grad_norm": 31.53589630126953, "learning_rate": 2e-05, "loss": 10.9282, "step": 20 },
    { "epoch": 0.01, "grad_norm": 61.79519271850586, "learning_rate": 2.5e-05, "loss": 10.1323, "step": 25 },
    { "epoch": 0.01, "grad_norm": 14.654998779296875, "learning_rate": 3e-05, "loss": 9.4484, "step": 30 },
    { "epoch": 0.01, "grad_norm": 5.740698337554932, "learning_rate": 3.5e-05, "loss": 8.8873, "step": 35 },
    { "epoch": 0.01, "grad_norm": 9.908849716186523, "learning_rate": 4e-05, "loss": 8.3969, "step": 40 },
    { "epoch": 0.01, "grad_norm": 3.360279083251953, "learning_rate": 4.5e-05, "loss": 8.1293, "step": 45 },
    { "epoch": 0.01, "grad_norm": 2.529783248901367, "learning_rate": 5e-05, "loss": 7.9849, "step": 50 },
    { "epoch": 0.01, "grad_norm": 4.2711334228515625, "learning_rate": 5.500000000000001e-05, "loss": 7.9591, "step": 55 },
    { "epoch": 0.01, "grad_norm": 1.8138530254364014, "learning_rate": 6e-05, "loss": 7.8974, "step": 60 },
    { "epoch": 0.01, "grad_norm": 3.322045087814331, "learning_rate": 6.500000000000001e-05, "loss": 7.9111, "step": 65 },
    { "epoch": 0.01, "grad_norm": 2.2868151664733887, "learning_rate": 7e-05, "loss": 7.9229, "step": 70 },
    { "epoch": 0.02, "grad_norm": 6.699502468109131, "learning_rate": 7.500000000000001e-05, "loss": 7.9082, "step": 75 },
    { "epoch": 0.02, "grad_norm": 2.17010760307312, "learning_rate": 8e-05, "loss": 7.914, "step": 80 },
    { "epoch": 0.02, "grad_norm": 6.845041751861572, "learning_rate": 8.5e-05, "loss": 7.8946, "step": 85 },
    { "epoch": 0.02, "grad_norm": 2.585634708404541, "learning_rate": 9e-05, "loss": 7.8897, "step": 90 },
    { "epoch": 0.02, "grad_norm": 4.026464462280273, "learning_rate": 9.5e-05, "loss": 7.8652, "step": 95 },
    { "epoch": 0.02, "grad_norm": 4.074096202850342, "learning_rate": 0.0001, "loss": 7.8336, "step": 100 },
    { "epoch": 0.02, "grad_norm": 4.498573303222656, "learning_rate": 9.999972205865686e-05, "loss": 7.7889, "step": 105 },
    { "epoch": 0.02, "grad_norm": 5.521847724914551, "learning_rate": 9.999888823771751e-05, "loss": 7.7641, "step": 110 },
    { "epoch": 0.02, "grad_norm": 8.344034194946289, "learning_rate": 9.999749854645204e-05, "loss": 7.7462, "step": 115 },
    { "epoch": 0.02, "grad_norm": 4.066705703735352, "learning_rate": 9.99955530003106e-05, "loss": 7.6825, "step": 120 },
    { "epoch": 0.03, "grad_norm": 3.9992802143096924, "learning_rate": 9.99930516209231e-05, "loss": 7.6298, "step": 125 },
    { "epoch": 0.03, "grad_norm": 19.802461624145508, "learning_rate": 9.998999443609897e-05, "loss": 7.6001, "step": 130 },
    { "epoch": 0.03, "grad_norm": 4.808511734008789, "learning_rate": 9.998638147982696e-05, "loss": 7.505, "step": 135 },
    { "epoch": 0.03, "grad_norm": 3.9778096675872803, "learning_rate": 9.998221279227467e-05, "loss": 7.473, "step": 140 },
    { "epoch": 0.03, "grad_norm": 3.969865560531616, "learning_rate": 9.997748841978812e-05, "loss": 7.3771, "step": 145 },
    { "epoch": 0.03, "grad_norm": 6.17680549621582, "learning_rate": 9.997220841489122e-05, "loss": 7.3354, "step": 150 },
    { "epoch": 0.03, "grad_norm": 5.137447357177734, "learning_rate": 9.996637283628528e-05, "loss": 7.228, "step": 155 },
    { "epoch": 0.03, "grad_norm": 3.0998005867004395, "learning_rate": 9.995998174884821e-05, "loss": 7.183, "step": 160 },
    { "epoch": 0.03, "grad_norm": 3.1692376136779785, "learning_rate": 9.995303522363394e-05, "loss": 7.0672, "step": 165 },
    { "epoch": 0.04, "grad_norm": 3.8801262378692627, "learning_rate": 9.99455333378715e-05, "loss": 7.0342, "step": 170 },
    { "epoch": 0.04, "grad_norm": 3.4446799755096436, "learning_rate": 9.993747617496428e-05, "loss": 6.9368, "step": 175 },
    { "epoch": 0.04, "grad_norm": 3.6471118927001953, "learning_rate": 9.9928863824489e-05, "loss": 6.8468, "step": 180 },
    { "epoch": 0.04, "grad_norm": 4.5301289558410645, "learning_rate": 9.99196963821948e-05, "loss": 6.7799, "step": 185 },
    { "epoch": 0.04, "grad_norm": 3.7082278728485107, "learning_rate": 9.990997395000217e-05, "loss": 6.6638, "step": 190 },
    { "epoch": 0.04, "grad_norm": 3.3497087955474854, "learning_rate": 9.989969663600169e-05, "loss": 6.5953, "step": 195 },
    { "epoch": 0.04, "grad_norm": 4.278082370758057, "learning_rate": 9.9888864554453e-05, "loss": 6.606, "step": 200 },
    { "epoch": 0.04, "grad_norm": 2.66937518119812, "learning_rate": 9.987747782578342e-05, "loss": 6.5057, "step": 205 },
    { "epoch": 0.04, "grad_norm": 2.571780204772949, "learning_rate": 9.986553657658668e-05, "loss": 6.4385, "step": 210 },
    { "epoch": 0.04, "grad_norm": 5.088606834411621, "learning_rate": 9.985304093962145e-05, "loss": 6.3923, "step": 215 },
    { "epoch": 0.05, "grad_norm": 5.042826175689697, "learning_rate": 9.983999105380988e-05, "loss": 6.3688, "step": 220 },
    { "epoch": 0.05, "grad_norm": 7.568378925323486, "learning_rate": 9.982638706423608e-05, "loss": 6.2207, "step": 225 },
    { "epoch": 0.05, "grad_norm": 4.896998405456543, "learning_rate": 9.98122291221445e-05, "loss": 6.1763, "step": 230 },
    { "epoch": 0.05, "grad_norm": 2.744915723800659, "learning_rate": 9.979751738493826e-05, "loss": 6.2032, "step": 235 },
    { "epoch": 0.05, "grad_norm": 4.402007102966309, "learning_rate": 9.978225201617732e-05, "loss": 6.1301, "step": 240 },
    { "epoch": 0.05, "grad_norm": 3.604529857635498, "learning_rate": 9.976643318557678e-05, "loss": 6.1229, "step": 245 },
    { "epoch": 0.05, "grad_norm": 3.7452666759490967, "learning_rate": 9.975006106900495e-05, "loss": 6.0317, "step": 250 },
    { "epoch": 0.05, "grad_norm": 4.346206188201904, "learning_rate": 9.973313584848132e-05, "loss": 5.9815, "step": 255 },
    { "epoch": 0.05, "grad_norm": 3.731186628341675, "learning_rate": 9.971565771217464e-05, "loss": 5.9493, "step": 260 },
    { "epoch": 0.06, "grad_norm": 3.345651865005493, "learning_rate": 9.969762685440076e-05, "loss": 5.9823, "step": 265 },
    { "epoch": 0.06, "grad_norm": 3.3746602535247803, "learning_rate": 9.967904347562054e-05, "loss": 5.8495, "step": 270 },
    { "epoch": 0.06, "grad_norm": 4.518210411071777, "learning_rate": 9.965990778243755e-05, "loss": 5.9199, "step": 275 },
    { "epoch": 0.06, "grad_norm": 3.3639678955078125, "learning_rate": 9.964021998759577e-05, "loss": 5.8102, "step": 280 },
    { "epoch": 0.06, "grad_norm": 2.2886486053466797, "learning_rate": 9.961998030997733e-05, "loss": 5.816, "step": 285 },
    { "epoch": 0.06, "grad_norm": 3.6282382011413574, "learning_rate": 9.95991889745999e-05, "loss": 5.7934, "step": 290 },
    { "epoch": 0.06, "grad_norm": 4.633136749267578, "learning_rate": 9.957784621261441e-05, "loss": 5.7084, "step": 295 },
    { "epoch": 0.06, "grad_norm": 4.706786155700684, "learning_rate": 9.955595226130226e-05, "loss": 5.726, "step": 300 },
    { "epoch": 0.06, "grad_norm": 4.696900367736816, "learning_rate": 9.953350736407282e-05, "loss": 5.741, "step": 305 },
    { "epoch": 0.06, "grad_norm": 3.607839345932007, "learning_rate": 9.951051177046069e-05, "loss": 5.6393, "step": 310 },
    { "epoch": 0.07, "grad_norm": 2.6848416328430176, "learning_rate": 9.948696573612292e-05, "loss": 5.6151, "step": 315 },
    { "epoch": 0.07, "grad_norm": 2.3749618530273438, "learning_rate": 9.946286952283618e-05, "loss": 5.5774, "step": 320 },
    { "epoch": 0.07, "grad_norm": 3.431562662124634, "learning_rate": 9.943822339849381e-05, "loss": 5.6212, "step": 325 },
    { "epoch": 0.07, "grad_norm": 2.472083806991577, "learning_rate": 9.941302763710288e-05, "loss": 5.578, "step": 330 },
    { "epoch": 0.07, "grad_norm": 3.792559862136841, "learning_rate": 9.938728251878116e-05, "loss": 5.5623, "step": 335 },
    { "epoch": 0.07, "grad_norm": 4.234732627868652, "learning_rate": 9.936098832975393e-05, "loss": 5.5339, "step": 340 },
    { "epoch": 0.07, "grad_norm": 4.907464981079102, "learning_rate": 9.933414536235091e-05, "loss": 5.4366, "step": 345 },
    { "epoch": 0.07, "grad_norm": 2.1314311027526855, "learning_rate": 9.93067539150029e-05, "loss": 5.4759, "step": 350 },
    { "epoch": 0.07, "grad_norm": 3.1746504306793213, "learning_rate": 9.927881429223853e-05, "loss": 5.4098, "step": 355 },
    { "epoch": 0.07, "grad_norm": 3.957454204559326, "learning_rate": 9.925032680468085e-05, "loss": 5.4251, "step": 360 },
    { "epoch": 0.08, "grad_norm": 2.583669900894165, "learning_rate": 9.922129176904388e-05, "loss": 5.4224, "step": 365 },
    { "epoch": 0.08, "grad_norm": 2.548809051513672, "learning_rate": 9.919170950812911e-05, "loss": 5.3477, "step": 370 },
    { "epoch": 0.08, "grad_norm": 5.560839653015137, "learning_rate": 9.916158035082184e-05, "loss": 5.3688, "step": 375 },
    { "epoch": 0.08, "grad_norm": 4.999515533447266, "learning_rate": 9.913090463208763e-05, "loss": 5.4106, "step": 380 },
    { "epoch": 0.08, "grad_norm": 4.346980571746826, "learning_rate": 9.90996826929685e-05, "loss": 5.3671, "step": 385 },
    { "epoch": 0.08, "grad_norm": 4.390699863433838, "learning_rate": 9.906791488057916e-05, "loss": 5.3009, "step": 390 },
    { "epoch": 0.08, "grad_norm": 2.8730216026306152, "learning_rate": 9.903560154810313e-05, "loss": 5.2528, "step": 395 },
    { "epoch": 0.08, "grad_norm": 2.8211958408355713, "learning_rate": 9.900274305478887e-05, "loss": 5.278, "step": 400 },
    { "epoch": 0.08, "grad_norm": 2.9018919467926025, "learning_rate": 9.896933976594572e-05, "loss": 5.2639, "step": 405 },
    { "epoch": 0.09, "grad_norm": 2.902015209197998, "learning_rate": 9.893539205293989e-05, "loss": 5.3053, "step": 410 },
    { "epoch": 0.09, "grad_norm": 2.365139961242676, "learning_rate": 9.890090029319028e-05, "loss": 5.2269, "step": 415 },
    { "epoch": 0.09, "grad_norm": 4.671627521514893, "learning_rate": 9.886586487016433e-05, "loss": 5.2006, "step": 420 },
    { "epoch": 0.09, "grad_norm": 2.9795711040496826, "learning_rate": 9.883028617337378e-05, "loss": 5.1443, "step": 425 },
    { "epoch": 0.09, "grad_norm": 4.8376240730285645, "learning_rate": 9.879416459837022e-05, "loss": 5.1789, "step": 430 },
    { "epoch": 0.09, "grad_norm": 74.21269226074219, "learning_rate": 9.875750054674082e-05, "loss": 5.164, "step": 435 },
    { "epoch": 0.09, "grad_norm": 3.394662618637085, "learning_rate": 9.872029442610382e-05, "loss": 5.229, "step": 440 },
    { "epoch": 0.09, "grad_norm": 2.8581583499908447, "learning_rate": 9.8682546650104e-05, "loss": 5.1748, "step": 445 },
    { "epoch": 0.09, "grad_norm": 2.873598098754883, "learning_rate": 9.864425763840802e-05, "loss": 5.0188, "step": 450 },
    { "epoch": 0.09, "grad_norm": 3.0684592723846436, "learning_rate": 9.860542781669988e-05, "loss": 5.0698, "step": 455 },
    { "epoch": 0.1, "grad_norm": 3.1893513202667236, "learning_rate": 9.85660576166761e-05, "loss": 5.0876, "step": 460 },
    { "epoch": 0.1, "grad_norm": 3.5682992935180664, "learning_rate": 9.852614747604093e-05, "loss": 5.012, "step": 465 },
    { "epoch": 0.1, "grad_norm": 2.143974781036377, "learning_rate": 9.848569783850145e-05, "loss": 5.0468, "step": 470 },
    { "epoch": 0.1, "grad_norm": 2.436988115310669, "learning_rate": 9.844470915376278e-05, "loss": 5.0128, "step": 475 },
    { "epoch": 0.1, "grad_norm": 2.3220009803771973, "learning_rate": 9.840318187752292e-05, "loss": 5.0599, "step": 480 },
    { "epoch": 0.1, "grad_norm": 3.984196901321411, "learning_rate": 9.836111647146771e-05, "loss": 4.9826, "step": 485 },
    { "epoch": 0.1, "grad_norm": 2.470804452896118, "learning_rate": 9.831851340326577e-05, "loss": 4.9622, "step": 490 },
    { "epoch": 0.1, "grad_norm": 3.7063026428222656, "learning_rate": 9.82753731465633e-05, "loss": 4.9792, "step": 495 },
    { "epoch": 0.1, "grad_norm": 10.18554401397705, "learning_rate": 9.823169618097871e-05, "loss": 4.8994, "step": 500 },
    { "epoch": 0.1, "grad_norm": 2.004983901977539, "learning_rate": 9.81874829920974e-05, "loss": 4.8866, "step": 505 },
    { "epoch": 0.11, "grad_norm": 2.266251564025879, "learning_rate": 9.814273407146623e-05, "loss": 4.8267, "step": 510 },
    { "epoch": 0.11, "grad_norm": 4.0071024894714355, "learning_rate": 9.809744991658829e-05, "loss": 4.8254, "step": 515 },
    { "epoch": 0.11, "grad_norm": 3.009719133377075, "learning_rate": 9.805163103091708e-05, "loss": 4.8355, "step": 520 },
    { "epoch": 0.11, "grad_norm": 2.7107043266296387, "learning_rate": 9.800527792385112e-05, "loss": 4.9058, "step": 525 },
    { "epoch": 0.11, "grad_norm": 2.9798905849456787, "learning_rate": 9.79583911107282e-05, "loss": 4.8755, "step": 530 },
    { "epoch": 0.11, "grad_norm": 3.1199405193328857, "learning_rate": 9.791097111281968e-05, "loss": 4.877, "step": 535 },
    { "epoch": 0.11, "grad_norm": 2.966737747192383, "learning_rate": 9.786301845732467e-05, "loss": 4.8143, "step": 540 },
    { "epoch": 0.11, "grad_norm": 2.3689117431640625, "learning_rate": 9.781453367736418e-05, "loss": 4.7737, "step": 545 },
    { "epoch": 0.11, "grad_norm": 3.9130005836486816, "learning_rate": 9.776551731197524e-05, "loss": 4.849, "step": 550 },
    { "epoch": 0.12, "grad_norm": 3.18276047706604, "learning_rate": 9.771596990610478e-05, "loss": 4.7412, "step": 555 },
    { "epoch": 0.12, "grad_norm": 2.2997982501983643, "learning_rate": 9.766589201060372e-05, "loss": 4.7399, "step": 560 },
    { "epoch": 0.12, "grad_norm": 3.285614490509033, "learning_rate": 9.761528418222077e-05, "loss": 4.7657, "step": 565 },
    { "epoch": 0.12, "grad_norm": 2.5080738067626953, "learning_rate": 9.756414698359624e-05, "loss": 4.764, "step": 570 },
    { "epoch": 0.12, "grad_norm": 2.743520736694336, "learning_rate": 9.75124809832558e-05, "loss": 4.642, "step": 575 },
    { "epoch": 0.12, "grad_norm": 3.485870838165283, "learning_rate": 9.746028675560413e-05, "loss": 4.7463, "step": 580 },
    { "epoch": 0.12, "grad_norm": 2.757502794265747, "learning_rate": 9.740756488091861e-05, "loss": 4.6997, "step": 585 },
    { "epoch": 0.12, "grad_norm": 2.4718565940856934, "learning_rate": 9.735431594534277e-05, "loss": 4.6539, "step": 590 },
    { "epoch": 0.12, "grad_norm": 2.387404680252075, "learning_rate": 9.730054054087983e-05, "loss": 4.5988, "step": 595 },
    { "epoch": 0.12, "grad_norm": 3.5060672760009766, "learning_rate": 9.724623926538612e-05, "loss": 4.6711, "step": 600 },
    { "epoch": 0.13, "grad_norm": 2.424964189529419, "learning_rate": 9.719141272256443e-05, "loss": 4.5616, "step": 605 },
    { "epoch": 0.13, "grad_norm": 2.4947853088378906, "learning_rate": 9.713606152195726e-05, "loss": 4.5994, "step": 610 },
    { "epoch": 0.13, "grad_norm": 2.7293379306793213, "learning_rate": 9.708018627894011e-05, "loss": 4.6421, "step": 615 },
    { "epoch": 0.13, "grad_norm": 6.5259528160095215, "learning_rate": 9.702378761471456e-05, "loss": 4.6012, "step": 620 },
    { "epoch": 0.13, "grad_norm": 5.21116828918457, "learning_rate": 9.696686615630146e-05, "loss": 4.6564, "step": 625 },
    { "epoch": 0.13, "grad_norm": 3.3022258281707764, "learning_rate": 9.690942253653385e-05, "loss": 4.6354, "step": 630 },
    { "epoch": 0.13, "grad_norm": 2.6259093284606934, "learning_rate": 9.685145739405002e-05, "loss": 4.4983, "step": 635 },
    { "epoch": 0.13, "grad_norm": 2.3440661430358887, "learning_rate": 9.679297137328634e-05, "loss": 4.5076, "step": 640 },
    { "epoch": 0.13, "grad_norm": 1.7991883754730225, "learning_rate": 9.673396512447013e-05, "loss": 4.5399, "step": 645 },
    { "epoch": 0.14, "grad_norm": 2.315129041671753, "learning_rate": 9.667443930361247e-05, "loss": 4.5895, "step": 650 },
    { "epoch": 0.14, "grad_norm": 13.325892448425293, "learning_rate": 9.661439457250076e-05, "loss": 4.522, "step": 655 },
    { "epoch": 0.14, "grad_norm": 2.741748332977295, "learning_rate": 9.655383159869158e-05, "loss": 4.4874, "step": 660 },
    { "epoch": 0.14, "grad_norm": 6.063040256500244, "learning_rate": 9.649275105550309e-05, "loss": 4.6068, "step": 665 },
    { "epoch": 0.14, "grad_norm": 2.7268357276916504, "learning_rate": 9.643115362200762e-05, "loss": 4.4608, "step": 670 },
    { "epoch": 0.14, "grad_norm": 2.668646812438965, "learning_rate": 9.636903998302409e-05, "loss": 4.4986, "step": 675 },
    { "epoch": 0.14, "grad_norm": 2.6662468910217285, "learning_rate": 9.630641082911045e-05, "loss": 4.4831, "step": 680 },
    { "epoch": 0.14, "grad_norm": 2.068556785583496, "learning_rate": 9.624326685655593e-05, "loss": 4.5347, "step": 685 },
    { "epoch": 0.14, "grad_norm": 3.145664930343628, "learning_rate": 9.617960876737337e-05, "loss": 4.3987, "step": 690 },
    { "epoch": 0.14, "grad_norm": 2.836777687072754, "learning_rate": 9.611543726929134e-05, "loss": 4.4232, "step": 695 },
    { "epoch": 0.15, "grad_norm": 2.4169726371765137, "learning_rate": 9.605075307574635e-05, "loss": 4.419, "step": 700 },
    { "epoch": 0.15, "grad_norm": 2.363471508026123, "learning_rate": 9.598555690587487e-05, "loss": 4.4505, "step": 705 },
    { "epoch": 0.15, "grad_norm": 1.95708429813385, "learning_rate": 9.591984948450532e-05, "loss": 4.4358, "step": 710 },
    { "epoch": 0.15, "grad_norm": 17.262592315673828, "learning_rate": 9.585363154215008e-05, "loss": 4.4204, "step": 715 },
    { "epoch": 0.15, "grad_norm": 2.599637269973755, "learning_rate": 9.578690381499728e-05, "loss": 4.4039, "step": 720 },
    { "epoch": 0.15, "grad_norm": 4.246088027954102, "learning_rate": 9.571966704490271e-05, "loss": 4.3817, "step": 725 },
    { "epoch": 0.15, "grad_norm": 2.036510944366455, "learning_rate": 9.565192197938148e-05, "loss": 4.3224, "step": 730 },
    { "epoch": 0.15, "grad_norm": 2.655491590499878, "learning_rate": 9.558366937159977e-05, "loss": 4.3792, "step": 735 },
    { "epoch": 0.15, "grad_norm": 2.5865752696990967, "learning_rate": 9.551490998036646e-05, "loss": 4.3374, "step": 740 },
    { "epoch": 0.15, "grad_norm": 1.5395338535308838, "learning_rate": 9.544564457012463e-05, "loss": 4.3361, "step": 745 },
    { "epoch": 0.16, "grad_norm": 1.922009825706482, "learning_rate": 9.537587391094314e-05, "loss": 4.3745, "step": 750 },
    { "epoch": 0.16, "grad_norm": 2.555393934249878, "learning_rate": 9.5305598778508e-05, "loss": 4.3643, "step": 755 },
    { "epoch": 0.16, "grad_norm": 2.469945192337036, "learning_rate": 9.52348199541138e-05, "loss": 4.3793, "step": 760 },
    { "epoch": 0.16, "grad_norm": 1.8235751390457153, "learning_rate": 9.516353822465504e-05, "loss": 4.282, "step": 765 },
    { "epoch": 0.16, "grad_norm": 1.8429944515228271, "learning_rate": 9.509175438261726e-05, "loss": 4.3457, "step": 770 },
    { "epoch": 0.16, "grad_norm": 2.9383187294006348, "learning_rate": 9.501946922606838e-05, "loss": 4.3501, "step": 775 },
    { "epoch": 0.16, "grad_norm": 2.5326876640319824, "learning_rate": 9.494668355864973e-05, "loss": 4.3599, "step": 780 },
    { "epoch": 0.16, "grad_norm": 2.431361198425293, "learning_rate": 9.487339818956716e-05, "loss": 4.3081, "step": 785 },
    { "epoch": 0.16, "grad_norm": 2.223879098892212, "learning_rate": 9.479961393358203e-05, "loss": 4.2371, "step": 790 },
    { "epoch": 0.17, "grad_norm": 1.4740023612976074, "learning_rate": 9.472533161100215e-05, "loss": 4.3332, "step": 795 },
    { "epoch": 0.17, "grad_norm": 2.1612377166748047, "learning_rate": 9.465055204767265e-05, "loss": 4.2704, "step": 800 },
    { "epoch": 0.17, "grad_norm": 1.9543275833129883, "learning_rate": 9.457527607496685e-05, "loss": 4.2932, "step": 805 },
    { "epoch": 0.17, "grad_norm": 1.7458062171936035, "learning_rate": 9.44995045297769e-05, "loss": 4.1922, "step": 810 },
    { "epoch": 0.17, "grad_norm": 1.9623520374298096, "learning_rate": 9.442323825450464e-05, "loss": 4.2933, "step": 815 },
    { "epoch": 0.17, "grad_norm": 3.298856735229492, "learning_rate": 9.43464780970521e-05, "loss": 4.1951, "step": 820 },
    { "epoch": 0.17, "grad_norm": 2.488687753677368, "learning_rate": 9.426922491081212e-05, "loss": 4.1681, "step": 825 },
    { "epoch": 0.17, "grad_norm": 2.890091896057129, "learning_rate": 9.419147955465888e-05, "loss": 4.2496, "step": 830 },
    { "epoch": 0.17, "grad_norm": 1.8966683149337769, "learning_rate": 9.411324289293832e-05, "loss": 4.2597, "step": 835 },
    { "epoch": 0.17, "grad_norm": 2.0964736938476562, "learning_rate": 9.403451579545859e-05, "loss": 4.2082, "step": 840 },
    { "epoch": 0.18, "grad_norm": 1.873693823814392, "learning_rate": 9.395529913748025e-05, "loss": 4.2124, "step": 845 },
    { "epoch": 0.18, "grad_norm": 1.7219654321670532, "learning_rate": 9.387559379970672e-05, "loss": 4.1612, "step": 850 },
    { "epoch": 0.18, "grad_norm": 1.8131452798843384, "learning_rate": 9.379540066827431e-05, "loss": 4.2244, "step": 855 },
    { "epoch": 0.18, "grad_norm": 1.3947608470916748, "learning_rate": 9.371472063474248e-05, "loss": 4.1973, "step": 860 },
    { "epoch": 0.18, "grad_norm": 2.269024610519409, "learning_rate": 9.363355459608394e-05, "loss": 4.2322, "step": 865 },
    { "epoch": 0.18, "grad_norm": 2.323319435119629, "learning_rate": 9.355190345467457e-05, "loss": 4.2178, "step": 870 },
    { "epoch": 0.18, "grad_norm": 3.0089669227600098, "learning_rate": 9.346976811828352e-05, "loss": 4.1682, "step": 875 },
    { "epoch": 0.18, "grad_norm": 2.6600987911224365, "learning_rate": 9.338714950006297e-05, "loss": 4.1985, "step": 880 },
    { "epoch": 0.18, "grad_norm": 2.142987012863159, "learning_rate": 9.330404851853817e-05, "loss": 4.1924, "step": 885 },
    { "epoch": 0.18, "grad_norm": 1.8982244729995728, "learning_rate": 9.3220466097597e-05, "loss": 4.1807, "step": 890 },
    { "epoch": 0.19, "grad_norm": 1.6516717672348022, "learning_rate": 9.313640316647991e-05, "loss": 4.1747, "step": 895 },
    { "epoch": 0.19, "grad_norm": 2.066444158554077, "learning_rate": 9.305186065976945e-05, "loss": 4.0994, "step": 900 },
    { "epoch": 0.19, "grad_norm": 1.427220106124878, "learning_rate": 9.296683951737993e-05, "loss": 4.0989, "step": 905 },
    { "epoch": 0.19, "grad_norm": 1.4269256591796875, "learning_rate": 9.288134068454697e-05, "loss": 4.1823, "step": 910 },
    { "epoch": 0.19, "grad_norm": 2.578458070755005, "learning_rate": 9.2795365111817e-05, "loss": 4.1248, "step": 915 },
    { "epoch": 0.19, "grad_norm": 1.8514119386672974, "learning_rate": 9.270891375503665e-05, "loss": 4.2063, "step": 920 },
    { "epoch": 0.19, "grad_norm": 1.622624158859253, "learning_rate": 9.262198757534218e-05, "loss": 4.1254, "step": 925 },
    { "epoch": 0.19, "grad_norm": 1.5997337102890015, "learning_rate": 9.253458753914874e-05, "loss": 4.0921, "step": 930 },
    { "epoch": 0.19, "grad_norm": 1.544672966003418, "learning_rate": 9.244671461813969e-05, "loss": 4.1679, "step": 935 },
    { "epoch": 0.2, "grad_norm": 2.013326406478882, "learning_rate": 9.235836978925572e-05, "loss": 4.0929, "step": 940 },
    { "epoch": 0.2, "grad_norm": 2.0170209407806396, "learning_rate": 9.226955403468406e-05, "loss": 4.2025, "step": 945 },
    { "epoch": 0.2, "grad_norm": 1.877813696861267, "learning_rate": 9.21802683418475e-05, "loss": 4.1673, "step": 950 },
    { "epoch": 0.2, "grad_norm": 2.38816499710083, "learning_rate": 9.209051370339347e-05, "loss": 4.0372, "step": 955 },
    { "epoch": 0.2, "grad_norm": 1.6840617656707764, "learning_rate": 9.200029111718295e-05, "loss": 4.0814, "step": 960 },
    { "epoch": 0.2, "grad_norm": 2.1929237842559814, "learning_rate": 9.190960158627941e-05, "loss": 4.1309, "step": 965 },
    { "epoch": 0.2, "grad_norm": 2.2283923625946045, "learning_rate": 9.181844611893766e-05, "loss": 4.1529, "step": 970 },
    { "epoch": 0.2, "grad_norm": 1.4596413373947144, "learning_rate": 9.172682572859261e-05, "loss": 4.1775, "step": 975 },
    { "epoch": 0.2, "grad_norm": 1.5317531824111938, "learning_rate": 9.163474143384806e-05, "loss": 3.9884, "step": 980 },
    { "epoch": 0.2, "grad_norm": 2.49446964263916, "learning_rate": 9.154219425846528e-05, "loss": 4.1084, "step": 985 },
    { "epoch": 0.21, "grad_norm": 1.7860472202301025, "learning_rate": 9.144918523135175e-05, "loss": 4.0325, "step": 990 },
    { "epoch": 0.21, "grad_norm": 1.552417516708374, "learning_rate": 9.13557153865496e-05, "loss": 4.0353, "step": 995 },
    { "epoch": 0.21, "grad_norm": 2.3973443508148193, "learning_rate": 9.12617857632242e-05, "loss": 4.064, "step": 1000 },
    { "epoch": 0.21, "grad_norm": 1.8256909847259521, "learning_rate": 9.116739740565259e-05, "loss": 4.0357, "step": 1005 },
    { "epoch": 0.21, "grad_norm": 1.6800718307495117, "learning_rate": 9.107255136321184e-05, "loss": 4.0421, "step": 1010 },
    { "epoch": 0.21, "grad_norm": 1.8746205568313599, "learning_rate": 9.09772486903674e-05, "loss": 3.992, "step": 1015 },
    { "epoch": 0.21, "grad_norm": 1.4675569534301758, "learning_rate": 9.08814904466614e-05, "loss": 4.0656, "step": 1020 },
    { "epoch": 0.21, "grad_norm": 1.790181040763855, "learning_rate": 9.078527769670085e-05, "loss": 3.9793, "step": 1025 },
    { "epoch": 0.21, "grad_norm": 1.5290518999099731, "learning_rate": 9.068861151014575e-05, "loss": 3.9731, "step": 1030 },
    { "epoch": 0.22, "grad_norm": 1.6314709186553955, "learning_rate": 9.05914929616973e-05, "loss": 4.001, "step": 1035 },
    { "epoch": 0.22, "grad_norm": 1.6790775060653687, "learning_rate": 9.04939231310859e-05, "loss": 3.9733, "step": 1040 },
    { "epoch": 0.22, "grad_norm": 2.706252336502075, "learning_rate": 9.039590310305914e-05, "loss": 3.9434, "step": 1045 },
    { "epoch": 0.22, "grad_norm": 1.976989507675171, "learning_rate": 9.029743396736974e-05, "loss": 4.0118, "step": 1050 },
    { "epoch": 0.22, "grad_norm": 1.6745636463165283, "learning_rate": 9.019851681876348e-05, "loss": 3.9574, "step": 1055 },
    { "epoch": 0.22, "grad_norm": 1.4342231750488281, "learning_rate": 9.009915275696693e-05, "loss": 3.977, "step": 1060 },
    { "epoch": 0.22, "grad_norm": 1.3511041402816772, "learning_rate": 8.999934288667534e-05, "loss": 4.0192, "step": 1065 },
    { "epoch": 0.22, "grad_norm": 2.1652772426605225, "learning_rate": 8.989908831754028e-05, "loss": 3.996, "step": 1070 },
    { "epoch": 0.22, "grad_norm": 1.3712204694747925, "learning_rate": 8.979839016415735e-05, "loss": 3.9809, "step": 1075 },
    { "epoch": 0.22, "grad_norm": 1.658094048500061, "learning_rate": 8.969724954605373e-05, "loss": 3.9934, "step": 1080 },
    { "epoch": 0.23, "grad_norm": 1.5667743682861328, "learning_rate": 8.959566758767581e-05, "loss": 4.0291, "step": 1085 },
    { "epoch": 0.23, "grad_norm": 1.2648104429244995, "learning_rate": 8.949364541837661e-05, "loss": 4.007, "step": 1090 },
    { "epoch": 0.23, "grad_norm": 2.144688606262207, "learning_rate": 8.939118417240329e-05, "loss": 3.9619, "step": 1095 },
    { "epoch": 0.23, "grad_norm": 1.567773461341858, "learning_rate": 8.92882849888845e-05, "loss": 3.9128, "step": 1100 },
    { "epoch": 0.23, "grad_norm": 1.5329694747924805, "learning_rate": 8.918494901181773e-05, "loss": 3.8977, "step": 1105 },
    { "epoch": 0.23, "grad_norm": 2.1690361499786377, "learning_rate": 8.908117739005659e-05, "loss": 4.0269, "step": 1110 },
    { "epoch": 0.23, "grad_norm": 1.9766258001327515, "learning_rate": 8.897697127729805e-05, "loss": 4.0738, "step": 1115 },
    { "epoch": 0.23, "grad_norm": 2.04693865776062, "learning_rate": 8.887233183206957e-05, "loss": 3.9546, "step": 1120 },
    { "epoch": 0.23, "grad_norm": 1.5112673044204712, "learning_rate": 8.876726021771627e-05, "loss": 4.0082, "step": 1125 },
    { "epoch": 0.23, "grad_norm": 1.8021695613861084, "learning_rate": 8.866175760238798e-05, "loss": 3.9489, "step": 1130 },
    { "epoch": 0.24, "grad_norm": 2.141040563583374, "learning_rate": 8.855582515902625e-05, "loss": 3.9274, "step": 1135 },
    { "epoch": 0.24, "grad_norm": 1.5247093439102173, "learning_rate": 8.844946406535131e-05, "loss": 3.9396, "step": 1140 },
    { "epoch": 0.24, "grad_norm": 1.6850353479385376, "learning_rate": 8.834267550384893e-05, "loss": 3.9595, "step": 1145 },
    { "epoch": 0.24, "grad_norm": 1.969480037689209, "learning_rate": 8.823546066175741e-05, "loss": 3.9362, "step": 1150 },
    { "epoch": 0.24, "grad_norm": 1.3904099464416504, "learning_rate": 8.81278207310542e-05, "loss": 3.8443, "step": 1155 },
    { "epoch": 0.24, "grad_norm": 1.679237961769104, "learning_rate": 8.801975690844278e-05, "loss": 3.9694, "step": 1160 },
    { "epoch": 0.24, "grad_norm": 1.516084909439087, "learning_rate": 8.791127039533934e-05, "loss": 3.9411, "step": 1165 },
    { "epoch": 0.24, "grad_norm": 1.3940407037734985, "learning_rate": 8.780236239785935e-05, "loss": 3.949, "step": 1170 },
    { "epoch": 0.24, "grad_norm": 1.5583488941192627, "learning_rate": 8.76930341268042e-05, "loss": 3.9738, "step": 1175 },
    { "epoch": 0.25, "grad_norm": 1.3650261163711548, "learning_rate": 8.758328679764776e-05, "loss": 3.9532, "step": 1180 },
    { "epoch": 0.25, "grad_norm": 1.3828051090240479, "learning_rate": 8.747312163052284e-05, "loss": 3.8283, "step": 1185 },
    { "epoch": 0.25, "grad_norm": 1.6516921520233154, "learning_rate": 8.736253985020761e-05, "loss": 3.8717, "step": 1190 },
    { "epoch": 0.25, "grad_norm": 1.7323459386825562, "learning_rate": 8.725154268611203e-05, "loss": 3.8824, "step": 1195 },
    { "epoch": 0.25, "grad_norm": 1.829392910003662, "learning_rate": 8.714013137226411e-05, "loss": 3.9052, "step": 1200 },
    { "epoch": 0.25, "grad_norm": 1.3618247509002686, "learning_rate": 8.702830714729628e-05, "loss": 3.8535, "step": 1205 },
    { "epoch": 0.25, "grad_norm": 1.2153682708740234, "learning_rate": 8.691607125443153e-05, "loss": 3.88, "step": 1210 },
    { "epoch": 0.25, "grad_norm": 1.5574041604995728, "learning_rate": 8.680342494146967e-05, "loss": 3.8169, "step": 1215 },
    { "epoch": 0.25, "grad_norm": 1.6505224704742432, "learning_rate": 8.66903694607734e-05, "loss": 3.8834, "step": 1220 },
    { "epoch": 0.25, "grad_norm": 1.343554139137268, "learning_rate": 8.65769060692544e-05, "loss": 3.8068, "step": 1225 },
    { "epoch": 0.26, "grad_norm": 1.5355747938156128, "learning_rate": 8.646303602835936e-05, "loss": 3.9175, "step": 1230 },
    { "epoch": 0.26, "grad_norm": 1.2311254739761353, "learning_rate": 8.634876060405597e-05, "loss": 3.82, "step": 1235 },
    { "epoch": 0.26, "grad_norm": 1.1889828443527222, "learning_rate": 8.623408106681884e-05, "loss": 3.8909, "step": 1240 },
    { "epoch": 0.26, "grad_norm": 1.758866548538208, "learning_rate": 8.611899869161535e-05, "loss": 3.9627, "step": 1245 },
    { "epoch": 0.26, "grad_norm": 1.4952363967895508, "learning_rate": 8.600351475789147e-05, "loss": 3.8229, "step": 1250 },
    { "epoch": 0.26, "grad_norm": 1.1656522750854492, "learning_rate": 8.588763054955764e-05, "loss": 3.8274, "step": 1255 },
    { "epoch": 0.26, "grad_norm": 1.5959575176239014, "learning_rate": 8.57713473549743e-05, "loss": 3.7958, "step": 1260 },
    { "epoch": 0.26, "grad_norm": 1.9823359251022339, "learning_rate": 8.565466646693778e-05, "loss": 3.9781, "step": 1265 },
    { "epoch": 0.26, "grad_norm": 1.4883431196212769, "learning_rate": 8.553758918266578e-05, "loss": 3.7693, "step": 1270 },
    { "epoch": 0.27, "grad_norm": 1.681728482246399, "learning_rate": 8.5420116803783e-05, "loss": 3.774, "step": 1275 },
    { "epoch": 0.27, "grad_norm": 1.252820611000061, "learning_rate": 8.530225063630668e-05, "loss": 3.7305, "step": 1280 },
    { "epoch": 0.27, "grad_norm": 1.5661219358444214, "learning_rate": 8.518399199063205e-05, "loss": 3.8674, "step": 1285 },
    { "epoch": 0.27, "grad_norm": 1.3736493587493896, "learning_rate": 8.50653421815178e-05, "loss": 3.9066, "step": 1290 },
    { "epoch": 0.27, "grad_norm": 1.458081603050232, "learning_rate": 8.494630252807138e-05, "loss": 3.6953, "step": 1295 },
    { "epoch": 0.27, "grad_norm": 1.8416239023208618, "learning_rate": 8.482687435373449e-05, "loss": 3.8985, "step": 1300 },
    { "epoch": 0.27, "grad_norm": 1.2914624214172363, "learning_rate": 8.470705898626817e-05, "loss": 3.9091, "step": 1305 },
    { "epoch": 0.27, "grad_norm": 1.4390232563018799, "learning_rate": 8.458685775773822e-05, "loss": 3.7945, "step": 1310 },
    { "epoch": 0.27, "grad_norm": 1.513663411140442, "learning_rate": 8.446627200450025e-05, "loss": 3.8353, "step": 1315 },
    { "epoch": 0.27, "grad_norm": 1.3679267168045044, "learning_rate": 8.434530306718493e-05, "loss": 3.8589, "step": 1320 },
    { "epoch": 0.28, "grad_norm": 1.641858458518982, "learning_rate": 8.4223952290683e-05, "loss": 3.8278, "step": 1325 },
    { "epoch": 0.28, "grad_norm": 1.3191187381744385, "learning_rate": 8.41022210241304e-05, "loss": 3.7688, "step": 1330 },
    { "epoch": 0.28, "grad_norm": 1.3148295879364014, "learning_rate": 8.398011062089316e-05, "loss": 3.7961, "step": 1335 },
    { "epoch": 0.28, "grad_norm": 1.6754995584487915, "learning_rate": 8.385762243855249e-05, "loss": 3.8242, "step": 1340 },
    { "epoch": 0.28, "grad_norm": 1.521572232246399, "learning_rate": 8.373475783888958e-05, "loss": 3.805, "step": 1345 },
    { "epoch": 0.28, "grad_norm": 1.6051355600357056, "learning_rate": 8.36115181878705e-05, "loss": 3.8183, "step": 1350 },
    { "epoch": 0.28, "grad_norm": 1.645491123199463, "learning_rate": 8.348790485563101e-05, "loss": 3.8065, "step": 1355 },
    { "epoch": 0.28, "grad_norm": 1.2536430358886719, "learning_rate": 8.336391921646134e-05, "loss": 3.7664, "step": 1360 },
    { "epoch": 0.28, "grad_norm": 1.691973328590393, "learning_rate": 8.323956264879089e-05, "loss": 3.7289, "step": 1365 },
    { "epoch": 0.28, "grad_norm": 1.4711138010025024, "learning_rate": 8.311483653517294e-05, "loss": 3.8468, "step": 1370 },
    { "epoch": 0.29, "grad_norm": 1.3313995599746704, "learning_rate": 8.298974226226919e-05, "loss": 3.8141, "step": 1375 },
    { "epoch": 0.29, "grad_norm": 1.632800579071045, "learning_rate": 8.28642812208345e-05, "loss": 3.7395, "step": 1380 },
    { "epoch": 0.29, "grad_norm": 1.1504483222961426, "learning_rate": 8.273845480570123e-05, "loss": 3.802, "step": 1385 },
    { "epoch": 0.29, "grad_norm": 1.0767090320587158, "learning_rate": 8.26122644157639e-05, "loss": 3.7864, "step": 1390 },
    { "epoch": 0.29, "grad_norm": 1.284472107887268, "learning_rate": 8.248571145396362e-05, "loss": 3.6997, "step": 1395 },
    { "epoch": 0.29, "grad_norm": 1.715078353881836, "learning_rate": 8.235879732727236e-05, "loss": 3.8206, "step": 1400 },
    { "epoch": 0.29, "grad_norm": 1.6855794191360474, "learning_rate": 8.223152344667745e-05, "loss": 3.7517, "step": 1405 },
    { "epoch": 0.29, "grad_norm": 1.481239914894104, "learning_rate": 8.21038912271658e-05, "loss": 3.7843, "step": 1410 },
    { "epoch": 0.29, "grad_norm": 1.2233805656433105, "learning_rate": 8.197590208770824e-05, "loss": 3.8117, "step": 1415 },
    { "epoch": 0.3, "grad_norm": 1.6338953971862793, "learning_rate": 8.184755745124371e-05, "loss": 3.7475, "step": 1420 },
    { "epoch": 0.3, "grad_norm": 1.2792235612869263, "learning_rate": 8.171885874466342e-05, "loss": 3.7921, "step": 1425 },
    { "epoch": 0.3, "grad_norm": 1.2128022909164429, "learning_rate": 8.158980739879507e-05, "loss": 3.7265, "step": 1430 },
    { "epoch": 0.3, "grad_norm": 1.4311732053756714, "learning_rate": 8.146040484838677e-05, "loss": 3.7395, "step": 1435 },
    { "epoch": 0.3, "grad_norm": 1.2157390117645264, "learning_rate": 8.133065253209132e-05, "loss": 3.6868, "step": 1440 },
    { "epoch": 0.3, "grad_norm": 1.5604244470596313, "learning_rate": 8.120055189245e-05, "loss": 3.7265, "step": 1445 },
    { "epoch": 0.3, "grad_norm": 1.1982959508895874, "learning_rate": 8.10701043758767e-05, "loss": 3.7419, "step": 1450 },
    { "epoch": 0.3, "grad_norm": 1.1536376476287842, "learning_rate": 8.093931143264174e-05, "loss": 3.7326, "step": 1455 },
    { "epoch": 0.3, "grad_norm": 1.265428900718689, "learning_rate": 8.080817451685576e-05, "loss": 3.7235, "step": 1460 },
    { "epoch": 0.3, "grad_norm": 1.3499423265457153, "learning_rate": 8.067669508645356e-05, "loss": 3.7707, "step": 1465 },
    { "epoch": 0.31, "grad_norm": 1.43877112865448, "learning_rate": 8.054487460317797e-05, "loss": 3.7049, "step": 1470 },
    { "epoch": 0.31, "grad_norm": 1.4240456819534302, "learning_rate": 8.041271453256345e-05, "loss": 3.6702, "step": 1475 },
    { "epoch": 0.31, "grad_norm": 1.505245566368103, "learning_rate": 8.02802163439199e-05, "loss": 3.7357, "step": 1480 },
    { "epoch": 0.31, "grad_norm": 1.620792269706726, "learning_rate": 8.01473815103163e-05, "loss": 3.8004, "step": 1485 },
    { "epoch": 0.31, "grad_norm": 1.3375896215438843, "learning_rate": 8.001421150856434e-05, "loss": 3.7365, "step": 1490 },
    { "epoch": 0.31, "grad_norm": 1.2824058532714844, "learning_rate": 7.988070781920197e-05, "loss": 3.7214, "step": 1495 },
    { "epoch": 0.31, "grad_norm": 1.2873374223709106, "learning_rate": 7.9746871926477e-05, "loss": 3.7474, "step": 1500 },
    { "epoch": 0.31, "grad_norm": 1.2541723251342773, "learning_rate": 7.961270531833052e-05, "loss": 3.6825, "step": 1505 },
    { "epoch": 0.31, "grad_norm": 1.3854354619979858, "learning_rate": 7.947820948638045e-05, "loss": 3.7333, "step": 1510 },
    { "epoch": 0.31, "grad_norm": 1.0735524892807007, "learning_rate": 7.934338592590486e-05, "loss": 3.7878, "step": 1515 },
    { "epoch": 0.32, "grad_norm": 1.1718729734420776, "learning_rate": 7.92082361358254e-05, "loss": 3.7286, "step": 1520 },
    { "epoch": 0.32, "grad_norm": 1.987315058708191, "learning_rate": 7.907276161869065e-05, "loss": 3.7756, "step": 1525 },
    { "epoch": 0.32, "grad_norm": 1.5922112464904785, "learning_rate": 7.893696388065936e-05, "loss": 3.6469, "step": 1530 },
    { "epoch": 0.32, "grad_norm": 1.1863898038864136, "learning_rate": 7.88008444314838e-05, "loss": 3.72, "step": 1535 },
    { "epoch": 0.32, "grad_norm": 1.0921562910079956, "learning_rate": 7.866440478449283e-05, "loss": 3.824, "step": 1540 },
    { "epoch": 0.32, "grad_norm": 1.4743787050247192, "learning_rate": 7.852764645657522e-05, "loss": 3.71, "step": 1545 },
    { "epoch": 0.32, "grad_norm": 1.2745715379714966, "learning_rate": 7.839057096816271e-05, "loss": 3.7467, "step": 1550 },
    { "epoch": 0.32, "grad_norm": 5.968575954437256, "learning_rate": 7.82531798432131e-05, "loss": 3.7285, "step": 1555 },
    { "epoch": 0.32, "grad_norm": 1.130025863647461, "learning_rate": 7.811547460919333e-05, "loss": 3.6629, "step": 1560 },
    { "epoch": 0.33, "grad_norm": 1.5758984088897705, "learning_rate": 7.797745679706254e-05, "loss": 3.6272, "step": 1565 },
    { "epoch": 0.33, "grad_norm": 1.1425355672836304, "learning_rate": 7.783912794125496e-05, "loss": 3.6044, "step": 1570 },
    { "epoch": 0.33, "grad_norm": 1.5422570705413818, "learning_rate": 7.770048957966291e-05, "loss": 3.7452, "step": 1575 },
    { "epoch": 0.33, "grad_norm": 1.6101562976837158, "learning_rate": 7.756154325361967e-05, "loss": 3.7062, "step": 1580 },
    { "epoch": 0.33, "grad_norm": 1.1914243698120117, "learning_rate": 7.74222905078824e-05, "loss": 3.6931, "step": 1585 },
    { "epoch": 0.33, "grad_norm": 1.2087386846542358, "learning_rate": 7.728273289061489e-05, "loss": 3.7102, "step": 1590 },
    { "epoch": 0.33, "grad_norm": 1.6411712169647217, "learning_rate": 7.714287195337044e-05, "loss": 3.6645, "step": 1595 },
    { "epoch": 0.33, "grad_norm": 1.0920937061309814, "learning_rate": 7.700270925107448e-05, "loss": 3.7041, "step": 1600 },
    { "epoch": 0.33, "grad_norm": 1.4154627323150635, "learning_rate": 7.686224634200742e-05, "loss": 3.7628, "step": 1605 },
    { "epoch": 0.33, "grad_norm": 0.9885333180427551, "learning_rate": 7.672148478778722e-05, "loss": 3.6455, "step": 1610 },
    { "epoch": 0.34, "grad_norm": 1.1912422180175781, "learning_rate": 7.658042615335212e-05, "loss": 3.6583, "step": 1615 },
    { "epoch": 0.34, "grad_norm": 1.4246773719787598, "learning_rate": 7.643907200694318e-05, "loss": 3.7332, "step": 1620 },
    { "epoch": 0.34, "grad_norm": 1.1347945928573608, "learning_rate": 7.629742392008684e-05, "loss": 3.7321, "step": 1625 },
    { "epoch": 0.34, "grad_norm": 1.3160018920898438, "learning_rate": 7.615548346757749e-05, "loss": 3.6345, "step": 1630 },
    { "epoch": 0.34, "grad_norm": 1.3370493650436401, "learning_rate": 7.60132522274599e-05, "loss": 3.6661, "step": 1635 },
    { "epoch": 0.34, "grad_norm": 1.0385992527008057, "learning_rate": 7.587073178101178e-05, "loss": 3.6488, "step": 1640 },
    { "epoch": 0.34, "grad_norm": 1.0909576416015625, "learning_rate": 7.572792371272609e-05, "loss": 3.6406, "step": 1645 },
    { "epoch": 0.34, "grad_norm": 1.4987009763717651, "learning_rate": 7.55848296102935e-05, "loss": 3.6228, "step": 1650 },
    { "epoch": 0.34, "grad_norm": 1.1608306169509888, "learning_rate": 7.544145106458465e-05, "loss": 3.7531, "step": 1655 },
    { "epoch": 0.35, "grad_norm": 1.263434886932373, "learning_rate": 7.529778966963259e-05, "loss": 3.6643, "step": 1660 },
    { "epoch": 0.35, "grad_norm": 1.130403995513916, "learning_rate": 7.515384702261496e-05, "loss": 3.6908, "step": 1665 },
    { "epoch": 0.35, "grad_norm": 2.0340819358825684, "learning_rate": 7.500962472383627e-05, "loss": 3.7466, "step": 1670 },
    { "epoch": 0.35, "grad_norm": 1.113906979560852, "learning_rate": 7.486512437671011e-05, "loss": 3.6592, "step": 1675 },
    { "epoch": 0.35, "grad_norm": 1.6292065382003784, "learning_rate": 7.472034758774128e-05, "loss": 3.5562, "step": 1680 },
    { "epoch": 0.35, "grad_norm": 1.4088952541351318, "learning_rate": 7.457529596650797e-05, "loss": 3.6639, "step": 1685 },
    { "epoch": 0.35, "grad_norm": 1.2031278610229492, "learning_rate": 7.442997112564392e-05, "loss": 3.6144, "step": 1690 },
    { "epoch": 0.35, "grad_norm": 1.3227585554122925, "learning_rate": 7.428437468082037e-05, "loss": 3.6286, "step": 1695 },
    { "epoch": 0.35, "grad_norm": 1.252436876296997, "learning_rate": 7.413850825072817e-05, "loss": 3.646, "step": 1700 },
    { "epoch": 0.35, "grad_norm": 1.394742727279663, "learning_rate": 7.39923734570598e-05, "loss": 3.5767, "step": 1705 },
    { "epoch": 0.36, "grad_norm": 1.2847375869750977, "learning_rate": 7.384597192449126e-05, "loss": 3.5754, "step": 1710 },
    { "epoch": 0.36, "grad_norm": 1.2518513202667236, "learning_rate": 7.369930528066412e-05, "loss": 3.634, "step": 1715 },
    { "epoch": 0.36, "grad_norm": 1.0519161224365234, "learning_rate": 7.355237515616732e-05, "loss": 3.6772, "step": 1720 },
    { "epoch": 0.36, "grad_norm": 1.226135015487671, "learning_rate": 7.340518318451914e-05, "loss": 3.552, "step": 1725 },
    { "epoch": 0.36, "grad_norm": 1.1285977363586426, "learning_rate": 7.325773100214893e-05, "loss": 3.601, "step": 1730 },
    { "epoch": 0.36, "grad_norm": 1.1608659029006958, "learning_rate": 7.311002024837899e-05, "loss": 3.5961, "step": 1735 },
    { "epoch": 0.36, "grad_norm": 1.2595032453536987, "learning_rate": 7.296205256540633e-05, "loss": 3.6213, "step": 1740 },
    { "epoch": 0.36, "grad_norm": 1.1732516288757324, "learning_rate": 7.281382959828443e-05, "loss": 3.6298, "step": 1745 },
    { "epoch": 0.36, "grad_norm": 1.0036036968231201, "learning_rate": 7.26653529949049e-05, "loss": 3.6044, "step": 1750 },
    { "epoch": 0.36, "grad_norm": 1.2911688089370728, "learning_rate": 7.25166244059792e-05, "loss": 3.6088, "step": 1755 },
    { "epoch": 0.37, "grad_norm": 1.1788573265075684, "learning_rate": 7.236764548502029e-05, "loss": 3.6249, "step": 1760 },
    { "epoch": 0.37, "grad_norm": 1.5241605043411255, "learning_rate": 7.221841788832421e-05, "loss": 3.5965, "step": 1765 },
    { "epoch": 0.37, "grad_norm": 1.3550124168395996, "learning_rate": 7.206894327495173e-05, "loss": 3.5928, "step": 1770 },
    { "epoch": 0.37, "grad_norm": 1.125823736190796, "learning_rate": 7.191922330670982e-05, "loss": 3.6486, "step": 1775 },
    { "epoch": 0.37, "grad_norm": 0.9978755116462708, "learning_rate": 7.176925964813326e-05, "loss": 3.5958, "step": 1780 },
    { "epoch": 0.37, "grad_norm": 1.6410948038101196, "learning_rate": 7.161905396646607e-05, "loss": 3.5321, "step": 1785 },
    { "epoch": 0.37, "grad_norm": 1.1125872135162354, "learning_rate": 7.146860793164299e-05, "loss": 3.5681, "step": 1790 },
    { "epoch": 0.37, "grad_norm": 1.2789889574050903, "learning_rate": 7.131792321627098e-05, "loss": 3.6238, "step": 1795 },
    { "epoch": 0.37, "grad_norm": 1.1455119848251343, "learning_rate": 7.116700149561048e-05, "loss": 3.5738, "step": 1800 },
    { "epoch": 0.38, "grad_norm": 0.9821574091911316, "learning_rate": 7.101584444755696e-05, "loss": 3.5523, "step": 1805 },
    { "epoch": 0.38, "grad_norm": 1.1989892721176147, "learning_rate": 7.086445375262212e-05, "loss": 3.7067, "step": 1810 },
    { "epoch": 0.38, "grad_norm": 1.1733030080795288, "learning_rate": 7.071283109391528e-05, "loss": 3.5572, "step": 1815 },
    { "epoch": 0.38, "grad_norm": 2.4888556003570557, "learning_rate": 7.056097815712466e-05, "loss": 3.6334, "step": 1820 },
    { "epoch": 0.38, "grad_norm": 1.2262009382247925, "learning_rate": 7.040889663049862e-05, "loss": 3.6345, "step": 1825 },
    { "epoch": 0.38, "grad_norm": 1.1262528896331787, "learning_rate": 7.025658820482693e-05, "loss": 3.673, "step": 1830 },
    { "epoch": 0.38, "grad_norm": 1.1414169073104858, "learning_rate": 7.010405457342192e-05, "loss": 3.5342, "step": 1835 },
    { "epoch": 0.38, "grad_norm": 1.1732423305511475, "learning_rate": 6.995129743209967e-05, "loss": 3.6113, "step": 1840 },
    { "epoch": 0.38, "grad_norm": 1.1435227394104004, "learning_rate": 6.97983184791612e-05, "loss": 3.5576, "step": 1845 },
    { "epoch": 0.38, "grad_norm": 1.1150481700897217, "learning_rate": 6.964511941537355e-05, "loss": 3.659, "step": 1850 },
    { "epoch": 0.39, "grad_norm": 1.2644362449645996, "learning_rate": 6.949170194395083e-05, "loss": 3.5798, "step": 1855 },
    { "epoch": 0.39, "grad_norm": 1.1610716581344604, "learning_rate": 6.933806777053536e-05, "loss": 3.5271, "step": 1860 },
    { "epoch": 0.39, "grad_norm": 1.3042726516723633, "learning_rate": 6.918421860317872e-05, "loss": 3.5707, "step": 1865 },
    { "epoch": 0.39, "grad_norm": 0.9652230143547058, "learning_rate": 6.903015615232263e-05, "loss": 3.6355, "step": 1870 },
    { "epoch": 0.39, "grad_norm": 1.2273874282836914, "learning_rate": 6.887588213078012e-05, "loss": 3.542, "step": 1875 },
    { "epoch": 0.39, "grad_norm": 1.090998649597168, "learning_rate": 6.87213982537163e-05, "loss": 3.5175, "step": 1880 },
    { "epoch": 0.39, "grad_norm": 1.203755259513855, "learning_rate": 6.856670623862943e-05, "loss": 3.515, "step": 1885 },
    { "epoch": 0.39, "grad_norm": 1.0707592964172363, "learning_rate": 6.841180780533179e-05, "loss": 3.5394, "step": 1890 },
    { "epoch": 0.39, "grad_norm": 1.0272190570831299, "learning_rate": 6.82567046759305e-05, "loss": 3.4902, "step": 1895 },
    { "epoch": 0.39, "grad_norm": 1.9815902709960938, "learning_rate": 6.810139857480844e-05, "loss": 3.5563, "step": 1900 },
    { "epoch": 0.4, "grad_norm": 1.1668179035186768, "learning_rate": 6.794589122860509e-05, "loss": 3.5696, "step": 1905 },
    { "epoch": 0.4, "grad_norm": 1.574946641921997, "learning_rate": 6.779018436619725e-05, "loss": 3.6011, "step": 1910 },
    { "epoch": 0.4, "grad_norm": 1.2445915937423706, "learning_rate": 6.763427971867992e-05, "loss": 3.5176, "step": 1915 },
    { "epoch": 0.4, "grad_norm": 1.286269187927246, "learning_rate": 6.747817901934699e-05, "loss": 3.5828, "step": 1920 },
    { "epoch": 0.4, "grad_norm": 1.2477304935455322, "learning_rate": 6.732188400367197e-05, "loss": 3.6096, "step": 1925 },
    { "epoch": 0.4, "grad_norm": 1.2229552268981934, "learning_rate": 6.716539640928871e-05, "loss": 3.6102, "step": 1930 },
    { "epoch": 0.4, "grad_norm": 1.356566309928894, "learning_rate": 6.70087179759721e-05, "loss": 3.5343, "step": 1935 },
    { "epoch": 0.4, "grad_norm": 1.0600124597549438, "learning_rate": 6.685185044561874e-05, "loss": 3.5066, "step": 1940 },
    { "epoch": 0.4, "grad_norm": 1.4194650650024414, "learning_rate": 6.669479556222747e-05, "loss": 3.5649, "step": 1945 },
    { "epoch": 0.41, "grad_norm": 1.2837903499603271, "learning_rate": 6.653755507188013e-05, "loss": 3.5501, "step": 1950 },
    { "epoch": 0.41, "grad_norm": 1.2244608402252197, "learning_rate": 6.638013072272205e-05, "loss": 3.4894, "step": 1955 },
    { "epoch": 0.41, "grad_norm": 1.3000928163528442, "learning_rate": 6.622252426494259e-05, "loss": 3.5159, "step": 1960 },
    { "epoch": 0.41, "grad_norm": 1.141238808631897, "learning_rate": 6.606473745075581e-05, "loss": 3.6376, "step": 1965 },
    { "epoch": 0.41, "grad_norm": 1.2801133394241333, "learning_rate": 6.590677203438084e-05, "loss": 3.5289, "step": 1970 },
    { "epoch": 0.41, "grad_norm": 1.1038589477539062, "learning_rate": 6.574862977202252e-05, "loss": 3.511, "step": 1975 },
    { "epoch": 0.41, "grad_norm": 1.3234045505523682, "learning_rate": 6.559031242185174e-05, "loss": 3.5653, "step": 1980 },
    { "epoch": 0.41, "grad_norm": 1.2162070274353027, "learning_rate": 6.543182174398597e-05, "loss": 3.5762, "step": 1985 },
    { "epoch": 0.41, "grad_norm": 1.0354394912719727, "learning_rate": 6.52731595004697e-05, "loss": 3.606, "step": 1990 },
    { "epoch": 0.41, "grad_norm": 1.1612788438796997, "learning_rate": 6.51143274552548e-05, "loss": 3.5454, "step": 1995 },
    { "epoch": 0.42, "grad_norm": 1.1168513298034668, "learning_rate": 6.495532737418098e-05, "loss": 3.6233, "step": 2000 },
    { "epoch": 0.42, "grad_norm": 1.1265037059783936, "learning_rate": 6.479616102495605e-05, "loss": 3.4706, "step": 2005 },
    { "epoch": 0.42, "grad_norm": 1.1584540605545044, "learning_rate": 6.463683017713638e-05, "loss": 3.4945, "step": 2010 },
    { "epoch": 0.42, "grad_norm": 0.9387543201446533, "learning_rate": 6.447733660210715e-05, "loss": 3.5447, "step": 2015 },
    { "epoch": 0.42, "grad_norm": 1.6024271249771118, "learning_rate": 6.431768207306272e-05, "loss": 3.519, "step": 2020 },
    { "epoch": 0.42, "grad_norm": 0.9260468482971191, "learning_rate": 6.415786836498684e-05, "loss": 3.5743, "step": 2025 },
    { "epoch": 0.42, "grad_norm": 1.0614194869995117, "learning_rate": 6.399789725463298e-05, "loss": 3.5214, "step": 2030 },
    { "epoch": 0.42, "grad_norm": 0.9857592582702637, "learning_rate": 6.383777052050458e-05,
|
"loss": 3.5064, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.1745067834854126, |
|
"learning_rate": 6.367748994283518e-05, |
|
"loss": 3.4946, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.0638951063156128, |
|
"learning_rate": 6.351705730356877e-05, |
|
"loss": 3.5524, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.14460289478302, |
|
"learning_rate": 6.335647438633987e-05, |
|
"loss": 3.4517, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.394174575805664, |
|
"learning_rate": 6.319574297645374e-05, |
|
"loss": 3.4724, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.0267730951309204, |
|
"learning_rate": 6.303486486086654e-05, |
|
"loss": 3.515, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.169547438621521, |
|
"learning_rate": 6.287384182816546e-05, |
|
"loss": 3.4679, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.9727531671524048, |
|
"learning_rate": 6.271267566854883e-05, |
|
"loss": 3.4599, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.0682404041290283, |
|
"learning_rate": 6.255136817380618e-05, |
|
"loss": 3.4934, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.0919233560562134, |
|
"learning_rate": 6.23899211372984e-05, |
|
"loss": 3.5548, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.0446982383728027, |
|
"learning_rate": 6.222833635393772e-05, |
|
"loss": 3.4958, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.0501346588134766, |
|
"learning_rate": 6.206661562016782e-05, |
|
"loss": 3.5291, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0181680917739868, |
|
"learning_rate": 6.190476073394382e-05, |
|
"loss": 3.435, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.117964506149292, |
|
"learning_rate": 6.17427734947123e-05, |
|
"loss": 3.5824, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0905768871307373, |
|
"learning_rate": 6.158065570339127e-05, |
|
"loss": 3.4931, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.1306370496749878, |
|
"learning_rate": 6.141840916235021e-05, |
|
"loss": 3.503, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.5290015935897827, |
|
"learning_rate": 6.125603567539001e-05, |
|
"loss": 3.5664, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.1953859329223633, |
|
"learning_rate": 6.109353704772284e-05, |
|
"loss": 3.5314, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0872337818145752, |
|
"learning_rate": 6.0930915085952164e-05, |
|
"loss": 3.5423, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.055855393409729, |
|
"learning_rate": 6.076817159805267e-05, |
|
"loss": 3.5259, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.106011986732483, |
|
"learning_rate": 6.06053083933501e-05, |
|
"loss": 3.393, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0738811492919922, |
|
"learning_rate": 6.044232728250116e-05, |
|
"loss": 3.4602, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.1911791563034058, |
|
"learning_rate": 6.027923007747339e-05, |
|
"loss": 3.44, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.1135449409484863, |
|
"learning_rate": 6.011601859152506e-05, |
|
"loss": 3.4929, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.9973747134208679, |
|
"learning_rate": 5.995269463918495e-05, |
|
"loss": 3.6224, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.9805868268013, |
|
"learning_rate": 5.97892600362322e-05, |
|
"loss": 3.4072, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.9871159195899963, |
|
"learning_rate": 5.962571659967614e-05, |
|
"loss": 3.4855, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.9133853316307068, |
|
"learning_rate": 5.946206614773606e-05, |
|
"loss": 3.5552, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.06731379032135, |
|
"learning_rate": 5.929831049982103e-05, |
|
"loss": 3.4467, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.9730843901634216, |
|
"learning_rate": 5.9134451476509633e-05, |
|
"loss": 3.5087, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 1.1040936708450317, |
|
"learning_rate": 5.897049089952974e-05, |
|
"loss": 3.5024, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.1260329484939575, |
|
"learning_rate": 5.880643059173826e-05, |
|
"loss": 3.4753, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.0565217733383179, |
|
"learning_rate": 5.864227237710093e-05, |
|
"loss": 3.5087, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.034422755241394, |
|
"learning_rate": 5.847801808067189e-05, |
|
"loss": 3.4757, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.0072028636932373, |
|
"learning_rate": 5.831366952857357e-05, |
|
"loss": 3.431, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.0306864976882935, |
|
"learning_rate": 5.814922854797622e-05, |
|
"loss": 3.5302, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.169833779335022, |
|
"learning_rate": 5.798469696707775e-05, |
|
"loss": 3.425, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9371284246444702, |
|
"learning_rate": 5.782007661508331e-05, |
|
"loss": 3.4402, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.21083402633667, |
|
"learning_rate": 5.765536932218495e-05, |
|
"loss": 3.4139, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.1241546869277954, |
|
"learning_rate": 5.7490576919541315e-05, |
|
"loss": 3.4487, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 1.0151013135910034, |
|
"learning_rate": 5.732570123925729e-05, |
|
"loss": 3.5626, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.0404661893844604, |
|
"learning_rate": 5.7160744114363593e-05, |
|
"loss": 3.357, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.0313767194747925, |
|
"learning_rate": 5.699570737879641e-05, |
|
"loss": 3.4451, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.9385614395141602, |
|
"learning_rate": 5.683059286737702e-05, |
|
"loss": 3.4489, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.2350726127624512, |
|
"learning_rate": 5.666540241579139e-05, |
|
"loss": 3.5222, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.2488938570022583, |
|
"learning_rate": 5.6500137860569766e-05, |
|
"loss": 3.474, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.9713031649589539, |
|
"learning_rate": 5.633480103906624e-05, |
|
"loss": 3.4048, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.0884007215499878, |
|
"learning_rate": 5.616939378943834e-05, |
|
"loss": 3.4551, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.0945968627929688, |
|
"learning_rate": 5.6003917950626595e-05, |
|
"loss": 3.4321, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.3760701417922974, |
|
"learning_rate": 5.583837536233407e-05, |
|
"loss": 3.4909, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.0719841718673706, |
|
"learning_rate": 5.567276786500596e-05, |
|
"loss": 3.4773, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.0727452039718628, |
|
"learning_rate": 5.5507097299809054e-05, |
|
"loss": 3.4759, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.101324439048767, |
|
"learning_rate": 5.534136550861133e-05, |
|
"loss": 3.378, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.2684340476989746, |
|
"learning_rate": 5.5175574333961465e-05, |
|
"loss": 3.3992, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.119226098060608, |
|
"learning_rate": 5.500972561906832e-05, |
|
"loss": 3.5095, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.1630808115005493, |
|
"learning_rate": 5.484382120778048e-05, |
|
"loss": 3.4643, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.0694406032562256, |
|
"learning_rate": 5.467786294456575e-05, |
|
"loss": 3.4609, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.0865049362182617, |
|
"learning_rate": 5.451185267449061e-05, |
|
"loss": 3.462, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.9963206052780151, |
|
"learning_rate": 5.43457922431998e-05, |
|
"loss": 3.4361, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 1.2405842542648315, |
|
"learning_rate": 5.417968349689566e-05, |
|
"loss": 3.3828, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.3211381435394287, |
|
"learning_rate": 5.401352828231772e-05, |
|
"loss": 3.4652, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.0762677192687988, |
|
"learning_rate": 5.384732844672211e-05, |
|
"loss": 3.4598, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.0378797054290771, |
|
"learning_rate": 5.368108583786107e-05, |
|
"loss": 3.4911, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.2162408828735352, |
|
"learning_rate": 5.3514802303962344e-05, |
|
"loss": 3.457, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.988157331943512, |
|
"learning_rate": 5.334847969370868e-05, |
|
"loss": 3.4645, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.0698318481445312, |
|
"learning_rate": 5.3182119856217284e-05, |
|
"loss": 3.4249, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.9144718050956726, |
|
"learning_rate": 5.3015724641019214e-05, |
|
"loss": 3.4205, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 1.0714385509490967, |
|
"learning_rate": 5.284929589803884e-05, |
|
"loss": 3.3969, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.9795346856117249, |
|
"learning_rate": 5.2682835477573336e-05, |
|
"loss": 3.3832, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.9754644632339478, |
|
"learning_rate": 5.2516345230271965e-05, |
|
"loss": 3.3691, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.000421404838562, |
|
"learning_rate": 5.234982700711569e-05, |
|
"loss": 3.4015, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.9788889288902283, |
|
"learning_rate": 5.218328265939643e-05, |
|
"loss": 3.4135, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.037010669708252, |
|
"learning_rate": 5.201671403869657e-05, |
|
"loss": 3.4492, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.8524897694587708, |
|
"learning_rate": 5.1850122996868366e-05, |
|
"loss": 3.5092, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.112398624420166, |
|
"learning_rate": 5.168351138601334e-05, |
|
"loss": 3.3871, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.1374262571334839, |
|
"learning_rate": 5.1516881058461675e-05, |
|
"loss": 3.5498, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.0454888343811035, |
|
"learning_rate": 5.135023386675166e-05, |
|
"loss": 3.3942, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.1183348894119263, |
|
"learning_rate": 5.118357166360906e-05, |
|
"loss": 3.3447, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.9044198989868164, |
|
"learning_rate": 5.101689630192655e-05, |
|
"loss": 3.4698, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.0566176176071167, |
|
"learning_rate": 5.085020963474307e-05, |
|
"loss": 3.397, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.0596601963043213, |
|
"learning_rate": 5.068351351522329e-05, |
|
"loss": 3.4018, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.033179521560669, |
|
"learning_rate": 5.0516809796636935e-05, |
|
"loss": 3.3199, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.9158328771591187, |
|
"learning_rate": 5.035010033233821e-05, |
|
"loss": 3.3591, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.9977751970291138, |
|
"learning_rate": 5.018338697574523e-05, |
|
"loss": 3.4262, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.0123366117477417, |
|
"learning_rate": 5.0016671580319354e-05, |
|
"loss": 3.481, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.0733108520507812, |
|
"learning_rate": 4.984995599954461e-05, |
|
"loss": 3.4421, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.9767444133758545, |
|
"learning_rate": 4.968324208690712e-05, |
|
"loss": 3.3568, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.9529309272766113, |
|
"learning_rate": 4.951653169587441e-05, |
|
"loss": 3.3881, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 1.082856297492981, |
|
"learning_rate": 4.93498266798749e-05, |
|
"loss": 3.4472, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.0122766494750977, |
|
"learning_rate": 4.918312889227722e-05, |
|
"loss": 3.3907, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.982729971408844, |
|
"learning_rate": 4.901644018636966e-05, |
|
"loss": 3.3404, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.9235652089118958, |
|
"learning_rate": 4.8849762415339526e-05, |
|
"loss": 3.4075, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.9878057241439819, |
|
"learning_rate": 4.868309743225256e-05, |
|
"loss": 3.3355, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.1668199300765991, |
|
"learning_rate": 4.851644709003233e-05, |
|
"loss": 3.4238, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.0045877695083618, |
|
"learning_rate": 4.834981324143964e-05, |
|
"loss": 3.352, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.9545599818229675, |
|
"learning_rate": 4.818319773905191e-05, |
|
"loss": 3.5209, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.8998648524284363, |
|
"learning_rate": 4.801660243524261e-05, |
|
"loss": 3.4146, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 1.0703359842300415, |
|
"learning_rate": 4.7850029182160626e-05, |
|
"loss": 3.4293, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.9540716409683228, |
|
"learning_rate": 4.768347983170973e-05, |
|
"loss": 3.4172, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.002467393875122, |
|
"learning_rate": 4.7516956235527884e-05, |
|
"loss": 3.3264, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.9552346467971802, |
|
"learning_rate": 4.735046024496682e-05, |
|
"loss": 3.3276, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.9977598786354065, |
|
"learning_rate": 4.7183993711071286e-05, |
|
"loss": 3.3882, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.0754117965698242, |
|
"learning_rate": 4.7017558484558554e-05, |
|
"loss": 3.3855, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.1346033811569214, |
|
"learning_rate": 4.6851156415797844e-05, |
|
"loss": 3.336, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.9791164994239807, |
|
"learning_rate": 4.6684789354789746e-05, |
|
"loss": 3.423, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.0167591571807861, |
|
"learning_rate": 4.651845915114563e-05, |
|
"loss": 3.3428, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 1.0157413482666016, |
|
"learning_rate": 4.6352167654067095e-05, |
|
"loss": 3.4264, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.9716314077377319, |
|
"learning_rate": 4.618591671232544e-05, |
|
"loss": 3.3867, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.0501482486724854, |
|
"learning_rate": 4.601970817424106e-05, |
|
"loss": 3.3322, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.1115131378173828, |
|
"learning_rate": 4.585354388766292e-05, |
|
"loss": 3.376, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.9755733609199524, |
|
"learning_rate": 4.568742569994802e-05, |
|
"loss": 3.3261, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.0900994539260864, |
|
"learning_rate": 4.552135545794086e-05, |
|
"loss": 3.4289, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.934795618057251, |
|
"learning_rate": 4.535533500795288e-05, |
|
"loss": 3.3068, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.4602357149124146, |
|
"learning_rate": 4.5189366195741953e-05, |
|
"loss": 3.2843, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.00444495677948, |
|
"learning_rate": 4.502345086649186e-05, |
|
"loss": 3.4315, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.0988986492156982, |
|
"learning_rate": 4.485759086479179e-05, |
|
"loss": 3.3491, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 1.0917633771896362, |
|
"learning_rate": 4.469178803461579e-05, |
|
"loss": 3.414, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.9472582340240479, |
|
"learning_rate": 4.4526044219302326e-05, |
|
"loss": 3.3974, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.026991367340088, |
|
"learning_rate": 4.4360361261533745e-05, |
|
"loss": 3.4343, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.9805732369422913, |
|
"learning_rate": 4.419474100331579e-05, |
|
"loss": 3.3849, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.0283095836639404, |
|
"learning_rate": 4.402918528595715e-05, |
|
"loss": 3.3216, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.1360772848129272, |
|
"learning_rate": 4.386369595004896e-05, |
|
"loss": 3.3478, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.9674432873725891, |
|
"learning_rate": 4.3698274835444354e-05, |
|
"loss": 3.3886, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.9571623802185059, |
|
"learning_rate": 4.3532923781238e-05, |
|
"loss": 3.3687, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.9667463302612305, |
|
"learning_rate": 4.336764462574566e-05, |
|
"loss": 3.4309, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.9910677075386047, |
|
"learning_rate": 4.320243920648376e-05, |
|
"loss": 3.4069, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.3989684581756592, |
|
"learning_rate": 4.303730936014894e-05, |
|
"loss": 3.3443, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 1.076253056526184, |
|
"learning_rate": 4.287225692259765e-05, |
|
"loss": 3.2979, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.2015970945358276, |
|
"learning_rate": 4.270728372882575e-05, |
|
"loss": 3.4052, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.1010020971298218, |
|
"learning_rate": 4.254239161294804e-05, |
|
"loss": 3.4477, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.9749462604522705, |
|
"learning_rate": 4.237758240817802e-05, |
|
"loss": 3.3347, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.9020056128501892, |
|
"learning_rate": 4.2212857946807336e-05, |
|
"loss": 3.4074, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.8927140831947327, |
|
"learning_rate": 4.2048220060185516e-05, |
|
"loss": 3.4199, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.9286185503005981, |
|
"learning_rate": 4.188367057869957e-05, |
|
"loss": 3.4153, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.9509190320968628, |
|
"learning_rate": 4.171921133175365e-05, |
|
"loss": 3.2441, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.9190889000892639, |
|
"learning_rate": 4.155484414774872e-05, |
|
"loss": 3.3528, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.9416577816009521, |
|
"learning_rate": 4.139057085406221e-05, |
|
"loss": 3.3397, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.9352203607559204, |
|
"learning_rate": 4.1226393277027726e-05, |
|
"loss": 3.3165, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.0076632499694824, |
|
"learning_rate": 4.106231324191471e-05, |
|
"loss": 3.3903, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.970344603061676, |
|
"learning_rate": 4.089833257290817e-05, |
|
"loss": 3.3232, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.9177244305610657, |
|
"learning_rate": 4.073445309308842e-05, |
|
"loss": 3.3932, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.0965250730514526, |
|
"learning_rate": 4.0570676624410756e-05, |
|
"loss": 3.4276, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.0091582536697388, |
|
"learning_rate": 4.040700498768525e-05, |
|
"loss": 3.3576, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.025673508644104, |
|
"learning_rate": 4.024344000255648e-05, |
|
"loss": 3.4126, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.998465359210968, |
|
"learning_rate": 4.0079983487483313e-05, |
|
"loss": 3.3526, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.9938681125640869, |
|
"learning_rate": 3.9916637259718683e-05, |
|
"loss": 3.3711, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 1.008997917175293, |
|
"learning_rate": 3.9753403135289396e-05, |
|
"loss": 3.3428, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9713385701179504, |
|
"learning_rate": 3.9590282928975914e-05, |
|
"loss": 3.3509, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9508611559867859, |
|
"learning_rate": 3.942727845429221e-05, |
|
"loss": 3.4893, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9679702520370483, |
|
"learning_rate": 3.926439152346558e-05, |
|
"loss": 3.324, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9336875081062317, |
|
"learning_rate": 3.910162394741653e-05, |
|
"loss": 3.3954, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9901192784309387, |
|
"learning_rate": 3.893897753573861e-05, |
|
"loss": 3.3056, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.20081627368927, |
|
"learning_rate": 3.877645409667829e-05, |
|
"loss": 3.3136, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9638660550117493, |
|
"learning_rate": 3.861405543711491e-05, |
|
"loss": 3.4024, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.9280533194541931, |
|
"learning_rate": 3.8451783362540507e-05, |
|
"loss": 3.3105, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 1.077518105506897, |
|
"learning_rate": 3.828963967703983e-05, |
|
"loss": 3.4049, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.8907383680343628, |
|
"learning_rate": 3.8127626183270223e-05, |
|
"loss": 3.4606, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.9571870565414429, |
|
"learning_rate": 3.796574468244161e-05, |
|
"loss": 3.3415, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.8469297885894775, |
|
"learning_rate": 3.7803996974296444e-05, |
|
"loss": 3.3511, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.0232789516448975, |
|
"learning_rate": 3.7642384857089776e-05, |
|
"loss": 3.3848, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.8824405670166016, |
|
"learning_rate": 3.748091012756915e-05, |
|
"loss": 3.3029, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.9363697171211243, |
|
"learning_rate": 3.731957458095467e-05, |
|
"loss": 3.4415, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.9917386174201965, |
|
"learning_rate": 3.71583800109191e-05, |
|
"loss": 3.3475, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.919383704662323, |
|
"learning_rate": 3.699732820956784e-05, |
|
"loss": 3.3903, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.8919963240623474, |
|
"learning_rate": 3.6836420967419057e-05, |
|
"loss": 3.4202, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.8788687586784363, |
|
"learning_rate": 3.6675660073383745e-05, |
|
"loss": 3.3453, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.1343214511871338, |
|
"learning_rate": 3.6515047314745856e-05, |
|
"loss": 3.4452, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.8609405755996704, |
|
"learning_rate": 3.6354584477142437e-05, |
|
"loss": 3.3719, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.921087384223938, |
|
"learning_rate": 3.6194273344543736e-05, |
|
"loss": 3.3104, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.9409319162368774, |
|
"learning_rate": 3.6034115699233425e-05, |
|
"loss": 3.2578, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.0189098119735718, |
|
"learning_rate": 3.5874113321788736e-05, |
|
"loss": 3.3444, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.9592056274414062, |
|
"learning_rate": 3.571426799106071e-05, |
|
"loss": 3.3167, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.0163627862930298, |
|
"learning_rate": 3.555458148415437e-05, |
|
"loss": 3.3269, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.9534558057785034, |
|
"learning_rate": 3.539505557640901e-05, |
|
"loss": 3.3309, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.9127333164215088, |
|
"learning_rate": 3.523569204137843e-05, |
|
"loss": 3.3732, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.0162489414215088, |
|
"learning_rate": 3.5076492650811246e-05, |
|
"loss": 3.2918, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.9052246809005737, |
|
"learning_rate": 3.491745917463113e-05, |
|
"loss": 3.3155, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.2741327285766602, |
|
"learning_rate": 3.475859338091721e-05, |
|
"loss": 3.3678, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.9226068258285522, |
|
"learning_rate": 3.4599897035884374e-05, |
|
"loss": 3.3418, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.9619899392127991, |
|
"learning_rate": 3.444137190386363e-05, |
|
"loss": 3.3496, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.0131815671920776, |
|
"learning_rate": 3.4283019747282514e-05, |
|
"loss": 3.324, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.9577188491821289, |
|
"learning_rate": 3.412484232664545e-05, |
|
"loss": 3.2871, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.0029125213623047, |
|
"learning_rate": 3.396684140051424e-05, |
|
"loss": 3.356, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.9922611713409424, |
|
"learning_rate": 3.3809018725488466e-05, |
|
"loss": 3.2954, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.943458080291748, |
|
"learning_rate": 3.365137605618598e-05, |
|
"loss": 3.3505, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.9286159873008728, |
|
"learning_rate": 3.3493915145223395e-05, |
|
"loss": 3.3266, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.8530454039573669, |
|
"learning_rate": 3.3336637743196584e-05, |
|
"loss": 3.2675, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.9328925609588623, |
|
"learning_rate": 3.317954559866126e-05, |
|
"loss": 3.2409, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.931462287902832, |
|
"learning_rate": 3.302264045811344e-05, |
|
"loss": 3.3505, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.0057929754257202, |
|
"learning_rate": 3.286592406597021e-05, |
|
"loss": 3.3282, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.9094107747077942, |
|
"learning_rate": 3.270939816455012e-05, |
|
"loss": 3.29, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.9364352822303772, |
|
"learning_rate": 3.255306449405395e-05, |
|
"loss": 3.3582, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.9047064185142517, |
|
"learning_rate": 3.2396924792545304e-05, |
|
"loss": 3.4079, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.9266229271888733, |
|
"learning_rate": 3.224098079593132e-05, |
|
"loss": 3.2917, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 1.0187420845031738, |
|
"learning_rate": 3.2085234237943354e-05, |
|
"loss": 3.3806, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.8054633140563965, |
|
"learning_rate": 3.19296868501177e-05, |
|
"loss": 3.2928, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.8668712377548218, |
|
"learning_rate": 3.177434036177636e-05, |
|
"loss": 3.2372, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.8695892095565796, |
|
"learning_rate": 3.1619196500007804e-05, |
|
"loss": 3.2712, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.868233323097229, |
|
"learning_rate": 3.146425698964776e-05, |
|
"loss": 3.3161, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.9072175025939941, |
|
"learning_rate": 3.1309523553260046e-05, |
|
"loss": 3.3399, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.8848998546600342, |
|
"learning_rate": 3.115499791111743e-05, |
|
"loss": 3.4425, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.9443853497505188, |
|
"learning_rate": 3.10006817811825e-05, |
|
"loss": 3.3577, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.031072974205017, |
|
"learning_rate": 3.084657687908855e-05, |
|
"loss": 3.3258, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.9901952147483826, |
|
"learning_rate": 3.069268491812052e-05, |
|
"loss": 3.2956, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.9264650940895081, |
|
"learning_rate": 3.0539007609195934e-05, |
|
"loss": 3.3224, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.9035649299621582, |
|
"learning_rate": 3.0385546660845908e-05, |
|
"loss": 3.2863, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.9281037449836731, |
|
"learning_rate": 3.0232303779196132e-05, |
|
"loss": 3.3212, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.096510648727417, |
|
"learning_rate": 3.0079280667947885e-05, |
|
"loss": 3.2815, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.8994583487510681, |
|
"learning_rate": 2.9926479028359132e-05, |
|
"loss": 3.408, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.9495896100997925, |
|
"learning_rate": 2.97739005592256e-05, |
|
"loss": 3.2415, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 1.040583848953247, |
|
"learning_rate": 2.962154695686187e-05, |
|
"loss": 3.2955, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.9189764261245728, |
|
"learning_rate": 2.9469419915082536e-05, |
|
"loss": 3.2703, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.9766911864280701, |
|
"learning_rate": 2.9317521125183368e-05, |
|
"loss": 3.3358, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.9531775712966919, |
|
"learning_rate": 2.9165852275922524e-05, |
|
"loss": 3.3381, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.0340322256088257, |
|
"learning_rate": 2.901441505350174e-05, |
|
"loss": 3.2312, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.9244762063026428, |
|
"learning_rate": 2.886321114154762e-05, |
|
"loss": 3.3023, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.0161535739898682, |
|
"learning_rate": 2.87122422210929e-05, |
|
"loss": 3.2473, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.9165362119674683, |
|
"learning_rate": 2.8561509970557736e-05, |
|
"loss": 3.3181, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.9139227867126465, |
|
"learning_rate": 2.8411016065731146e-05, |
|
"loss": 3.2694, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.9805368185043335, |
|
"learning_rate": 2.826076217975222e-05, |
|
"loss": 3.2982, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.9517117142677307, |
|
"learning_rate": 2.8110749983091632e-05, |
|
"loss": 3.3033, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 1.053392767906189, |
|
"learning_rate": 2.7960981143533053e-05, |
|
"loss": 3.2751, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.9289665222167969, |
|
"learning_rate": 2.781145732615457e-05, |
|
"loss": 3.3134, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.9292173981666565, |
|
"learning_rate": 2.7662180193310218e-05, |
|
"loss": 3.3168, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.1169716119766235, |
|
"learning_rate": 2.751315140461145e-05, |
|
"loss": 3.2929, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.919864296913147, |
|
"learning_rate": 2.7364372616908744e-05, |
|
"loss": 3.3529, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.9498540163040161, |
|
"learning_rate": 2.7215845484273152e-05, |
|
"loss": 3.3366, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.0325037240982056, |
|
"learning_rate": 2.7067571657977893e-05, |
|
"loss": 3.2316, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.8834294080734253, |
|
"learning_rate": 2.691955278648003e-05, |
|
"loss": 3.278, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.9540823698043823, |
|
"learning_rate": 2.6771790515402112e-05, |
|
"loss": 3.2605, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.8876092433929443, |
|
"learning_rate": 2.6624286487513916e-05, |
|
"loss": 3.2087, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.9012912511825562, |
|
"learning_rate": 2.6477042342714137e-05, |
|
"loss": 3.2982, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.903864324092865, |
|
"learning_rate": 2.633005971801219e-05, |
|
"loss": 3.3177, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.9462845921516418, |
|
"learning_rate": 2.6183340247510013e-05, |
|
"loss": 3.255, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.859810471534729, |
|
"learning_rate": 2.6036885562383856e-05, |
|
"loss": 3.24, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.921997606754303, |
|
"learning_rate": 2.5890697290866206e-05, |
|
"loss": 3.3059, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.8770375847816467, |
|
"learning_rate": 2.5744777058227642e-05, |
|
"loss": 3.3065, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.9048674702644348, |
|
"learning_rate": 2.5599126486758777e-05, |
|
"loss": 3.2547, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.899581253528595, |
|
"learning_rate": 2.5453747195752243e-05, |
|
"loss": 3.2962, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.9766543507575989, |
|
"learning_rate": 2.530864080148464e-05, |
|
"loss": 3.3386, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.956592857837677, |
|
"learning_rate": 2.5163808917198615e-05, |
|
"loss": 3.3397, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.9843078255653381, |
|
"learning_rate": 2.501925315308492e-05, |
|
"loss": 3.2581, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.9282741546630859, |
|
"learning_rate": 2.4874975116264477e-05, |
|
"loss": 3.2057, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.8186929821968079, |
|
"learning_rate": 2.4730976410770534e-05, |
|
"loss": 3.2798, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.9256529808044434, |
|
"learning_rate": 2.458725863753084e-05, |
|
"loss": 3.2866, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.030348300933838, |
|
"learning_rate": 2.4443823394349834e-05, |
|
"loss": 3.3567, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.8589518666267395, |
|
"learning_rate": 2.430067227589088e-05, |
|
"loss": 3.2716, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.9088854789733887, |
|
"learning_rate": 2.4157806873658517e-05, |
|
"loss": 3.3572, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.8666104078292847, |
|
"learning_rate": 2.401522877598087e-05, |
|
"loss": 3.2997, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.873067319393158, |
|
"learning_rate": 2.3872939567991827e-05, |
|
"loss": 3.3017, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.8777161240577698, |
|
"learning_rate": 2.373094083161353e-05, |
|
"loss": 3.2611, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.9435715079307556, |
|
"learning_rate": 2.358923414553877e-05, |
|
"loss": 3.2632, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.8779476284980774, |
|
"learning_rate": 2.3447821085213405e-05, |
|
"loss": 3.3449, |
|
"step": 3295 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.9076412916183472, |
|
"learning_rate": 2.3306703222818878e-05, |
|
"loss": 3.3196, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 1.2708210945129395, |
|
"learning_rate": 2.3165882127254705e-05, |
|
"loss": 3.2802, |
|
"step": 3305 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.8561325073242188, |
|
"learning_rate": 2.302535936412108e-05, |
|
"loss": 3.2155, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.9210352301597595, |
|
"learning_rate": 2.2885136495701415e-05, |
|
"loss": 3.2985, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.9178744554519653, |
|
"learning_rate": 2.274521508094501e-05, |
|
"loss": 3.3691, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.894907534122467, |
|
"learning_rate": 2.2605596675449698e-05, |
|
"loss": 3.2381, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.9445805549621582, |
|
"learning_rate": 2.246628283144457e-05, |
|
"loss": 3.3258, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.9801211953163147, |
|
"learning_rate": 2.232727509777269e-05, |
|
"loss": 3.2626, |
|
"step": 3335 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.8563469052314758, |
|
"learning_rate": 2.2188575019873932e-05, |
|
"loss": 3.2606, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.9590172171592712, |
|
"learning_rate": 2.2050184139767704e-05, |
|
"loss": 3.259, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.8452922701835632, |
|
"learning_rate": 2.191210399603591e-05, |
|
"loss": 3.3287, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.8957239985466003, |
|
"learning_rate": 2.1774336123805772e-05, |
|
"loss": 3.204, |
|
"step": 3355 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.0064430236816406, |
|
"learning_rate": 2.1636882054732776e-05, |
|
"loss": 3.2428, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.8556531071662903, |
|
"learning_rate": 2.1499743316983684e-05, |
|
"loss": 3.2985, |
|
"step": 3365 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.8558668494224548, |
|
"learning_rate": 2.1362921435219473e-05, |
|
"loss": 3.2492, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.9391397833824158, |
|
"learning_rate": 2.1226417930578464e-05, |
|
"loss": 3.3506, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.999201238155365, |
|
"learning_rate": 2.109023432065935e-05, |
|
"loss": 3.312, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.9704054594039917, |
|
"learning_rate": 2.095437211950434e-05, |
|
"loss": 3.2287, |
|
"step": 3385 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.9226841330528259, |
|
"learning_rate": 2.0818832837582352e-05, |
|
"loss": 3.2592, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.8675829172134399, |
|
"learning_rate": 2.068361798177218e-05, |
|
"loss": 3.2965, |
|
"step": 3395 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.8511921763420105, |
|
"learning_rate": 2.0548729055345778e-05, |
|
"loss": 3.2273, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.8496411442756653, |
|
"learning_rate": 2.0414167557951514e-05, |
|
"loss": 3.3067, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.8244772553443909, |
|
"learning_rate": 2.0279934985597527e-05, |
|
"loss": 3.416, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.9402651190757751, |
|
"learning_rate": 2.0146032830635054e-05, |
|
"loss": 3.2653, |
|
"step": 3415 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.9962188005447388, |
|
"learning_rate": 2.001246258174192e-05, |
|
"loss": 3.2729, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.8802465796470642, |
|
"learning_rate": 1.9879225723905886e-05, |
|
"loss": 3.2681, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.8524779677391052, |
|
"learning_rate": 1.9746323738408203e-05, |
|
"loss": 3.2497, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.9403968453407288, |
|
"learning_rate": 1.9613758102807117e-05, |
|
"loss": 3.2378, |
|
"step": 3435 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 1.0809732675552368, |
|
"learning_rate": 1.9481530290921474e-05, |
|
"loss": 3.2091, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8482281565666199, |
|
"learning_rate": 1.934964177281428e-05, |
|
"loss": 3.2957, |
|
"step": 3445 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8832562565803528, |
|
"learning_rate": 1.9218094014776434e-05, |
|
"loss": 3.3247, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8952575922012329, |
|
"learning_rate": 1.9086888479310333e-05, |
|
"loss": 3.2158, |
|
"step": 3455 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 1.6820100545883179, |
|
"learning_rate": 1.895602662511371e-05, |
|
"loss": 3.2396, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.9064893126487732, |
|
"learning_rate": 1.8825509907063327e-05, |
|
"loss": 3.3192, |
|
"step": 3465 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8885228633880615, |
|
"learning_rate": 1.8695339776198872e-05, |
|
"loss": 3.2687, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.8821113705635071, |
|
"learning_rate": 1.8565517679706783e-05, |
|
"loss": 3.2581, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.907341480255127, |
|
"learning_rate": 1.8436045060904174e-05, |
|
"loss": 3.2212, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 1.1254795789718628, |
|
"learning_rate": 1.830692335922279e-05, |
|
"loss": 3.2318, |
|
"step": 3485 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8602316379547119, |
|
"learning_rate": 1.8178154010192994e-05, |
|
"loss": 3.3136, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8210873603820801, |
|
"learning_rate": 1.8049738445427822e-05, |
|
"loss": 3.369, |
|
"step": 3495 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8428252935409546, |
|
"learning_rate": 1.7921678092607052e-05, |
|
"loss": 3.2728, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8923355340957642, |
|
"learning_rate": 1.7793974375461352e-05, |
|
"loss": 3.336, |
|
"step": 3505 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8625919818878174, |
|
"learning_rate": 1.7666628713756417e-05, |
|
"loss": 3.3083, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8860301375389099, |
|
"learning_rate": 1.7539642523277228e-05, |
|
"loss": 3.2761, |
|
"step": 3515 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.9147588014602661, |
|
"learning_rate": 1.7413017215812273e-05, |
|
"loss": 3.288, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.9153177738189697, |
|
"learning_rate": 1.728675419913788e-05, |
|
"loss": 3.3425, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.8802617192268372, |
|
"learning_rate": 1.716085487700253e-05, |
|
"loss": 3.3332, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.9598279595375061, |
|
"learning_rate": 1.703532064911131e-05, |
|
"loss": 3.2123, |
|
"step": 3535 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.9063637852668762, |
|
"learning_rate": 1.6910152911110283e-05, |
|
"loss": 3.1825, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.9381482005119324, |
|
"learning_rate": 1.6785353054571024e-05, |
|
"loss": 3.2832, |
|
"step": 3545 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.8743510842323303, |
|
"learning_rate": 1.666092246697512e-05, |
|
"loss": 3.34, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.8579509854316711, |
|
"learning_rate": 1.6536862531698766e-05, |
|
"loss": 3.2737, |
|
"step": 3555 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.9608656167984009, |
|
"learning_rate": 1.6413174627997328e-05, |
|
"loss": 3.2559, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.941361665725708, |
|
"learning_rate": 1.6289860130990147e-05, |
|
"loss": 3.2698, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.9168158173561096, |
|
"learning_rate": 1.6166920411645064e-05, |
|
"loss": 3.3504, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.9752878546714783, |
|
"learning_rate": 1.6044356836763315e-05, |
|
"loss": 3.2641, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.8661676645278931, |
|
"learning_rate": 1.5922170768964285e-05, |
|
"loss": 3.287, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.8774048089981079, |
|
"learning_rate": 1.5800363566670362e-05, |
|
"loss": 3.2631, |
|
"step": 3585 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.819927990436554, |
|
"learning_rate": 1.5678936584091852e-05, |
|
"loss": 3.3321, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.835914671421051, |
|
"learning_rate": 1.5557891171211892e-05, |
|
"loss": 3.3336, |
|
"step": 3595 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.8567005395889282, |
|
"learning_rate": 1.5437228673771465e-05, |
|
"loss": 3.2515, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.8540778160095215, |
|
"learning_rate": 1.5316950433254445e-05, |
|
"loss": 3.1985, |
|
"step": 3605 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.8710616827011108, |
|
"learning_rate": 1.5197057786872649e-05, |
|
"loss": 3.3092, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.9007919430732727, |
|
"learning_rate": 1.5077552067551015e-05, |
|
"loss": 3.2059, |
|
"step": 3615 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.8696431517601013, |
|
"learning_rate": 1.4958434603912747e-05, |
|
"loss": 3.3528, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.8940817713737488, |
|
"learning_rate": 1.4839706720264546e-05, |
|
"loss": 3.2701, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.8489177227020264, |
|
"learning_rate": 1.4721369736581924e-05, |
|
"loss": 3.2409, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.9342685341835022, |
|
"learning_rate": 1.4603424968494484e-05, |
|
"loss": 3.2308, |
|
"step": 3635 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.9768315553665161, |
|
"learning_rate": 1.448587372727132e-05, |
|
"loss": 3.2947, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.8428696393966675, |
|
"learning_rate": 1.4368717319806419e-05, |
|
"loss": 3.2968, |
|
"step": 3645 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.8810005784034729, |
|
"learning_rate": 1.4251957048604152e-05, |
|
"loss": 3.2555, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.8546096682548523, |
|
"learning_rate": 1.413559421176479e-05, |
|
"loss": 3.2297, |
|
"step": 3655 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.8835750222206116, |
|
"learning_rate": 1.4019630102970056e-05, |
|
"loss": 3.2423, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.8346020579338074, |
|
"learning_rate": 1.3904066011468753e-05, |
|
"loss": 3.2519, |
|
"step": 3665 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.8157615661621094, |
|
"learning_rate": 1.3788903222062433e-05, |
|
"loss": 3.2553, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.8050662279129028, |
|
"learning_rate": 1.3674143015091118e-05, |
|
"loss": 3.2356, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.8705363273620605, |
|
"learning_rate": 1.355978666641905e-05, |
|
"loss": 3.2543, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.8249686360359192, |
|
"learning_rate": 1.3445835447420507e-05, |
|
"loss": 3.2328, |
|
"step": 3685 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.8292253017425537, |
|
"learning_rate": 1.3332290624965688e-05, |
|
"loss": 3.2763, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.8558942079544067, |
|
"learning_rate": 1.3219153461406609e-05, |
|
"loss": 3.2703, |
|
"step": 3695 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.8376969695091248, |
|
"learning_rate": 1.3106425214563078e-05, |
|
"loss": 3.1923, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.8428564071655273, |
|
"learning_rate": 1.2994107137708716e-05, |
|
"loss": 3.2835, |
|
"step": 3705 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.8432294726371765, |
|
"learning_rate": 1.2882200479556988e-05, |
|
"loss": 3.252, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.8771489858627319, |
|
"learning_rate": 1.2770706484247397e-05, |
|
"loss": 3.2667, |
|
"step": 3715 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.9042550921440125, |
|
"learning_rate": 1.2659626391331564e-05, |
|
"loss": 3.213, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.8231926560401917, |
|
"learning_rate": 1.2548961435759493e-05, |
|
"loss": 3.2232, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.833065927028656, |
|
"learning_rate": 1.2438712847865846e-05, |
|
"loss": 3.1761, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.9881918430328369, |
|
"learning_rate": 1.2328881853356244e-05, |
|
"loss": 3.2745, |
|
"step": 3735 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.8577405214309692, |
|
"learning_rate": 1.221946967329365e-05, |
|
"loss": 3.225, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.8643710017204285, |
|
"learning_rate": 1.2110477524084796e-05, |
|
"loss": 3.2312, |
|
"step": 3745 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.9053153395652771, |
|
"learning_rate": 1.2001906617466657e-05, |
|
"loss": 3.3218, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 1.6423649787902832, |
|
"learning_rate": 1.1893758160492978e-05, |
|
"loss": 3.2066, |
|
"step": 3755 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.8391192555427551, |
|
"learning_rate": 1.1786033355520859e-05, |
|
"loss": 3.3188, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.8088417649269104, |
|
"learning_rate": 1.1678733400197373e-05, |
|
"loss": 3.2506, |
|
"step": 3765 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.829933226108551, |
|
"learning_rate": 1.1571859487446263e-05, |
|
"loss": 3.2841, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.8629627227783203, |
|
"learning_rate": 1.1465412805454695e-05, |
|
"loss": 3.2619, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.8171300888061523, |
|
"learning_rate": 1.1359394537660011e-05, |
|
"loss": 3.1963, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.8476045727729797, |
|
"learning_rate": 1.125380586273661e-05, |
|
"loss": 3.2401, |
|
"step": 3785 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.8052193522453308, |
|
"learning_rate": 1.1148647954582808e-05, |
|
"loss": 3.2368, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.8845239877700806, |
|
"learning_rate": 1.1043921982307819e-05, |
|
"loss": 3.2525, |
|
"step": 3795 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.843393087387085, |
|
"learning_rate": 1.0939629110218735e-05, |
|
"loss": 3.2181, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.8983203768730164, |
|
"learning_rate": 1.0835770497807596e-05, |
|
"loss": 3.2595, |
|
"step": 3805 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.8705394268035889, |
|
"learning_rate": 1.0732347299738493e-05, |
|
"loss": 3.2674, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.841873288154602, |
|
"learning_rate": 1.0629360665834732e-05, |
|
"loss": 3.2982, |
|
"step": 3815 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.81884765625, |
|
"learning_rate": 1.052681174106604e-05, |
|
"loss": 3.2157, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.9083743095397949, |
|
"learning_rate": 1.0424701665535852e-05, |
|
"loss": 3.2913, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8582718968391418, |
|
"learning_rate": 1.0323031574468638e-05, |
|
"loss": 3.244, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.9574825167655945, |
|
"learning_rate": 1.0221802598197261e-05, |
|
"loss": 3.21, |
|
"step": 3835 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8792760372161865, |
|
"learning_rate": 1.0121015862150423e-05, |
|
"loss": 3.2714, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8689337372779846, |
|
"learning_rate": 1.0020672486840154e-05, |
|
"loss": 3.276, |
|
"step": 3845 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8836303353309631, |
|
"learning_rate": 9.920773587849364e-06, |
|
"loss": 3.2494, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.822163462638855, |
|
"learning_rate": 9.821320275819401e-06, |
|
"loss": 3.2138, |
|
"step": 3855 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.8732519149780273, |
|
"learning_rate": 9.72231365643777e-06, |
|
"loss": 3.2743, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.9054784178733826, |
|
"learning_rate": 9.623754830425779e-06, |
|
"loss": 3.1878, |
|
"step": 3865 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.2136757373809814, |
|
"learning_rate": 9.52564489352632e-06, |
|
"loss": 3.2454, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.9085641503334045, |
|
"learning_rate": 9.427984936491702e-06, |
|
"loss": 3.2894, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.9027767777442932, |
|
"learning_rate": 9.330776045071509e-06, |
|
"loss": 3.1571, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.1602563858032227, |
|
"learning_rate": 9.23401930000054e-06, |
|
"loss": 3.2342, |
|
"step": 3885 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.913773775100708, |
|
"learning_rate": 9.137715776986772e-06, |
|
"loss": 3.2798, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.9193867444992065, |
|
"learning_rate": 9.041866546699434e-06, |
|
"loss": 3.2185, |
|
"step": 3895 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.0573819875717163, |
|
"learning_rate": 8.946472674757078e-06, |
|
"loss": 3.2578, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.9102919697761536, |
|
"learning_rate": 8.851535221715735e-06, |
|
"loss": 3.2902, |
|
"step": 3905 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.1361302137374878, |
|
"learning_rate": 8.757055243057132e-06, |
|
"loss": 3.2576, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 1.115525722503662, |
|
"learning_rate": 8.663033789176967e-06, |
|
"loss": 3.2436, |
|
"step": 3915 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.8845899105072021, |
|
"learning_rate": 8.5694719053732e-06, |
|
"loss": 3.247, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.9481613039970398, |
|
"learning_rate": 8.476370631834458e-06, |
|
"loss": 3.2395, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.8820552825927734, |
|
"learning_rate": 8.383731003628452e-06, |
|
"loss": 3.2178, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.8499136567115784, |
|
"learning_rate": 8.291554050690508e-06, |
|
"loss": 3.2706, |
|
"step": 3935 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.8514552116394043, |
|
"learning_rate": 8.199840797812058e-06, |
|
"loss": 3.2353, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.8272942900657654, |
|
"learning_rate": 8.108592264629295e-06, |
|
"loss": 3.2831, |
|
"step": 3945 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.8572783470153809, |
|
"learning_rate": 8.017809465611803e-06, |
|
"loss": 3.2337, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.8681054711341858, |
|
"learning_rate": 7.927493410051324e-06, |
|
"loss": 3.2473, |
|
"step": 3955 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.9493885040283203, |
|
"learning_rate": 7.837645102050473e-06, |
|
"loss": 3.2802, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.881588876247406, |
|
"learning_rate": 7.748265540511635e-06, |
|
"loss": 3.3412, |
|
"step": 3965 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.8257955312728882, |
|
"learning_rate": 7.65935571912582e-06, |
|
"loss": 3.2538, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.848596453666687, |
|
"learning_rate": 7.5709166263616405e-06, |
|
"loss": 3.2224, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.9227803349494934, |
|
"learning_rate": 7.482949245454302e-06, |
|
"loss": 3.286, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.8736422061920166, |
|
"learning_rate": 7.3954545543946876e-06, |
|
"loss": 3.286, |
|
"step": 3985 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.8847970366477966, |
|
"learning_rate": 7.308433525918468e-06, |
|
"loss": 3.3588, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.8576563000679016, |
|
"learning_rate": 7.221887127495313e-06, |
|
"loss": 3.2342, |
|
"step": 3995 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.9085653424263, |
|
"learning_rate": 7.1358163213181114e-06, |
|
"loss": 3.1536, |
|
"step": 4000 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 4811, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 1000, |
|
"total_flos": 1.0865380348133376e+19, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|