{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 2144,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0023320895522388058, "grad_norm": 3.7231960217328877, "learning_rate": 2.3148148148148148e-06, "loss": 1.0047, "step": 5 },
    { "epoch": 0.0046641791044776115, "grad_norm": 2.8583368538866347, "learning_rate": 4.6296296296296296e-06, "loss": 0.9757, "step": 10 },
    { "epoch": 0.006996268656716418, "grad_norm": 2.8571738838176763, "learning_rate": 6.944444444444445e-06, "loss": 0.9678, "step": 15 },
    { "epoch": 0.009328358208955223, "grad_norm": 1.7369287409068157, "learning_rate": 9.259259259259259e-06, "loss": 0.9001, "step": 20 },
    { "epoch": 0.01166044776119403, "grad_norm": 1.1660883689238433, "learning_rate": 1.1574074074074075e-05, "loss": 0.8759, "step": 25 },
    { "epoch": 0.013992537313432836, "grad_norm": 1.1321862136040273, "learning_rate": 1.388888888888889e-05, "loss": 0.8381, "step": 30 },
    { "epoch": 0.01632462686567164, "grad_norm": 1.024631758461693, "learning_rate": 1.6203703703703704e-05, "loss": 0.834, "step": 35 },
    { "epoch": 0.018656716417910446, "grad_norm": 0.9053510661908479, "learning_rate": 1.8518518518518518e-05, "loss": 0.8172, "step": 40 },
    { "epoch": 0.020988805970149255, "grad_norm": 1.0170230987058317, "learning_rate": 2.0833333333333336e-05, "loss": 0.8146, "step": 45 },
    { "epoch": 0.02332089552238806, "grad_norm": 0.9764899017407078, "learning_rate": 2.314814814814815e-05, "loss": 0.7891, "step": 50 },
    { "epoch": 0.025652985074626867, "grad_norm": 1.0251102524307585, "learning_rate": 2.5462962962962965e-05, "loss": 0.7814, "step": 55 },
    { "epoch": 0.027985074626865673, "grad_norm": 1.1022491122388751, "learning_rate": 2.777777777777778e-05, "loss": 0.7872, "step": 60 },
    { "epoch": 0.03031716417910448, "grad_norm": 1.0992277107302175, "learning_rate": 3.0092592592592593e-05, "loss": 0.7726, "step": 65 },
    { "epoch": 0.03264925373134328, "grad_norm": 1.0365397617690528, "learning_rate": 3.240740740740741e-05, "loss": 0.7455, "step": 70 },
    { "epoch": 0.034981343283582086, "grad_norm": 1.195178900196762, "learning_rate": 3.472222222222222e-05, "loss": 0.756, "step": 75 },
    { "epoch": 0.03731343283582089, "grad_norm": 1.1412528086977047, "learning_rate": 3.7037037037037037e-05, "loss": 0.7633, "step": 80 },
    { "epoch": 0.039645522388059705, "grad_norm": 1.2472194183265157, "learning_rate": 3.935185185185186e-05, "loss": 0.7557, "step": 85 },
    { "epoch": 0.04197761194029851, "grad_norm": 1.0570653948605382, "learning_rate": 4.166666666666667e-05, "loss": 0.7601, "step": 90 },
    { "epoch": 0.044309701492537316, "grad_norm": 1.1587882371432328, "learning_rate": 4.3981481481481486e-05, "loss": 0.7417, "step": 95 },
    { "epoch": 0.04664179104477612, "grad_norm": 1.3162397865412059, "learning_rate": 4.62962962962963e-05, "loss": 0.739, "step": 100 },
    { "epoch": 0.04897388059701493, "grad_norm": 0.9985279247385236, "learning_rate": 4.8611111111111115e-05, "loss": 0.7687, "step": 105 },
    { "epoch": 0.051305970149253734, "grad_norm": 1.2887741163380346, "learning_rate": 4.999989285883431e-05, "loss": 0.7664, "step": 110 },
    { "epoch": 0.05363805970149254, "grad_norm": 1.2392518485033788, "learning_rate": 4.999868753243865e-05, "loss": 0.7675, "step": 115 },
    { "epoch": 0.055970149253731345, "grad_norm": 1.8916579665083808, "learning_rate": 4.999614302517356e-05, "loss": 0.7598, "step": 120 },
    { "epoch": 0.05830223880597015, "grad_norm": 1.5520377612517253, "learning_rate": 4.9992259488494784e-05, "loss": 0.7424, "step": 125 },
    { "epoch": 0.06063432835820896, "grad_norm": 1.239718216272371, "learning_rate": 4.99870371535606e-05, "loss": 0.7366, "step": 130 },
    { "epoch": 0.06296641791044776, "grad_norm": 0.9825341780149127, "learning_rate": 4.9980476331218066e-05, "loss": 0.786, "step": 135 },
    { "epoch": 0.06529850746268656, "grad_norm": 0.9155268841122092, "learning_rate": 4.997257741198456e-05, "loss": 0.7505, "step": 140 },
    { "epoch": 0.06763059701492537, "grad_norm": 1.082193107284934, "learning_rate": 4.996334086602444e-05, "loss": 0.7351, "step": 145 },
    { "epoch": 0.06996268656716417, "grad_norm": 1.0423700442715462, "learning_rate": 4.9952767243121146e-05, "loss": 0.7551, "step": 150 },
    { "epoch": 0.07229477611940298, "grad_norm": 0.9000144515873072, "learning_rate": 4.994085717264443e-05, "loss": 0.7368, "step": 155 },
    { "epoch": 0.07462686567164178, "grad_norm": 1.044303554591329, "learning_rate": 4.992761136351291e-05, "loss": 0.7436, "step": 160 },
    { "epoch": 0.07695895522388059, "grad_norm": 1.0470880900421262, "learning_rate": 4.99130306041519e-05, "loss": 0.7446, "step": 165 },
    { "epoch": 0.07929104477611941, "grad_norm": 1.1323111761895537, "learning_rate": 4.989711576244639e-05, "loss": 0.7226, "step": 170 },
    { "epoch": 0.08162313432835822, "grad_norm": 1.0668807832388134, "learning_rate": 4.9879867785689506e-05, "loss": 0.7107, "step": 175 },
    { "epoch": 0.08395522388059702, "grad_norm": 0.9414033524848218, "learning_rate": 4.986128770052603e-05, "loss": 0.7315, "step": 180 },
    { "epoch": 0.08628731343283583, "grad_norm": 0.9861314941949435, "learning_rate": 4.9841376612891365e-05, "loss": 0.7226, "step": 185 },
    { "epoch": 0.08861940298507463, "grad_norm": 0.9154465133645083, "learning_rate": 4.9820135707945634e-05, "loss": 0.719, "step": 190 },
    { "epoch": 0.09095149253731344, "grad_norm": 1.0049244209561472, "learning_rate": 4.97975662500032e-05, "loss": 0.7474, "step": 195 },
    { "epoch": 0.09328358208955224, "grad_norm": 0.9649064226232575, "learning_rate": 4.9773669582457364e-05, "loss": 0.7273, "step": 200 },
    { "epoch": 0.09561567164179105, "grad_norm": 1.0099484363867934, "learning_rate": 4.974844712770044e-05, "loss": 0.7456, "step": 205 },
    { "epoch": 0.09794776119402986, "grad_norm": 0.9092588577896343, "learning_rate": 4.972190038703905e-05, "loss": 0.7389, "step": 210 },
    { "epoch": 0.10027985074626866, "grad_norm": 1.0001019634384871, "learning_rate": 4.96940309406048e-05, "loss": 0.7334, "step": 215 },
    { "epoch": 0.10261194029850747, "grad_norm": 1.0388048983714429, "learning_rate": 4.966484044726024e-05, "loss": 0.7246, "step": 220 },
    { "epoch": 0.10494402985074627, "grad_norm": 1.0827378138832435, "learning_rate": 4.963433064450002e-05, "loss": 0.7208, "step": 225 },
    { "epoch": 0.10727611940298508, "grad_norm": 1.0427299412602653, "learning_rate": 4.9602503348347625e-05, "loss": 0.7133, "step": 230 },
    { "epoch": 0.10960820895522388, "grad_norm": 0.9388505104499245, "learning_rate": 4.956936045324716e-05, "loss": 0.6982, "step": 235 },
    { "epoch": 0.11194029850746269, "grad_norm": 0.8567679904473422, "learning_rate": 4.953490393195063e-05, "loss": 0.7518, "step": 240 },
    { "epoch": 0.1142723880597015, "grad_norm": 0.9211748764639448, "learning_rate": 4.9499135835400526e-05, "loss": 0.7287, "step": 245 },
    { "epoch": 0.1166044776119403, "grad_norm": 1.0303852066380612, "learning_rate": 4.9462058292607735e-05, "loss": 0.7134, "step": 250 },
    { "epoch": 0.11893656716417911, "grad_norm": 1.2197325105359402, "learning_rate": 4.942367351052481e-05, "loss": 0.7086, "step": 255 },
    { "epoch": 0.12126865671641791, "grad_norm": 0.8299444059232342, "learning_rate": 4.938398377391461e-05, "loss": 0.7187, "step": 260 },
    { "epoch": 0.12360074626865672, "grad_norm": 0.9224154095712178, "learning_rate": 4.9342991445214334e-05, "loss": 0.7126, "step": 265 },
    { "epoch": 0.1259328358208955, "grad_norm": 0.88090785003352, "learning_rate": 4.930069896439485e-05, "loss": 0.7105, "step": 270 },
    { "epoch": 0.12826492537313433, "grad_norm": 0.859476692536741, "learning_rate": 4.925710884881551e-05, "loss": 0.7071, "step": 275 },
    { "epoch": 0.13059701492537312, "grad_norm": 1.013753901988807, "learning_rate": 4.921222369307427e-05, "loss": 0.7205, "step": 280 },
    { "epoch": 0.13292910447761194, "grad_norm": 0.940182496551757, "learning_rate": 4.916604616885328e-05, "loss": 0.7139, "step": 285 },
    { "epoch": 0.13526119402985073, "grad_norm": 1.0259898104670397, "learning_rate": 4.9118579024759854e-05, "loss": 0.7194, "step": 290 },
    { "epoch": 0.13759328358208955, "grad_norm": 0.941096247874784, "learning_rate": 4.9069825086162865e-05, "loss": 0.694, "step": 295 },
    { "epoch": 0.13992537313432835, "grad_norm": 0.8605571897927781, "learning_rate": 4.901978725502454e-05, "loss": 0.7021, "step": 300 },
    { "epoch": 0.14225746268656717, "grad_norm": 1.012855599597888, "learning_rate": 4.89684685097278e-05, "loss": 0.6889, "step": 305 },
    { "epoch": 0.14458955223880596, "grad_norm": 0.9877119462322568, "learning_rate": 4.891587190489891e-05, "loss": 0.7154, "step": 310 },
    { "epoch": 0.14692164179104478, "grad_norm": 1.090755921619965, "learning_rate": 4.886200057122568e-05, "loss": 0.7195, "step": 315 },
    { "epoch": 0.14925373134328357, "grad_norm": 0.9811029143694675, "learning_rate": 4.880685771527114e-05, "loss": 0.7055, "step": 320 },
    { "epoch": 0.1515858208955224, "grad_norm": 0.8841142544985066, "learning_rate": 4.8750446619282646e-05, "loss": 0.7029, "step": 325 },
    { "epoch": 0.15391791044776118, "grad_norm": 0.8332150176518437, "learning_rate": 4.869277064099654e-05, "loss": 0.7093, "step": 330 },
    { "epoch": 0.15625, "grad_norm": 0.8744887046543461, "learning_rate": 4.8633833213438254e-05, "loss": 0.7065, "step": 335 },
    { "epoch": 0.15858208955223882, "grad_norm": 0.837397759622329, "learning_rate": 4.8573637844718e-05, "loss": 0.7325, "step": 340 },
    { "epoch": 0.1609141791044776, "grad_norm": 0.9698882033094054, "learning_rate": 4.851218811782195e-05, "loss": 0.7209, "step": 345 },
    { "epoch": 0.16324626865671643, "grad_norm": 0.8572480215660884, "learning_rate": 4.844948769039896e-05, "loss": 0.7117, "step": 350 },
    { "epoch": 0.16557835820895522, "grad_norm": 0.8647940595338452, "learning_rate": 4.8385540294542855e-05, "loss": 0.7059, "step": 355 },
    { "epoch": 0.16791044776119404, "grad_norm": 0.863764155230387, "learning_rate": 4.83203497365703e-05, "loss": 0.7114, "step": 360 },
    { "epoch": 0.17024253731343283, "grad_norm": 0.8063210763659003, "learning_rate": 4.825391989679422e-05, "loss": 0.7034, "step": 365 },
    { "epoch": 0.17257462686567165, "grad_norm": 0.8575257639465484, "learning_rate": 4.818625472929286e-05, "loss": 0.6947, "step": 370 },
    { "epoch": 0.17490671641791045, "grad_norm": 0.8200838613816108, "learning_rate": 4.811735826167436e-05, "loss": 0.681, "step": 375 },
    { "epoch": 0.17723880597014927, "grad_norm": 0.7823498981274288, "learning_rate": 4.8047234594837143e-05, "loss": 0.6958, "step": 380 },
    { "epoch": 0.17957089552238806, "grad_norm": 0.8190018216789188, "learning_rate": 4.7975887902725696e-05, "loss": 0.6835, "step": 385 },
    { "epoch": 0.18190298507462688, "grad_norm": 0.9214403517298152, "learning_rate": 4.7903322432082185e-05, "loss": 0.6984, "step": 390 },
    { "epoch": 0.18423507462686567, "grad_norm": 0.8273600510500148, "learning_rate": 4.78295425021937e-05, "loss": 0.6753, "step": 395 },
    { "epoch": 0.1865671641791045, "grad_norm": 0.9257746216157634, "learning_rate": 4.775455250463507e-05, "loss": 0.7063, "step": 400 },
    { "epoch": 0.18889925373134328, "grad_norm": 0.8553399615781841, "learning_rate": 4.767835690300759e-05, "loss": 0.7031, "step": 405 },
    { "epoch": 0.1912313432835821, "grad_norm": 0.8035273180959155, "learning_rate": 4.760096023267322e-05, "loss": 0.7098, "step": 410 },
    { "epoch": 0.1935634328358209, "grad_norm": 0.8691622488936168, "learning_rate": 4.752236710048472e-05, "loss": 0.6861, "step": 415 },
    { "epoch": 0.1958955223880597, "grad_norm": 0.9841920794382144, "learning_rate": 4.744258218451135e-05, "loss": 0.7031, "step": 420 },
    { "epoch": 0.1982276119402985, "grad_norm": 0.9498479171681536, "learning_rate": 4.736161023376051e-05, "loss": 0.6726, "step": 425 },
    { "epoch": 0.20055970149253732, "grad_norm": 0.8700794084478172, "learning_rate": 4.7279456067895e-05, "loss": 0.7153, "step": 430 },
    { "epoch": 0.20289179104477612, "grad_norm": 0.8127172667880727, "learning_rate": 4.719612457694616e-05, "loss": 0.6963, "step": 435 },
    { "epoch": 0.20522388059701493, "grad_norm": 0.7902973875049144, "learning_rate": 4.71116207210228e-05, "loss": 0.6773, "step": 440 },
    { "epoch": 0.20755597014925373, "grad_norm": 0.7788306399019677, "learning_rate": 4.7025949530016e-05, "loss": 0.6684, "step": 445 },
    { "epoch": 0.20988805970149255, "grad_norm": 0.7944637728792737, "learning_rate": 4.6939116103299655e-05, "loss": 0.6905, "step": 450 },
    { "epoch": 0.21222014925373134, "grad_norm": 0.8571602340727155, "learning_rate": 4.685112560942699e-05, "loss": 0.6881, "step": 455 },
    { "epoch": 0.21455223880597016, "grad_norm": 0.8780895287743348, "learning_rate": 4.676198328582288e-05, "loss": 0.6927, "step": 460 },
    { "epoch": 0.21688432835820895, "grad_norm": 0.9388682206384781, "learning_rate": 4.6671694438472154e-05, "loss": 0.7056, "step": 465 },
    { "epoch": 0.21921641791044777, "grad_norm": 0.8229322015185289, "learning_rate": 4.6580264441603724e-05, "loss": 0.6894, "step": 470 },
    { "epoch": 0.22154850746268656, "grad_norm": 0.9117054400216464, "learning_rate": 4.648769873737071e-05, "loss": 0.6972, "step": 475 },
    { "epoch": 0.22388059701492538, "grad_norm": 0.7774500802624019, "learning_rate": 4.6394002835526535e-05, "loss": 0.6969, "step": 480 },
    { "epoch": 0.22621268656716417, "grad_norm": 0.9974671450500393, "learning_rate": 4.6299182313096916e-05, "loss": 0.7168, "step": 485 },
    { "epoch": 0.228544776119403, "grad_norm": 0.7745214676810972, "learning_rate": 4.6203242814047946e-05, "loss": 0.6388, "step": 490 },
    { "epoch": 0.23087686567164178, "grad_norm": 0.7946997637352378, "learning_rate": 4.610619004895017e-05, "loss": 0.6932, "step": 495 },
    { "epoch": 0.2332089552238806, "grad_norm": 0.8376007460886711, "learning_rate": 4.6008029794638596e-05, "loss": 0.6626, "step": 500 },
    { "epoch": 0.2355410447761194, "grad_norm": 0.7750371283803145, "learning_rate": 4.590876789386893e-05, "loss": 0.6681, "step": 505 },
    { "epoch": 0.23787313432835822, "grad_norm": 0.8455168484121804, "learning_rate": 4.580841025496974e-05, "loss": 0.6786, "step": 510 },
    { "epoch": 0.240205223880597, "grad_norm": 0.7240530893309742, "learning_rate": 4.570696285149084e-05, "loss": 0.6825, "step": 515 },
    { "epoch": 0.24253731343283583, "grad_norm": 0.7536963649856001, "learning_rate": 4.560443172184763e-05, "loss": 0.6856, "step": 520 },
    { "epoch": 0.24486940298507462, "grad_norm": 0.7793205383613011, "learning_rate": 4.55008229689618e-05, "loss": 0.6667, "step": 525 },
    { "epoch": 0.24720149253731344, "grad_norm": 0.7157463633792304, "learning_rate": 4.539614275989793e-05, "loss": 0.6754, "step": 530 },
    { "epoch": 0.24953358208955223, "grad_norm": 0.7890577436144398, "learning_rate": 4.529039732549653e-05, "loss": 0.675, "step": 535 },
    { "epoch": 0.251865671641791, "grad_norm": 0.7846004685785093, "learning_rate": 4.5183592960003104e-05, "loss": 0.6635, "step": 540 },
    { "epoch": 0.25419776119402987, "grad_norm": 0.8071994271154869, "learning_rate": 4.507573602069351e-05, "loss": 0.6773, "step": 545 },
    { "epoch": 0.25652985074626866, "grad_norm": 0.8233537619340792, "learning_rate": 4.496683292749555e-05, "loss": 0.6719, "step": 550 },
    { "epoch": 0.25886194029850745, "grad_norm": 0.7715749039189433, "learning_rate": 4.485689016260686e-05, "loss": 0.6643, "step": 555 },
    { "epoch": 0.26119402985074625, "grad_norm": 0.7396872015907794, "learning_rate": 4.4745914270109055e-05, "loss": 0.6943, "step": 560 },
    { "epoch": 0.2635261194029851, "grad_norm": 0.7904102147449842, "learning_rate": 4.463391185557822e-05, "loss": 0.6819, "step": 565 },
    { "epoch": 0.2658582089552239, "grad_norm": 0.7165693906401905, "learning_rate": 4.4520889585691705e-05, "loss": 0.6865, "step": 570 },
    { "epoch": 0.2681902985074627, "grad_norm": 0.8804513560159245, "learning_rate": 4.440685418783135e-05, "loss": 0.6631, "step": 575 },
    { "epoch": 0.27052238805970147, "grad_norm": 0.7607710965405562, "learning_rate": 4.429181244968301e-05, "loss": 0.6699, "step": 580 },
    { "epoch": 0.2728544776119403, "grad_norm": 0.7620322976601013, "learning_rate": 4.417577121883256e-05, "loss": 0.669, "step": 585 },
    { "epoch": 0.2751865671641791, "grad_norm": 0.7328194761484526, "learning_rate": 4.4058737402358295e-05, "loss": 0.6488, "step": 590 },
    { "epoch": 0.2775186567164179, "grad_norm": 0.7634679687008787, "learning_rate": 4.394071796641983e-05, "loss": 0.6729, "step": 595 },
    { "epoch": 0.2798507462686567, "grad_norm": 0.8531001469934757, "learning_rate": 4.38217199358434e-05, "loss": 0.685, "step": 600 },
    { "epoch": 0.28218283582089554, "grad_norm": 0.7264481750331164, "learning_rate": 4.3701750393703786e-05, "loss": 0.6461, "step": 605 },
    { "epoch": 0.28451492537313433, "grad_norm": 0.7533827654944563, "learning_rate": 4.3580816480902656e-05, "loss": 0.6745, "step": 610 },
    { "epoch": 0.2868470149253731, "grad_norm": 0.7458225351946263, "learning_rate": 4.345892539574359e-05, "loss": 0.6831, "step": 615 },
    { "epoch": 0.2891791044776119, "grad_norm": 0.724365300513045, "learning_rate": 4.3336084393503545e-05, "loss": 0.6546, "step": 620 },
    { "epoch": 0.29151119402985076, "grad_norm": 0.7838535311430127, "learning_rate": 4.3212300786001045e-05, "loss": 0.6577, "step": 625 },
    { "epoch": 0.29384328358208955, "grad_norm": 0.7715960756766049, "learning_rate": 4.308758194116094e-05, "loss": 0.684, "step": 630 },
    { "epoch": 0.29617537313432835, "grad_norm": 0.7160624711169831, "learning_rate": 4.296193528257586e-05, "loss": 0.6893, "step": 635 },
    { "epoch": 0.29850746268656714, "grad_norm": 0.7745707050758446, "learning_rate": 4.283536828906436e-05, "loss": 0.6789, "step": 640 },
    { "epoch": 0.300839552238806, "grad_norm": 0.8251172241406618, "learning_rate": 4.270788849422572e-05, "loss": 0.6623, "step": 645 },
    { "epoch": 0.3031716417910448, "grad_norm": 0.8355188301078915, "learning_rate": 4.2579503485991567e-05, "loss": 0.6724, "step": 650 },
    { "epoch": 0.30550373134328357, "grad_norm": 0.8380841544851293, "learning_rate": 4.245022090617418e-05, "loss": 0.6765, "step": 655 },
    { "epoch": 0.30783582089552236, "grad_norm": 0.7456377128567805, "learning_rate": 4.2320048450011684e-05, "loss": 0.6741, "step": 660 },
    { "epoch": 0.3101679104477612, "grad_norm": 0.6884827740191439, "learning_rate": 4.218899386570994e-05, "loss": 0.6556, "step": 665 },
    { "epoch": 0.3125, "grad_norm": 0.6695176963355354, "learning_rate": 4.205706495398143e-05, "loss": 0.6824, "step": 670 },
    { "epoch": 0.3148320895522388, "grad_norm": 0.6518330480049027, "learning_rate": 4.192426956758085e-05, "loss": 0.6452, "step": 675 },
    { "epoch": 0.31716417910447764, "grad_norm": 0.7023106383456971, "learning_rate": 4.179061561083777e-05, "loss": 0.6573, "step": 680 },
    { "epoch": 0.31949626865671643, "grad_norm": 0.6796008979702891, "learning_rate": 4.165611103918612e-05, "loss": 0.6813, "step": 685 },
    { "epoch": 0.3218283582089552, "grad_norm": 0.7234212222083802, "learning_rate": 4.1520763858690644e-05, "loss": 0.6563, "step": 690 },
    { "epoch": 0.324160447761194, "grad_norm": 0.8232766133796315, "learning_rate": 4.138458212557038e-05, "loss": 0.6789, "step": 695 },
    { "epoch": 0.32649253731343286, "grad_norm": 0.892744295966519, "learning_rate": 4.124757394571914e-05, "loss": 0.6511, "step": 700 },
    { "epoch": 0.32882462686567165, "grad_norm": 0.7465253057638759, "learning_rate": 4.110974747422299e-05, "loss": 0.6609, "step": 705 },
    { "epoch": 0.33115671641791045, "grad_norm": 0.9710864864368132, "learning_rate": 4.097111091487486e-05, "loss": 0.6567, "step": 710 },
    { "epoch": 0.33348880597014924, "grad_norm": 0.7586336649039006, "learning_rate": 4.083167251968625e-05, "loss": 0.6868, "step": 715 },
    { "epoch": 0.3358208955223881, "grad_norm": 0.7411454559784593, "learning_rate": 4.069144058839605e-05, "loss": 0.6715, "step": 720 },
    { "epoch": 0.3381529850746269, "grad_norm": 0.7406787177182912, "learning_rate": 4.055042346797643e-05, "loss": 0.6655, "step": 725 },
    { "epoch": 0.34048507462686567, "grad_norm": 0.806810167383283, "learning_rate": 4.040862955213615e-05, "loss": 0.6792, "step": 730 },
    { "epoch": 0.34281716417910446, "grad_norm": 0.684339497670028, "learning_rate": 4.026606728082082e-05, "loss": 0.6643, "step": 735 },
    { "epoch": 0.3451492537313433, "grad_norm": 0.7645437701144697, "learning_rate": 4.012274513971061e-05, "loss": 0.6839, "step": 740 },
    { "epoch": 0.3474813432835821, "grad_norm": 0.6793956256373396, "learning_rate": 3.997867165971512e-05, "loss": 0.6507, "step": 745 },
    { "epoch": 0.3498134328358209, "grad_norm": 0.720694420633502, "learning_rate": 3.9833855416465624e-05, "loss": 0.6643, "step": 750 },
    { "epoch": 0.3521455223880597, "grad_norm": 0.7523164344936248, "learning_rate": 3.968830502980459e-05, "loss": 0.6591, "step": 755 },
    { "epoch": 0.35447761194029853, "grad_norm": 0.7805853823171353, "learning_rate": 3.954202916327264e-05, "loss": 0.6549, "step": 760 },
    { "epoch": 0.3568097014925373, "grad_norm": 0.7234820701835654, "learning_rate": 3.939503652359287e-05, "loss": 0.6572, "step": 765 },
    { "epoch": 0.3591417910447761, "grad_norm": 0.7688015369932021, "learning_rate": 3.924733586015257e-05, "loss": 0.6533, "step": 770 },
    { "epoch": 0.3614738805970149, "grad_norm": 0.708671520124952, "learning_rate": 3.9098935964482476e-05, "loss": 0.6551, "step": 775 },
    { "epoch": 0.36380597014925375, "grad_norm": 0.7183227720244021, "learning_rate": 3.894984566973346e-05, "loss": 0.6646, "step": 780 },
    { "epoch": 0.36613805970149255, "grad_norm": 0.6737145813609308, "learning_rate": 3.880007385015075e-05, "loss": 0.6511, "step": 785 },
    { "epoch": 0.36847014925373134, "grad_norm": 0.6920229984205842, "learning_rate": 3.864962942054572e-05, "loss": 0.6818, "step": 790 },
    { "epoch": 0.37080223880597013, "grad_norm": 0.775113876143003, "learning_rate": 3.849852133576527e-05, "loss": 0.6435, "step": 795 },
    { "epoch": 0.373134328358209, "grad_norm": 0.7394720394307068, "learning_rate": 3.834675859015876e-05, "loss": 0.671, "step": 800 },
    { "epoch": 0.37546641791044777, "grad_norm": 0.7324224028801083, "learning_rate": 3.819435021704274e-05, "loss": 0.66, "step": 805 },
    { "epoch": 0.37779850746268656, "grad_norm": 0.7482922307683427, "learning_rate": 3.804130528816312e-05, "loss": 0.6605, "step": 810 },
    { "epoch": 0.38013059701492535, "grad_norm": 0.693797019256493, "learning_rate": 3.7887632913155355e-05, "loss": 0.6459, "step": 815 },
    { "epoch": 0.3824626865671642, "grad_norm": 0.7332686963960172, "learning_rate": 3.77333422390021e-05, "loss": 0.6386, "step": 820 },
    { "epoch": 0.384794776119403, "grad_norm": 0.6722013452671626, "learning_rate": 3.75784424494888e-05, "loss": 0.6509, "step": 825 },
    { "epoch": 0.3871268656716418, "grad_norm": 0.6844841017486335, "learning_rate": 3.7422942764657054e-05, "loss": 0.6624, "step": 830 },
    { "epoch": 0.3894589552238806, "grad_norm": 0.709365449407489, "learning_rate": 3.726685244025578e-05, "loss": 0.6662, "step": 835 },
    { "epoch": 0.3917910447761194, "grad_norm": 0.7295699828504463, "learning_rate": 3.711018076719034e-05, "loss": 0.6578, "step": 840 },
    { "epoch": 0.3941231343283582, "grad_norm": 0.7263503777736142, "learning_rate": 3.695293707096947e-05, "loss": 0.6708, "step": 845 },
    { "epoch": 0.396455223880597, "grad_norm": 0.7812017284771229, "learning_rate": 3.679513071115025e-05, "loss": 0.6617, "step": 850 },
    { "epoch": 0.3987873134328358, "grad_norm": 0.6765328713271164, "learning_rate": 3.663677108078094e-05, "loss": 0.6674, "step": 855 },
    { "epoch": 0.40111940298507465, "grad_norm": 0.6874658416764338, "learning_rate": 3.647786760584194e-05, "loss": 0.657, "step": 860 },
    { "epoch": 0.40345149253731344, "grad_norm": 0.6396850329707013, "learning_rate": 3.6318429744684676e-05, "loss": 0.6728, "step": 865 },
    { "epoch": 0.40578358208955223, "grad_norm": 0.7376511879685435, "learning_rate": 3.615846698746869e-05, "loss": 0.6626, "step": 870 },
    { "epoch": 0.408115671641791, "grad_norm": 0.7037616699556043, "learning_rate": 3.599798885559667e-05, "loss": 0.677, "step": 875 },
    { "epoch": 0.41044776119402987, "grad_norm": 0.6519988831184151, "learning_rate": 3.583700490114776e-05, "loss": 0.6385, "step": 880 },
    { "epoch": 0.41277985074626866, "grad_norm": 0.6836720339790674, "learning_rate": 3.5675524706309014e-05, "loss": 0.6397, "step": 885 },
    { "epoch": 0.41511194029850745, "grad_norm": 0.6577966791961214, "learning_rate": 3.5513557882805e-05, "loss": 0.6718, "step": 890 },
    { "epoch": 0.41744402985074625, "grad_norm": 0.6676911965920004, "learning_rate": 3.5351114071325696e-05, "loss": 0.6552, "step": 895 },
    { "epoch": 0.4197761194029851, "grad_norm": 0.6775036363841046, "learning_rate": 3.518820294095267e-05, "loss": 0.66, "step": 900 },
    { "epoch": 0.4221082089552239, "grad_norm": 0.7274179785465997, "learning_rate": 3.50248341885835e-05, "loss": 0.6457, "step": 905 },
    { "epoch": 0.4244402985074627, "grad_norm": 0.6365285914594158, "learning_rate": 3.486101753835468e-05, "loss": 0.6549, "step": 910 },
    { "epoch": 0.42677238805970147, "grad_norm": 0.7015328692797856, "learning_rate": 3.469676274106271e-05, "loss": 0.6446, "step": 915 },
    { "epoch": 0.4291044776119403, "grad_norm": 0.6897225314313983, "learning_rate": 3.453207957358377e-05, "loss": 0.6481, "step": 920 },
    { "epoch": 0.4314365671641791, "grad_norm": 0.7018612270350465, "learning_rate": 3.436697783829178e-05, "loss": 0.6421, "step": 925 },
    { "epoch": 0.4337686567164179, "grad_norm": 0.683607419776046, "learning_rate": 3.420146736247487e-05, "loss": 0.6262, "step": 930 },
    { "epoch": 0.4361007462686567, "grad_norm": 0.6817625776138896, "learning_rate": 3.4035557997750506e-05, "loss": 0.6655, "step": 935 },
    { "epoch": 0.43843283582089554, "grad_norm": 0.7113749823554164, "learning_rate": 3.386925961947906e-05, "loss": 0.6589, "step": 940 },
    { "epoch": 0.44076492537313433, "grad_norm": 0.701429407427016, "learning_rate": 3.370258212617602e-05, "loss": 0.6756, "step": 945 },
    { "epoch": 0.4430970149253731, "grad_norm": 0.6831638841691235, "learning_rate": 3.353553543892277e-05, "loss": 0.645, "step": 950 },
    { "epoch": 0.4454291044776119, "grad_norm": 0.6951969526491697, "learning_rate": 3.336812950077611e-05, "loss": 0.653, "step": 955 },
    { "epoch": 0.44776119402985076, "grad_norm": 0.712881849067669, "learning_rate": 3.320037427617639e-05, "loss": 0.6503, "step": 960 },
    { "epoch": 0.45009328358208955, "grad_norm": 0.677080067582334, "learning_rate": 3.30322797503544e-05, "loss": 0.6634, "step": 965 },
    { "epoch": 0.45242537313432835, "grad_norm": 0.673026887857187, "learning_rate": 3.2863855928737026e-05, "loss": 0.6299, "step": 970 },
    { "epoch": 0.45475746268656714, "grad_norm": 0.6976163186906711, "learning_rate": 3.2695112836351703e-05, "loss": 0.6514, "step": 975 },
    { "epoch": 0.457089552238806, "grad_norm": 0.6835684109054025, "learning_rate": 3.252606051722972e-05, "loss": 0.6715, "step": 980 },
    { "epoch": 0.4594216417910448, "grad_norm": 0.634542987869755, "learning_rate": 3.235670903380832e-05, "loss": 0.6452, "step": 985 },
    { "epoch": 0.46175373134328357, "grad_norm": 0.6700081127629496, "learning_rate": 3.218706846633183e-05, "loss": 0.6424, "step": 990 },
    { "epoch": 0.46408582089552236, "grad_norm": 0.674844701562939, "learning_rate": 3.201714891225156e-05, "loss": 0.64, "step": 995 },
    { "epoch": 0.4664179104477612, "grad_norm": 0.6901945362990005, "learning_rate": 3.1846960485624886e-05, "loss": 0.6359, "step": 1000 },
    { "epoch": 0.46875, "grad_norm": 0.711302010737976, "learning_rate": 3.1676513316513156e-05, "loss": 0.6613, "step": 1005 },
    { "epoch": 0.4710820895522388, "grad_norm": 0.6415681002352712, "learning_rate": 3.150581755037877e-05, "loss": 0.6448, "step": 1010 },
    { "epoch": 0.47341417910447764, "grad_norm": 0.6730615759692896, "learning_rate": 3.133488334748125e-05, "loss": 0.6569, "step": 1015 },
    { "epoch": 0.47574626865671643, "grad_norm": 0.6995346562904475, "learning_rate": 3.1163720882272516e-05, "loss": 0.637, "step": 1020 },
    { "epoch": 0.4780783582089552, "grad_norm": 0.7048313864139962, "learning_rate": 3.0992340342791246e-05, "loss": 0.6496, "step": 1025 },
    { "epoch": 0.480410447761194, "grad_norm": 0.6599686029534936, "learning_rate": 3.08207519300565e-05, "loss": 0.642, "step": 1030 },
    { "epoch": 0.48274253731343286, "grad_norm": 0.676508421822385, "learning_rate": 3.064896585746045e-05, "loss": 0.6307, "step": 1035 },
    { "epoch": 0.48507462686567165, "grad_norm": 0.6701675191331543, "learning_rate": 3.047699235016056e-05, "loss": 0.6384, "step": 1040 },
    { "epoch": 0.48740671641791045, "grad_norm": 0.7258698708508784, "learning_rate": 3.030484164447085e-05, "loss": 0.6283, "step": 1045 },
    { "epoch": 0.48973880597014924, "grad_norm": 0.7407558467480371, "learning_rate": 3.0132523987252658e-05, "loss": 0.6495, "step": 1050 },
    { "epoch": 0.4920708955223881, "grad_norm": 0.689449626576069, "learning_rate": 2.9960049635304755e-05, "loss": 0.6219, "step": 1055 },
    { "epoch": 0.4944029850746269, "grad_norm": 0.6605235489630851, "learning_rate": 2.9787428854752736e-05, "loss": 0.6328, "step": 1060 },
    { "epoch": 0.49673507462686567, "grad_norm": 0.6521941117929255, "learning_rate": 2.961467192043807e-05, "loss": 0.6445, "step": 1065 },
    { "epoch": 0.49906716417910446, "grad_norm": 0.719415886577355, "learning_rate": 2.9441789115306402e-05, "loss": 0.6601, "step": 1070 },
    { "epoch": 0.5013992537313433, "grad_norm": 0.7143547584326713, "learning_rate": 2.926879072979558e-05, "loss": 0.6477, "step": 1075 },
    { "epoch": 0.503731343283582, "grad_norm": 0.6805215530249611, "learning_rate": 2.9095687061223058e-05, "loss": 0.6456, "step": 1080 },
    { "epoch": 0.5060634328358209, "grad_norm": 0.6951914121970127, "learning_rate": 2.8922488413173053e-05, "loss": 0.6441, "step": 1085 },
    { "epoch": 0.5083955223880597, "grad_norm": 0.706237459032283, "learning_rate": 2.874920509488319e-05, "loss": 0.6574, "step": 1090 },
    { "epoch": 0.5107276119402985, "grad_norm": 0.7346760125128584, "learning_rate": 2.8575847420630887e-05, "loss": 0.6314, "step": 1095 },
    { "epoch": 0.5130597014925373, "grad_norm": 0.6542912159857234, "learning_rate": 2.8402425709119435e-05, "loss": 0.659, "step": 1100 },
    { "epoch": 0.5153917910447762, "grad_norm": 0.6819702914670763, "learning_rate": 2.8228950282863776e-05, "loss": 0.6492, "step": 1105 },
    { "epoch": 0.5177238805970149, "grad_norm": 0.6770174696103765, "learning_rate": 2.8055431467576106e-05, "loss": 0.638, "step": 1110 },
    { "epoch": 0.5200559701492538, "grad_norm": 0.662818262088358, "learning_rate": 2.788187959155124e-05, "loss": 0.63, "step": 1115 },
    { "epoch": 0.5223880597014925, "grad_norm": 0.7072513898861241, "learning_rate": 2.7708304985051868e-05, "loss": 0.6623, "step": 1120 },
    { "epoch": 0.5247201492537313, "grad_norm": 0.711447160492985, "learning_rate": 2.7534717979693647e-05, "loss": 0.6344, "step": 1125 },
    { "epoch": 0.5270522388059702, "grad_norm": 0.6918500244833096, "learning_rate": 2.7361128907830253e-05, "loss": 0.6385, "step": 1130 },
    { "epoch": 0.5293843283582089, "grad_norm": 0.6722193101035042, "learning_rate": 2.7187548101938353e-05, "loss": 0.6256, "step": 1135 },
    { "epoch": 0.5317164179104478, "grad_norm": 0.6569908241938373, "learning_rate": 2.7013985894002623e-05, "loss": 0.6236, "step": 1140 },
    { "epoch": 0.5340485074626866, "grad_norm": 0.695528007373876, "learning_rate": 2.6840452614900726e-05, "loss": 0.6533, "step": 1145 },
    { "epoch": 0.5363805970149254, "grad_norm": 0.6182593924769839, "learning_rate": 2.6666958593788405e-05, "loss": 0.631, "step": 1150 },
    { "epoch": 0.5387126865671642, "grad_norm": 0.6254875427778447, "learning_rate": 2.649351415748466e-05, "loss": 0.6528, "step": 1155 },
    { "epoch": 0.5410447761194029, "grad_norm": 0.6195472352586545, "learning_rate": 2.6320129629857093e-05, "loss": 0.6421, "step": 1160 },
    { "epoch": 0.5433768656716418, "grad_norm": 0.6406233353457038, "learning_rate": 2.6146815331207358e-05, "loss": 0.6183, "step": 1165 },
    { "epoch": 0.5457089552238806, "grad_norm": 0.6456118637481933, "learning_rate": 2.597358157765692e-05, "loss": 0.6474, "step": 1170 },
    { "epoch": 0.5480410447761194, "grad_norm": 0.6505557028284191, "learning_rate": 2.5800438680532974e-05, "loss": 0.6307, "step": 1175 },
    { "epoch": 0.5503731343283582, "grad_norm": 0.6673856033395356, "learning_rate": 2.56273969457547e-05, "loss": 0.6305, "step": 1180 },
    { "epoch": 0.5527052238805971, "grad_norm": 0.6976453369750254, "learning_rate": 2.545446667321984e-05, "loss": 0.6511, "step": 1185 },
    { "epoch": 0.5550373134328358, "grad_norm": 0.6523315253145765, "learning_rate": 2.528165815619162e-05, "loss": 0.6326, "step": 1190 },
    { "epoch": 0.5573694029850746, "grad_norm": 0.6513887450128372, "learning_rate": 2.5108981680686035e-05, "loss": 0.6357, "step": 1195 },
    { "epoch": 0.5597014925373134, "grad_norm": 0.6686846233513335, "learning_rate": 2.4936447524859625e-05, "loss": 0.6243, "step": 1200 },
    { "epoch": 0.5620335820895522, "grad_norm": 0.6254057596195436, "learning_rate": 2.4764065958397715e-05, "loss": 0.6273, "step": 1205 },
    { "epoch": 0.5643656716417911, "grad_norm": 0.6496921416294746, "learning_rate": 2.459184724190308e-05, "loss": 0.6179, "step": 1210 },
    { "epoch": 0.5666977611940298, "grad_norm": 0.6568624775107711, "learning_rate": 2.441980162628527e-05, "loss": 0.6344, "step": 1215 },
    { "epoch": 0.5690298507462687, "grad_norm": 0.6567469938905354, "learning_rate": 2.4247939352150386e-05, "loss": 0.6449, "step": 1220 },
    { "epoch": 0.5713619402985075, "grad_norm": 0.61556362280737, "learning_rate": 2.4076270649191573e-05, "loss": 0.6555, "step": 1225 },
    { "epoch": 0.5736940298507462, "grad_norm": 0.6394081674254839, "learning_rate": 2.390480573558012e-05, "loss": 0.6363, "step": 1230 },
    { "epoch": 0.5760261194029851, "grad_norm": 0.6051477641244504, "learning_rate": 2.3733554817357246e-05, "loss": 0.6348, "step": 1235 },
    { "epoch": 0.5783582089552238, "grad_norm": 0.5930962238087756, "learning_rate": 2.3562528087826573e-05, "loss": 0.6206, "step": 1240 },
    { "epoch": 0.5806902985074627, "grad_norm": 0.698655785577389, "learning_rate": 2.339173572694746e-05, "loss": 0.6273, "step": 1245 },
    { "epoch": 0.5830223880597015, "grad_norm": 0.6327913417184158, "learning_rate": 2.3221187900729003e-05, "loss": 0.637, "step": 1250 },
    { "epoch": 0.5853544776119403, "grad_norm": 0.6339231296671493, "learning_rate": 2.3050894760624982e-05, "loss": 0.641, "step": 1255 },
    { "epoch": 0.5876865671641791, "grad_norm": 0.6051505920325168, "learning_rate": 2.2880866442929544e-05, "loss": 0.6265, "step": 1260 },
    { "epoch": 0.590018656716418, "grad_norm": 0.6637235396372423, "learning_rate": 2.271111306817396e-05, "loss": 0.6482, "step": 1265 },
    { "epoch": 0.5923507462686567, "grad_norm": 0.6469789610641435, "learning_rate": 2.254164474052416e-05, "loss": 0.6266, "step": 1270 },
    { "epoch": 0.5946828358208955, "grad_norm": 0.5897603262670164, "learning_rate": 2.237247154717932e-05, "loss": 0.6188, "step": 1275 },
    { "epoch": 0.5970149253731343, "grad_norm": 0.6115820772990138, "learning_rate": 2.2203603557771447e-05, "loss": 0.6198, "step": 1280 },
    { "epoch": 0.5993470149253731, "grad_norm": 0.6159243257321477, "learning_rate": 2.2035050823766008e-05, "loss": 0.6313, "step": 1285 },
    { "epoch": 0.601679104477612, "grad_norm": 0.6683096146103435, "learning_rate": 2.186682337786365e-05, "loss": 0.6369, "step": 1290 },
    { "epoch": 0.6040111940298507, "grad_norm": 0.603613728907185, "learning_rate": 2.1698931233403013e-05, "loss": 0.6151, "step": 1295 },
    { "epoch": 0.6063432835820896, "grad_norm": 0.6004781166951644, "learning_rate": 2.153138438376473e-05, "loss": 0.622, "step": 1300 },
    { "epoch": 0.6086753731343284, "grad_norm": 18.22261692025831, "learning_rate": 2.136419280177655e-05, "loss": 0.6161, "step": 1305 },
    { "epoch": 0.6110074626865671, "grad_norm": 0.5978990585115841, "learning_rate": 2.119736643911979e-05, "loss": 0.6416, "step": 1310 },
    { "epoch": 0.613339552238806, "grad_norm": 0.597454038032284, "learning_rate": 2.1030915225736947e-05, "loss": 0.6288, "step": 1315 },
    { "epoch": 0.6156716417910447, "grad_norm": 0.6430793847859734, "learning_rate": 2.0864849069240645e-05, "loss": 0.6383, "step": 1320 },
    { "epoch": 0.6180037313432836, "grad_norm": 0.5831365581687563, "learning_rate": 2.0699177854323902e-05, "loss": 0.6295, "step": 1325 },
    { "epoch": 0.6203358208955224, "grad_norm": 0.5998744003452269, "learning_rate": 2.0533911442171805e-05, "loss": 0.6287, "step": 1330 },
    { "epoch": 0.6226679104477612, "grad_norm": 0.6643837943700802, "learning_rate": 2.036905966987449e-05, "loss": 0.6343, "step": 1335 },
    { "epoch": 0.625, "grad_norm": 0.6438429216570205, "learning_rate": 2.0204632349841667e-05, "loss": 0.616, "step": 1340 },
    { "epoch": 0.6273320895522388, "grad_norm": 0.6110144914242343, "learning_rate": 2.0040639269218532e-05, "loss": 0.6212, "step": 1345 },
    { "epoch": 0.6296641791044776, "grad_norm": 0.6615405256014992, "learning_rate": 1.9877090189303182e-05, "loss": 0.6463, "step": 1350 },
    { "epoch": 0.6319962686567164, "grad_norm": 0.6183940678157902, "learning_rate": 1.9713994844965657e-05, "loss": 0.6183, "step": 1355 },
    { "epoch": 0.6343283582089553, "grad_norm": 0.6746036398298787, "learning_rate": 1.9551362944068462e-05, "loss": 0.6301, "step": 1360 },
    { "epoch": 0.636660447761194, "grad_norm": 0.648307373321323, "learning_rate": 1.938920416688874e-05, "loss": 0.6308, "step": 1365 },
    { "epoch": 0.6389925373134329, "grad_norm": 0.6069645644995993, "learning_rate": 1.922752816554204e-05, "loss": 0.6205, "step": 1370 },
    { "epoch": 0.6413246268656716, "grad_norm": 0.5822850576614598, "learning_rate": 1.9066344563407856e-05, "loss": 0.6295, "step": 1375 },
    { "epoch": 0.6436567164179104, "grad_norm": 0.6496746526806308, "learning_rate": 1.890566295455678e-05, "loss": 0.605, "step": 1380 },
    { "epoch": 0.6459888059701493, "grad_norm": 0.5866035567882427, "learning_rate": 1.874549290317946e-05, "loss": 0.608, "step": 1385 },
    { "epoch": 0.648320895522388, "grad_norm": 0.6041775047865613, "learning_rate": 1.858584394301728e-05, "loss": 0.6356, "step": 1390 },
    { "epoch": 0.6506529850746269, "grad_norm": 0.5932846773816811, "learning_rate": 1.8426725576794918e-05, "loss": 0.6576, "step": 1395 },
    { "epoch": 0.6529850746268657, "grad_norm": 0.6021211057686333, "learning_rate": 1.8268147275654707e-05, "loss": 0.6276, "step": 1400 },
    { "epoch": 0.6553171641791045, "grad_norm": 0.5706056220756198, "learning_rate": 1.8110118478592915e-05, "loss": 0.6129, "step": 1405 },
    { "epoch": 0.6576492537313433, "grad_norm": 0.6303230581411896, "learning_rate": 1.7952648591897858e-05, "loss": 0.6296, "step": 1410 },
    { "epoch": 0.659981343283582, "grad_norm": 0.6069553160577528, "learning_rate": 1.7795746988590027e-05, "loss": 0.6162, "step": 1415 },
    { "epoch": 0.6623134328358209, "grad_norm": 0.6233052721164215, "learning_rate": 1.7639423007864252e-05, "loss": 0.6071, "step": 1420 },
    { "epoch": 0.6646455223880597, "grad_norm": 0.5792817379292262, "learning_rate": 1.7483685954533692e-05, "loss": 0.6186, "step": 1425 },
    { "epoch": 0.6669776119402985, "grad_norm": 0.598455839355501, "learning_rate": 1.7328545098476106e-05, "loss": 0.6126, "step": 1430 },
    { "epoch": 0.6693097014925373, "grad_norm": 0.5887965955039507, "learning_rate": 1.717400967408196e-05, "loss": 0.6219, "step": 1435 },
    { "epoch": 0.6716417910447762, "grad_norm": 0.6037531009113533, "learning_rate": 1.702008887970491e-05, "loss": 0.6164, "step": 1440 },
    { "epoch": 0.6739738805970149, "grad_norm": 0.5608388614868615, "learning_rate": 1.6866791877114165e-05, "loss": 0.6246, "step": 1445 },
    { "epoch": 0.6763059701492538, "grad_norm": 0.5732233279593167, "learning_rate": 1.671412779094926e-05, "loss": 0.622, "step": 1450 },
    { "epoch": 0.6786380597014925, "grad_norm": 0.604276726888298, "learning_rate": 1.656210570817685e-05, "loss": 0.6227, "step": 1455 },
    { "epoch": 0.6809701492537313, "grad_norm": 0.6084928698482973, "learning_rate": 1.6410734677549872e-05, "loss": 0.6156, "step": 1460 },
    { "epoch": 0.6833022388059702, "grad_norm": 0.6167514863041356, "learning_rate": 1.6260023709068932e-05, "loss": 0.622, "step": 1465 },
    { "epoch": 0.6856343283582089, "grad_norm": 0.6360423400540859, "learning_rate": 1.6109981773446036e-05, "loss": 0.6183, "step": 1470 },
    { "epoch": 0.6879664179104478, "grad_norm": 0.6092909418703362, "learning_rate": 1.5960617801570555e-05, "loss": 0.6162, "step": 1475 },
    { "epoch": 0.6902985074626866, "grad_norm": 0.6033346353614264, "learning_rate": 1.58119406839777e-05, "loss": 0.6221, "step": 1480 },
    { "epoch": 0.6926305970149254, "grad_norm": 0.5497901412692066, "learning_rate": 1.566395927031932e-05, "loss": 0.6094, "step": 1485 },
    { "epoch": 0.6949626865671642, "grad_norm": 0.5726565331606571, "learning_rate": 1.5516682368837133e-05, "loss": 0.6165, "step": 1490 },
    { "epoch": 0.6972947761194029, "grad_norm": 0.6379827482434526, "learning_rate": 1.5370118745838453e-05, "loss": 0.6404, "step": 1495 },
    { "epoch": 0.6996268656716418, "grad_norm": 0.6035346967740655, "learning_rate": 1.5224277125174388e-05, "loss": 0.6238, "step": 1500 },
    { "epoch": 0.7019589552238806, "grad_norm": 0.6046299493263414, "learning_rate": 1.5079166187720561e-05, "loss": 0.6233, "step": 1505 },
    { "epoch": 0.7042910447761194, "grad_norm": 0.5813425376508258, "learning_rate": 1.4934794570860416e-05, "loss": 0.6406, "step": 1510 },
    { "epoch": 0.7066231343283582, "grad_norm": 0.6005101213362166, "learning_rate": 1.4791170867971132e-05, "loss": 0.6167, "step": 1515 },
    { "epoch": 0.7089552238805971, "grad_norm": 0.5968043199699391, "learning_rate": 1.464830362791204e-05, "loss": 0.6154, "step": 1520 },
    { "epoch": 0.7112873134328358, "grad_norm": 0.585701695377086, "learning_rate": 1.450620135451585e-05, "loss": 0.6225, "step": 1525 },
    { "epoch": 0.7136194029850746, "grad_norm": 0.5957247172214722, "learning_rate": 1.4364872506082425e-05, "loss": 0.6282, "step": 1530 },
    { "epoch": 0.7159514925373134, "grad_norm": 0.5967837516043032, "learning_rate": 1.4224325494875385e-05, "loss": 0.6399, "step": 1535 },
    { "epoch": 0.7182835820895522, "grad_norm": 0.587594664717925, "learning_rate": 1.4084568686621314e-05, "loss": 0.6201, "step": 1540 },
    { "epoch": 0.7206156716417911, "grad_norm": 0.5617516377764582, "learning_rate": 1.3945610400011851e-05, "loss": 0.633, "step": 1545 },
    { "epoch": 0.7229477611940298, "grad_norm": 0.6036131494081576, "learning_rate": 1.3807458906208546e-05, "loss": 0.6421, "step": 1550 },
    { "epoch": 0.7252798507462687, "grad_norm": 0.5619681271472718, "learning_rate": 1.3670122428350521e-05, "loss": 0.618, "step": 1555 },
    { "epoch": 0.7276119402985075, "grad_norm": 0.5870654465012264, "learning_rate": 1.3533609141065008e-05, "loss": 0.6101, "step": 1560 },
    { "epoch": 0.7299440298507462, "grad_norm": 0.6499759452581642, "learning_rate": 1.3397927169980773e-05, "loss": 0.6161, "step": 1565 },
    { "epoch": 0.7322761194029851, "grad_norm": 0.5802369273091457, "learning_rate": 1.326308459124447e-05, "loss": 0.6156, "step": 1570 },
    { "epoch": 0.7346082089552238, "grad_norm": 0.5862527640011228, "learning_rate": 1.3129089431039931e-05, "loss": 0.6218, "step": 1575 },
    { "epoch": 0.7369402985074627, "grad_norm": 0.6041233460220358, "learning_rate": 1.299594966511038e-05, "loss": 0.6344, "step": 1580 },
    { "epoch": 0.7392723880597015, "grad_norm": 0.5761625096468084, "learning_rate": 1.2863673218283783e-05, "loss": 0.617, "step": 1585 },
    { "epoch": 0.7416044776119403, "grad_norm": 0.6001779302881813, "learning_rate": 1.2732267964001033e-05, "loss": 0.6101, "step": 1590 },
    { "epoch": 0.7439365671641791, "grad_norm": 0.5964143297371681, "learning_rate": 1.26017417238474e-05, "loss": 0.6212, "step": 1595 },
    { "epoch": 0.746268656716418, "grad_norm": 0.5964222072799055, "learning_rate": 1.2472102267086904e-05, "loss": 0.6211, "step": 1600 },
    { "epoch": 0.7486007462686567, "grad_norm": 0.6476516452528284, "learning_rate": 1.2343357310199925e-05, "loss": 0.6389, "step": 1605 },
    { "epoch": 0.7509328358208955, "grad_norm": 0.6211226051748449, "learning_rate": 1.2215514516423813e-05, "loss": 0.6345, "step": 1610 },
    { "epoch": 0.7532649253731343, "grad_norm": 0.6009531989706945, "learning_rate": 1.2088581495296852e-05, "loss": 0.6106, "step": 1615 },
    { "epoch": 0.7555970149253731, "grad_norm": 0.5457398269496171, "learning_rate": 1.1962565802205255e-05, "loss": 0.6107, "step": 1620 },
    { "epoch": 0.757929104477612, "grad_norm": 0.5485455623765262, "learning_rate": 1.1837474937933464e-05, "loss": 0.6194, "step": 1625 },
    { "epoch": 0.7602611940298507, "grad_norm": 0.5644743687543846, "learning_rate": 1.1713316348217673e-05, "loss": 0.6136, "step": 1630 },
    { "epoch": 0.7625932835820896, "grad_norm": 0.5944262234833658, "learning_rate": 1.1590097423302684e-05, "loss": 0.5942, "step": 1635 },
    { "epoch": 0.7649253731343284, "grad_norm": 0.5616590097486719, "learning_rate": 1.1467825497501954e-05, "loss": 0.6026, "step": 1640 },
    { "epoch": 0.7672574626865671, "grad_norm": 0.6146723556271906, "learning_rate": 1.1346507848761077e-05, "loss": 0.6156, "step": 1645 },
    { "epoch": 0.769589552238806, "grad_norm": 0.5965680064276904, "learning_rate": 1.1226151698224597e-05, "loss": 0.633, "step": 1650 },
    { "epoch": 0.7719216417910447, "grad_norm": 0.5614974770289655, "learning_rate": 1.1106764209806127e-05, "loss": 0.6194, "step": 1655 },
    { "epoch": 0.7742537313432836, "grad_norm": 0.6034383421575543, "learning_rate": 1.0988352489762006e-05, "loss": 0.6359, "step": 1660 },
    { "epoch": 0.7765858208955224, "grad_norm": 0.5568415876195435, "learning_rate": 1.0870923586268245e-05, "loss": 0.6143, "step": 1665 },
    { "epoch": 0.7789179104477612, "grad_norm": 0.5686287199324069, "learning_rate": 1.0754484489001085e-05, "loss": 0.6186, "step": 1670 },
    { "epoch": 0.78125, "grad_norm": 0.5739930134377701, "learning_rate": 1.0639042128720847e-05, "loss": 0.6235, "step": 1675 },
    { "epoch": 0.7835820895522388, "grad_norm": 0.5795603868856798, "learning_rate": 1.052460337685951e-05, "loss": 0.6049, "step": 1680 },
    { "epoch": 0.7859141791044776, "grad_norm": 0.56395308762799, "learning_rate": 1.0411175045111602e-05, "loss": 0.6032, "step": 1685 },
    { "epoch": 0.7882462686567164, "grad_norm": 0.58081174474857, "learning_rate": 1.0298763885028839e-05, "loss": 0.6178, "step": 1690 },
    { "epoch": 0.7905783582089553, "grad_norm": 0.5588983254320253, "learning_rate": 1.018737658761817e-05, "loss": 0.6105, "step": 1695 },
    { "epoch": 0.792910447761194, "grad_norm": 0.5364326303324629, "learning_rate": 1.0077019782943584e-05, "loss": 0.6237, "step": 1700 },
    { "epoch": 0.7952425373134329, "grad_norm": 0.5535399918900618, "learning_rate": 9.967700039731427e-06, "loss": 0.6129, "step": 1705 },
    { "epoch": 0.7975746268656716, "grad_norm": 0.5357340402470735, "learning_rate": 9.859423864979441e-06, "loss": 0.5947, "step": 1710 },
    { "epoch": 0.7999067164179104, "grad_norm": 0.6043622414636253, "learning_rate": 9.752197703569422e-06, "loss": 0.6341, "step": 1715 },
    { "epoch": 0.8022388059701493, "grad_norm": 0.5721045372713984, "learning_rate": 9.646027937883622e-06, "loss": 0.6161, "step": 1720 },
    { "epoch": 0.804570895522388, "grad_norm": 0.5312555490120068, "learning_rate": 9.54092088742485e-06, "loss": 0.64, "step": 1725 },
    { "epoch": 0.8069029850746269, "grad_norm": 0.5439729568974323, "learning_rate": 9.436882808440334e-06, "loss": 0.6165, "step": 1730 },
    { "epoch": 0.8092350746268657, "grad_norm": 0.5911843430242844, "learning_rate": 9.333919893549294e-06, "loss": 0.6099, "step": 1735 },
    { "epoch": 0.8115671641791045, "grad_norm": 0.5499239897213614, "learning_rate": 9.232038271374377e-06, "loss": 0.601, "step": 1740 },
    { "epoch": 0.8138992537313433, "grad_norm": 0.5917277497776632, "learning_rate": 9.131244006176846e-06, "loss": 0.6181, "step": 1745 },
    { "epoch": 0.816231343283582, "grad_norm": 0.5790530037092708, "learning_rate": 9.031543097495638e-06, "loss": 0.6033, "step": 1750 },
    { "epoch": 0.8185634328358209, "grad_norm": 0.556634899711528, "learning_rate": 8.93294147979023e-06, "loss": 0.5975, "step": 1755 },
    { "epoch": 0.8208955223880597, "grad_norm": 0.525574565943637, "learning_rate": 8.835445022087426e-06, "loss": 0.6194, "step": 1760 },
    { "epoch": 0.8232276119402985, "grad_norm": 0.542460572064194, "learning_rate": 8.739059527631999e-06, "loss": 0.6194, "step": 1765 },
    { "epoch": 0.8255597014925373, "grad_norm": 0.5961740451690235, "learning_rate": 8.6437907335413e-06, "loss": 0.6232, "step": 1770 },
    { "epoch": 0.8278917910447762, "grad_norm": 0.5156288871553688, "learning_rate": 8.549644310463717e-06, "loss": 0.625, "step": 1775 },
    { "epoch": 0.8302238805970149, "grad_norm": 0.5827304049610953, "learning_rate": 8.456625862241193e-06, "loss": 0.623, "step": 1780 },
    { "epoch": 0.8325559701492538, "grad_norm": 0.5473226394306203, "learning_rate": 8.364740925575643e-06, "loss": 0.6211, "step": 1785 },
    { "epoch": 0.8348880597014925, "grad_norm": 0.5663858177158172, "learning_rate": 8.273994969699394e-06, "loss": 0.6115, "step": 1790 },
    { "epoch": 0.8372201492537313, "grad_norm": 0.5667136142959207, "learning_rate": 8.184393396049675e-06, "loss": 0.6127, "step": 1795 },
    { "epoch": 0.8395522388059702, "grad_norm": 0.5603856211987771, "learning_rate": 8.095941537947057e-06, "loss": 0.6274, "step": 1800 },
    { "epoch": 0.8418843283582089, "grad_norm": 0.5594249365684848, "learning_rate": 8.008644660278051e-06, "loss": 0.5925, "step": 1805 },
    { "epoch": 0.8442164179104478, "grad_norm": 0.5635626874281433, "learning_rate": 7.922507959181673e-06, "loss": 0.6189, "step": 1810 },
    { "epoch": 0.8465485074626866, "grad_norm": 0.560313283634603, "learning_rate": 7.837536561740225e-06, "loss": 0.6097, "step": 1815 },
    { "epoch": 0.8488805970149254, "grad_norm": 0.6202519240674869, "learning_rate": 7.753735525674059e-06, "loss": 0.6174, "step": 1820 },
    { "epoch": 0.8512126865671642, "grad_norm": 0.5713772506301511, "learning_rate": 7.671109839040547e-06, "loss": 0.621, "step": 1825 },
    { "epoch": 0.8535447761194029, "grad_norm": 0.5237097428909497, "learning_rate": 7.58966441993719e-06, "loss": 0.5981, "step": 1830 },
    { "epoch": 0.8558768656716418, "grad_norm": 0.5483521318490681, "learning_rate": 7.509404116208868e-06, "loss": 0.6138, "step": 1835 },
    { "epoch": 0.8582089552238806, "grad_norm": 0.5778220550523655, "learning_rate": 7.430333705159286e-
|
"loss": 0.6522, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.8605410447761194, |
|
"grad_norm": 0.5486890952901237, |
|
"learning_rate": 7.352457893266627e-06, |
|
"loss": 0.6429, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.8628731343283582, |
|
"grad_norm": 0.5693041316007504, |
|
"learning_rate": 7.275781315903374e-06, |
|
"loss": 0.6225, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.8652052238805971, |
|
"grad_norm": 0.5594174594138699, |
|
"learning_rate": 7.20030853706046e-06, |
|
"loss": 0.6174, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.8675373134328358, |
|
"grad_norm": 0.5408573140361542, |
|
"learning_rate": 7.126044049075548e-06, |
|
"loss": 0.6239, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.8698694029850746, |
|
"grad_norm": 0.5625658917979015, |
|
"learning_rate": 7.052992272365681e-06, |
|
"loss": 0.6149, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.8722014925373134, |
|
"grad_norm": 0.5795869183892941, |
|
"learning_rate": 6.9811575551641224e-06, |
|
"loss": 0.6014, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.8745335820895522, |
|
"grad_norm": 0.5908193742703406, |
|
"learning_rate": 6.910544173261588e-06, |
|
"loss": 0.6047, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.8768656716417911, |
|
"grad_norm": 0.5468229515625878, |
|
"learning_rate": 6.8411563297516995e-06, |
|
"loss": 0.5956, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.8791977611940298, |
|
"grad_norm": 0.5746605159059298, |
|
"learning_rate": 6.772998154780832e-06, |
|
"loss": 0.612, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.8815298507462687, |
|
"grad_norm": 0.5756230793946113, |
|
"learning_rate": 6.706073705302254e-06, |
|
"loss": 0.6145, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.8838619402985075, |
|
"grad_norm": 0.5662725596320226, |
|
"learning_rate": 6.6403869648346634e-06, |
|
"loss": 0.6116, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 0.8861940298507462, |
|
"grad_norm": 0.6638472478598192, |
|
"learning_rate": 6.575941843225068e-06, |
|
"loss": 0.5928, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.8885261194029851, |
|
"grad_norm": 0.5392491951943847, |
|
"learning_rate": 6.5127421764160685e-06, |
|
"loss": 0.5994, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 0.8908582089552238, |
|
"grad_norm": 0.5501297394583888, |
|
"learning_rate": 6.450791726217538e-06, |
|
"loss": 0.623, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.8931902985074627, |
|
"grad_norm": 0.5561658935766048, |
|
"learning_rate": 6.390094180082694e-06, |
|
"loss": 0.6137, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 0.8955223880597015, |
|
"grad_norm": 0.5486158970271028, |
|
"learning_rate": 6.330653150888617e-06, |
|
"loss": 0.6153, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.8978544776119403, |
|
"grad_norm": 0.5624023243356174, |
|
"learning_rate": 6.272472176721207e-06, |
|
"loss": 0.6207, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.9001865671641791, |
|
"grad_norm": 0.5469455949584299, |
|
"learning_rate": 6.215554720664598e-06, |
|
"loss": 0.6276, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.902518656716418, |
|
"grad_norm": 0.5335851595481946, |
|
"learning_rate": 6.159904170594982e-06, |
|
"loss": 0.6151, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 0.9048507462686567, |
|
"grad_norm": 0.5451933732807914, |
|
"learning_rate": 6.105523838979022e-06, |
|
"loss": 0.6302, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.9071828358208955, |
|
"grad_norm": 0.5362545841210454, |
|
"learning_rate": 6.052416962676621e-06, |
|
"loss": 0.6058, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 0.9095149253731343, |
|
"grad_norm": 0.5230617545385562, |
|
"learning_rate": 6.000586702748301e-06, |
|
"loss": 0.603, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.9118470149253731, |
|
"grad_norm": 0.582021589520335, |
|
"learning_rate": 5.950036144267021e-06, |
|
"loss": 0.6111, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 0.914179104477612, |
|
"grad_norm": 0.519728015454707, |
|
"learning_rate": 5.900768296134551e-06, |
|
"loss": 0.6049, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.9165111940298507, |
|
"grad_norm": 0.592109342645659, |
|
"learning_rate": 5.852786090902383e-06, |
|
"loss": 0.6199, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 0.9188432835820896, |
|
"grad_norm": 0.5517752644641193, |
|
"learning_rate": 5.8060923845971825e-06, |
|
"loss": 0.6113, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.9211753731343284, |
|
"grad_norm": 0.5374604103547962, |
|
"learning_rate": 5.760689956550763e-06, |
|
"loss": 0.6051, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 0.9235074626865671, |
|
"grad_norm": 0.5317691219584879, |
|
"learning_rate": 5.7165815092346825e-06, |
|
"loss": 0.5952, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.925839552238806, |
|
"grad_norm": 0.5553747148697364, |
|
"learning_rate": 5.673769668099364e-06, |
|
"loss": 0.6088, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 0.9281716417910447, |
|
"grad_norm": 0.5606224712873208, |
|
"learning_rate": 5.632256981417845e-06, |
|
"loss": 0.6134, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.9305037313432836, |
|
"grad_norm": 0.5375797889998934, |
|
"learning_rate": 5.59204592013407e-06, |
|
"loss": 0.5973, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 0.9328358208955224, |
|
"grad_norm": 0.5508292133907728, |
|
"learning_rate": 5.553138877715833e-06, |
|
"loss": 0.6298, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.9351679104477612, |
|
"grad_norm": 0.5407565378019897, |
|
"learning_rate": 5.515538170012309e-06, |
|
"loss": 0.6085, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 0.9375, |
|
"grad_norm": 0.5516140680428332, |
|
"learning_rate": 5.479246035116201e-06, |
|
"loss": 0.5998, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.9398320895522388, |
|
"grad_norm": 0.5871082563627422, |
|
"learning_rate": 5.444264633230531e-06, |
|
"loss": 0.6146, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 0.9421641791044776, |
|
"grad_norm": 0.5579174490831493, |
|
"learning_rate": 5.410596046540051e-06, |
|
"loss": 0.6008, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.9444962686567164, |
|
"grad_norm": 0.5535279013398093, |
|
"learning_rate": 5.378242279087314e-06, |
|
"loss": 0.6071, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 0.9468283582089553, |
|
"grad_norm": 0.5542818715981681, |
|
"learning_rate": 5.347205256653387e-06, |
|
"loss": 0.6131, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.949160447761194, |
|
"grad_norm": 0.5507958590165508, |
|
"learning_rate": 5.317486826643219e-06, |
|
"loss": 0.5896, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 0.9514925373134329, |
|
"grad_norm": 0.5386294413734031, |
|
"learning_rate": 5.28908875797568e-06, |
|
"loss": 0.6164, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.9538246268656716, |
|
"grad_norm": 0.5657594223236935, |
|
"learning_rate": 5.262012740978269e-06, |
|
"loss": 0.6171, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 0.9561567164179104, |
|
"grad_norm": 0.545809161265838, |
|
"learning_rate": 5.236260387286509e-06, |
|
"loss": 0.6014, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.9584888059701493, |
|
"grad_norm": 0.5400541424481552, |
|
"learning_rate": 5.2118332297480105e-06, |
|
"loss": 0.6195, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 0.960820895522388, |
|
"grad_norm": 0.5301287222550025, |
|
"learning_rate": 5.1887327223312296e-06, |
|
"loss": 0.5894, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.9631529850746269, |
|
"grad_norm": 0.5826747467153147, |
|
"learning_rate": 5.166960240038937e-06, |
|
"loss": 0.6103, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 0.9654850746268657, |
|
"grad_norm": 0.5624482325072565, |
|
"learning_rate": 5.1465170788263595e-06, |
|
"loss": 0.6205, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.9678171641791045, |
|
"grad_norm": 0.5710832690138076, |
|
"learning_rate": 5.1274044555240525e-06, |
|
"loss": 0.6083, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.9701492537313433, |
|
"grad_norm": 0.5427558552652122, |
|
"learning_rate": 5.109623507765466e-06, |
|
"loss": 0.6113, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.972481343283582, |
|
"grad_norm": 0.5356447254957378, |
|
"learning_rate": 5.093175293919228e-06, |
|
"loss": 0.6138, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 0.9748134328358209, |
|
"grad_norm": 0.554507085372338, |
|
"learning_rate": 5.07806079302615e-06, |
|
"loss": 0.6209, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.9771455223880597, |
|
"grad_norm": 0.5347894717484287, |
|
"learning_rate": 5.064280904740953e-06, |
|
"loss": 0.6187, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 0.9794776119402985, |
|
"grad_norm": 0.548171827658312, |
|
"learning_rate": 5.051836449278715e-06, |
|
"loss": 0.6069, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.9818097014925373, |
|
"grad_norm": 0.562019041400098, |
|
"learning_rate": 5.040728167366057e-06, |
|
"loss": 0.6028, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 0.9841417910447762, |
|
"grad_norm": 0.5554495773144189, |
|
"learning_rate": 5.030956720197035e-06, |
|
"loss": 0.6039, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.9864738805970149, |
|
"grad_norm": 0.5847204114674615, |
|
"learning_rate": 5.022522689393809e-06, |
|
"loss": 0.6377, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 0.9888059701492538, |
|
"grad_norm": 0.5293713892212091, |
|
"learning_rate": 5.015426576972003e-06, |
|
"loss": 0.6099, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.9911380597014925, |
|
"grad_norm": 0.5377836319550625, |
|
"learning_rate": 5.009668805310832e-06, |
|
"loss": 0.6063, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.9934701492537313, |
|
"grad_norm": 0.5661212299229181, |
|
"learning_rate": 5.005249717127964e-06, |
|
"loss": 0.6199, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.9958022388059702, |
|
"grad_norm": 0.5383491265897701, |
|
"learning_rate": 5.002169575459111e-06, |
|
"loss": 0.6107, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 0.9981343283582089, |
|
"grad_norm": 0.5598221695919163, |
|
"learning_rate": 5.000428563642382e-06, |
|
"loss": 0.6166, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 2144, |
|
"total_flos": 241268013662208.0, |
|
"train_loss": 0.6583015036894314, |
|
"train_runtime": 13168.5767, |
|
"train_samples_per_second": 2.605, |
|
"train_steps_per_second": 0.163 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 2144, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 241268013662208.0, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |