{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9989550679205852, |
|
"eval_steps": 500, |
|
"global_step": 239, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0041797283176593526, |
|
"grad_norm": 3.793109431652244, |
|
"learning_rate": 2.5e-06, |
|
"loss": 1.3773, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.008359456635318705, |
|
"grad_norm": 3.860176928138404, |
|
"learning_rate": 5e-06, |
|
"loss": 1.3697, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.012539184952978056, |
|
"grad_norm": 3.472574916996383, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 1.3685, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.01671891327063741, |
|
"grad_norm": 2.426017601116389, |
|
"learning_rate": 1e-05, |
|
"loss": 1.2106, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.02089864158829676, |
|
"grad_norm": 2.369903447078516, |
|
"learning_rate": 1.25e-05, |
|
"loss": 1.2334, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.025078369905956112, |
|
"grad_norm": 6.10573157991006, |
|
"learning_rate": 1.5000000000000002e-05, |
|
"loss": 1.1659, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.029258098223615466, |
|
"grad_norm": 2.828089970728912, |
|
"learning_rate": 1.7500000000000002e-05, |
|
"loss": 1.1133, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.03343782654127482, |
|
"grad_norm": 3.7326281687198226, |
|
"learning_rate": 2e-05, |
|
"loss": 1.031, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.03761755485893417, |
|
"grad_norm": 2.8226722768014127, |
|
"learning_rate": 1.9999075218579184e-05, |
|
"loss": 1.0627, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.04179728317659352, |
|
"grad_norm": 2.3941660915552196, |
|
"learning_rate": 1.9996301045360874e-05, |
|
"loss": 0.8942, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.04597701149425287, |
|
"grad_norm": 2.333154808520554, |
|
"learning_rate": 1.9991677993445832e-05, |
|
"loss": 0.8475, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.050156739811912224, |
|
"grad_norm": 2.1073095025076003, |
|
"learning_rate": 1.9985206917896563e-05, |
|
"loss": 0.7987, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.054336468129571575, |
|
"grad_norm": 2.0147413946396506, |
|
"learning_rate": 1.9976889015579167e-05, |
|
"loss": 0.7122, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.05851619644723093, |
|
"grad_norm": 1.7762710304793987, |
|
"learning_rate": 1.9966725824941933e-05, |
|
"loss": 0.6907, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.06269592476489028, |
|
"grad_norm": 1.854790476944044, |
|
"learning_rate": 1.9954719225730847e-05, |
|
"loss": 0.583, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.06687565308254964, |
|
"grad_norm": 1.6431498121799215, |
|
"learning_rate": 1.994087143864188e-05, |
|
"loss": 0.5144, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.07105538140020899, |
|
"grad_norm": 1.5849345385422475, |
|
"learning_rate": 1.992518502491028e-05, |
|
"loss": 0.4662, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.07523510971786834, |
|
"grad_norm": 2.0976691689662488, |
|
"learning_rate": 1.9907662885836836e-05, |
|
"loss": 0.5641, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.0794148380355277, |
|
"grad_norm": 1.8591436531727428, |
|
"learning_rate": 1.9888308262251286e-05, |
|
"loss": 0.5513, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.08359456635318704, |
|
"grad_norm": 1.6077371820716417, |
|
"learning_rate": 1.986712473391289e-05, |
|
"loss": 0.514, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0877742946708464, |
|
"grad_norm": 1.6369595572017508, |
|
"learning_rate": 1.9844116218848335e-05, |
|
"loss": 0.4729, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.09195402298850575, |
|
"grad_norm": 1.4143725432673173, |
|
"learning_rate": 1.9819286972627066e-05, |
|
"loss": 0.4404, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.0961337513061651, |
|
"grad_norm": 1.9394850939365578, |
|
"learning_rate": 1.9792641587574212e-05, |
|
"loss": 0.4529, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.10031347962382445, |
|
"grad_norm": 1.4086541686064722, |
|
"learning_rate": 1.9764184991921178e-05, |
|
"loss": 0.4101, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.1044932079414838, |
|
"grad_norm": 2.563989330153671, |
|
"learning_rate": 1.973392244889415e-05, |
|
"loss": 0.3327, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.10867293625914315, |
|
"grad_norm": 1.4054095165182117, |
|
"learning_rate": 1.9701859555740647e-05, |
|
"loss": 0.3673, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.11285266457680251, |
|
"grad_norm": 1.3347083701406377, |
|
"learning_rate": 1.966800224269424e-05, |
|
"loss": 0.4013, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.11703239289446186, |
|
"grad_norm": 1.3599751397794146, |
|
"learning_rate": 1.9632356771877735e-05, |
|
"loss": 0.3765, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.12121212121212122, |
|
"grad_norm": 1.2994011828520489, |
|
"learning_rate": 1.9594929736144978e-05, |
|
"loss": 0.3092, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.12539184952978055, |
|
"grad_norm": 1.4307464855692031, |
|
"learning_rate": 1.955572805786141e-05, |
|
"loss": 0.352, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.12957157784743992, |
|
"grad_norm": 1.1156432595068917, |
|
"learning_rate": 1.9514758987623784e-05, |
|
"loss": 0.2338, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.13375130616509928, |
|
"grad_norm": 1.2383396612081838, |
|
"learning_rate": 1.9472030102919102e-05, |
|
"loss": 0.3058, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.13793103448275862, |
|
"grad_norm": 1.3484241033815327, |
|
"learning_rate": 1.94275493067231e-05, |
|
"loss": 0.3032, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.14211076280041798, |
|
"grad_norm": 1.1814763693141177, |
|
"learning_rate": 1.938132482603856e-05, |
|
"loss": 0.2606, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.14629049111807732, |
|
"grad_norm": 1.1152475267807018, |
|
"learning_rate": 1.9333365210373668e-05, |
|
"loss": 0.2655, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.15047021943573669, |
|
"grad_norm": 1.0274673205489633, |
|
"learning_rate": 1.9283679330160726e-05, |
|
"loss": 0.2393, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.15464994775339602, |
|
"grad_norm": 1.1446059182649253, |
|
"learning_rate": 1.9232276375115517e-05, |
|
"loss": 0.3053, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.1588296760710554, |
|
"grad_norm": 1.2083159051790577, |
|
"learning_rate": 1.9179165852537596e-05, |
|
"loss": 0.2887, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.16300940438871472, |
|
"grad_norm": 1.5658760108502938, |
|
"learning_rate": 1.9124357585551872e-05, |
|
"loss": 0.3527, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.1671891327063741, |
|
"grad_norm": 1.3279092492462565, |
|
"learning_rate": 1.9067861711291744e-05, |
|
"loss": 0.3117, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.17136886102403343, |
|
"grad_norm": 1.177122725805587, |
|
"learning_rate": 1.900968867902419e-05, |
|
"loss": 0.2835, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.1755485893416928, |
|
"grad_norm": 1.0091524997429624, |
|
"learning_rate": 1.89498492482171e-05, |
|
"loss": 0.2474, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.17972831765935215, |
|
"grad_norm": 0.993604546498234, |
|
"learning_rate": 1.8888354486549238e-05, |
|
"loss": 0.2289, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.1839080459770115, |
|
"grad_norm": 1.3521657755483074, |
|
"learning_rate": 1.8825215767863215e-05, |
|
"loss": 0.3526, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.18808777429467086, |
|
"grad_norm": 1.1636752310323113, |
|
"learning_rate": 1.876044477006183e-05, |
|
"loss": 0.3034, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.1922675026123302, |
|
"grad_norm": 0.9201202294799887, |
|
"learning_rate": 1.8694053472948154e-05, |
|
"loss": 0.2322, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.19644723092998956, |
|
"grad_norm": 1.0591767636272813, |
|
"learning_rate": 1.8626054156009807e-05, |
|
"loss": 0.1968, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.2006269592476489, |
|
"grad_norm": 1.1802357695207026, |
|
"learning_rate": 1.8556459396147777e-05, |
|
"loss": 0.248, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.20480668756530826, |
|
"grad_norm": 1.05844609547146, |
|
"learning_rate": 1.8485282065350237e-05, |
|
"loss": 0.2361, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.2089864158829676, |
|
"grad_norm": 0.8493872312825937, |
|
"learning_rate": 1.8412535328311813e-05, |
|
"loss": 0.163, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.21316614420062696, |
|
"grad_norm": 1.1938936546063497, |
|
"learning_rate": 1.8338232639998672e-05, |
|
"loss": 0.2011, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.2173458725182863, |
|
"grad_norm": 0.9821596876965665, |
|
"learning_rate": 1.826238774315995e-05, |
|
"loss": 0.1797, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.22152560083594566, |
|
"grad_norm": 1.0890806155634258, |
|
"learning_rate": 1.8185014665785936e-05, |
|
"loss": 0.2356, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.22570532915360503, |
|
"grad_norm": 3.1623969781776875, |
|
"learning_rate": 1.810612771851352e-05, |
|
"loss": 0.2341, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.22988505747126436, |
|
"grad_norm": 1.0495429246657253, |
|
"learning_rate": 1.8025741491979326e-05, |
|
"loss": 0.2286, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.23406478578892373, |
|
"grad_norm": 1.0998745172943698, |
|
"learning_rate": 1.7943870854121126e-05, |
|
"loss": 0.228, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.23824451410658307, |
|
"grad_norm": 1.0611257780697756, |
|
"learning_rate": 1.7860530947427878e-05, |
|
"loss": 0.1926, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.24242424242424243, |
|
"grad_norm": 1.3963883396571588, |
|
"learning_rate": 1.777573718613904e-05, |
|
"loss": 0.2455, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.24660397074190177, |
|
"grad_norm": 0.8817393289325651, |
|
"learning_rate": 1.768950525339362e-05, |
|
"loss": 0.186, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.2507836990595611, |
|
"grad_norm": 1.004709836073224, |
|
"learning_rate": 1.7601851098329484e-05, |
|
"loss": 0.2003, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.2549634273772205, |
|
"grad_norm": 1.009032284413222, |
|
"learning_rate": 1.7512790933133435e-05, |
|
"loss": 0.2389, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.25914315569487983, |
|
"grad_norm": 1.0842648970922986, |
|
"learning_rate": 1.74223412300427e-05, |
|
"loss": 0.2324, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.26332288401253917, |
|
"grad_norm": 1.1362114248016657, |
|
"learning_rate": 1.7330518718298263e-05, |
|
"loss": 0.2636, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.26750261233019856, |
|
"grad_norm": 1.2490219244647938, |
|
"learning_rate": 1.72373403810507e-05, |
|
"loss": 0.2392, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.2716823406478579, |
|
"grad_norm": 0.9229560381350037, |
|
"learning_rate": 1.7142823452219036e-05, |
|
"loss": 0.2207, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.27586206896551724, |
|
"grad_norm": 0.9194163633451438, |
|
"learning_rate": 1.7046985413303215e-05, |
|
"loss": 0.2208, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.2800417972831766, |
|
"grad_norm": 0.9431299303955618, |
|
"learning_rate": 1.6949843990150798e-05, |
|
"loss": 0.1877, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.28422152560083597, |
|
"grad_norm": 1.0695148700733295, |
|
"learning_rate": 1.6851417149678442e-05, |
|
"loss": 0.2431, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.2884012539184953, |
|
"grad_norm": 0.9981918594046101, |
|
"learning_rate": 1.6751723096548834e-05, |
|
"loss": 0.2134, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.29258098223615464, |
|
"grad_norm": 0.7779428741827196, |
|
"learning_rate": 1.6650780269803587e-05, |
|
"loss": 0.1772, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.296760710553814, |
|
"grad_norm": 0.7017283200072758, |
|
"learning_rate": 1.6548607339452853e-05, |
|
"loss": 0.154, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.30094043887147337, |
|
"grad_norm": 1.2657237042635219, |
|
"learning_rate": 1.644522320302217e-05, |
|
"loss": 0.1989, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.3051201671891327, |
|
"grad_norm": 0.9141216726104325, |
|
"learning_rate": 1.634064698205725e-05, |
|
"loss": 0.1632, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.30929989550679204, |
|
"grad_norm": 0.853915517926657, |
|
"learning_rate": 1.6234898018587336e-05, |
|
"loss": 0.2127, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.31347962382445144, |
|
"grad_norm": 1.0323735847419797, |
|
"learning_rate": 1.612799587154777e-05, |
|
"loss": 0.2167, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.3176593521421108, |
|
"grad_norm": 0.930984497217397, |
|
"learning_rate": 1.6019960313162436e-05, |
|
"loss": 0.1956, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.3218390804597701, |
|
"grad_norm": 1.006790474010485, |
|
"learning_rate": 1.5910811325286768e-05, |
|
"loss": 0.2224, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.32601880877742945, |
|
"grad_norm": 0.6839140410559078, |
|
"learning_rate": 1.5800569095711983e-05, |
|
"loss": 0.1534, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.33019853709508884, |
|
"grad_norm": 0.9218639125486777, |
|
"learning_rate": 1.5689254014431225e-05, |
|
"loss": 0.2173, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.3343782654127482, |
|
"grad_norm": 0.9628453893674432, |
|
"learning_rate": 1.5576886669868297e-05, |
|
"loss": 0.2144, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.3385579937304075, |
|
"grad_norm": 1.0976706938526015, |
|
"learning_rate": 1.5463487845069708e-05, |
|
"loss": 0.2869, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.34273772204806685, |
|
"grad_norm": 0.843877293720617, |
|
"learning_rate": 1.5349078513860728e-05, |
|
"loss": 0.1644, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.34691745036572624, |
|
"grad_norm": 1.0395155619197012, |
|
"learning_rate": 1.5233679836966122e-05, |
|
"loss": 0.2048, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.3510971786833856, |
|
"grad_norm": 0.62654878377622, |
|
"learning_rate": 1.5117313158096371e-05, |
|
"loss": 0.1011, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.3552769070010449, |
|
"grad_norm": 0.8765627543230693, |
|
"learning_rate": 1.5000000000000002e-05, |
|
"loss": 0.2282, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.3594566353187043, |
|
"grad_norm": 0.8493012101164317, |
|
"learning_rate": 1.4881762060482814e-05, |
|
"loss": 0.2303, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.36363636363636365, |
|
"grad_norm": 0.9061911256912499, |
|
"learning_rate": 1.476262120839475e-05, |
|
"loss": 0.1772, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.367816091954023, |
|
"grad_norm": 0.7714054432768603, |
|
"learning_rate": 1.4642599479585106e-05, |
|
"loss": 0.1827, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.3719958202716823, |
|
"grad_norm": 1.0302753021934954, |
|
"learning_rate": 1.4521719072826858e-05, |
|
"loss": 0.2556, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.3761755485893417, |
|
"grad_norm": 0.8170194188293493, |
|
"learning_rate": 1.4400002345710871e-05, |
|
"loss": 0.2131, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.38035527690700105, |
|
"grad_norm": 0.8353229514513874, |
|
"learning_rate": 1.427747181051071e-05, |
|
"loss": 0.1774, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.3845350052246604, |
|
"grad_norm": 0.8996783507158662, |
|
"learning_rate": 1.4154150130018867e-05, |
|
"loss": 0.2074, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.3887147335423197, |
|
"grad_norm": 1.0120263532766836, |
|
"learning_rate": 1.4030060113355118e-05, |
|
"loss": 0.2291, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.3928944618599791, |
|
"grad_norm": 0.8009583936768994, |
|
"learning_rate": 1.3905224711747844e-05, |
|
"loss": 0.1793, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.39707419017763845, |
|
"grad_norm": 0.8764394706115101, |
|
"learning_rate": 1.3779667014289067e-05, |
|
"loss": 0.1639, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.4012539184952978, |
|
"grad_norm": 1.2665682983369955, |
|
"learning_rate": 1.3653410243663953e-05, |
|
"loss": 0.2469, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.4054336468129572, |
|
"grad_norm": 2.02790558792064, |
|
"learning_rate": 1.3526477751855645e-05, |
|
"loss": 0.2458, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.4096133751306165, |
|
"grad_norm": 0.8765418818205535, |
|
"learning_rate": 1.3398893015826166e-05, |
|
"loss": 0.2141, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.41379310344827586, |
|
"grad_norm": 0.8402084285563245, |
|
"learning_rate": 1.3270679633174219e-05, |
|
"loss": 0.17, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.4179728317659352, |
|
"grad_norm": 0.8927111902324383, |
|
"learning_rate": 1.3141861317770628e-05, |
|
"loss": 0.2419, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.4221525600835946, |
|
"grad_norm": 0.9307548734337794, |
|
"learning_rate": 1.3012461895372343e-05, |
|
"loss": 0.2211, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.4263322884012539, |
|
"grad_norm": 0.8182965551610069, |
|
"learning_rate": 1.2882505299215711e-05, |
|
"loss": 0.1546, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.43051201671891326, |
|
"grad_norm": 0.5513459236136672, |
|
"learning_rate": 1.2752015565589852e-05, |
|
"loss": 0.1028, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.4346917450365726, |
|
"grad_norm": 0.7895344671438952, |
|
"learning_rate": 1.2621016829391022e-05, |
|
"loss": 0.147, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.438871473354232, |
|
"grad_norm": 0.7347726170918327, |
|
"learning_rate": 1.2489533319658703e-05, |
|
"loss": 0.1499, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.4430512016718913, |
|
"grad_norm": 0.9930172706392331, |
|
"learning_rate": 1.2357589355094275e-05, |
|
"loss": 0.1999, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.44723092998955066, |
|
"grad_norm": 0.700736172068482, |
|
"learning_rate": 1.2225209339563144e-05, |
|
"loss": 0.1727, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.45141065830721006, |
|
"grad_norm": 0.9578584031611264, |
|
"learning_rate": 1.2092417757581085e-05, |
|
"loss": 0.2408, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.4555903866248694, |
|
"grad_norm": 0.8203132426685221, |
|
"learning_rate": 1.1959239169785668e-05, |
|
"loss": 0.1951, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.45977011494252873, |
|
"grad_norm": 0.9270712768052833, |
|
"learning_rate": 1.182569820839362e-05, |
|
"loss": 0.2222, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.46394984326018807, |
|
"grad_norm": 0.9063435343774252, |
|
"learning_rate": 1.1691819572644941e-05, |
|
"loss": 0.1827, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.46812957157784746, |
|
"grad_norm": 0.9742349956564444, |
|
"learning_rate": 1.155762802423463e-05, |
|
"loss": 0.2166, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.4723092998955068, |
|
"grad_norm": 0.740191368731416, |
|
"learning_rate": 1.1423148382732854e-05, |
|
"loss": 0.162, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.47648902821316613, |
|
"grad_norm": 2.152737189786801, |
|
"learning_rate": 1.128840552099439e-05, |
|
"loss": 0.2347, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.48066875653082547, |
|
"grad_norm": 0.6837438960691173, |
|
"learning_rate": 1.1153424360558268e-05, |
|
"loss": 0.1344, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.48484848484848486, |
|
"grad_norm": 0.8928037881326962, |
|
"learning_rate": 1.1018229867038358e-05, |
|
"loss": 0.1723, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.4890282131661442, |
|
"grad_norm": 1.004544035476834, |
|
"learning_rate": 1.0882847045505809e-05, |
|
"loss": 0.1962, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.49320794148380354, |
|
"grad_norm": 0.841348260273773, |
|
"learning_rate": 1.0747300935864245e-05, |
|
"loss": 0.1803, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.49738766980146293, |
|
"grad_norm": 0.8496561849066544, |
|
"learning_rate": 1.0611616608218429e-05, |
|
"loss": 0.1905, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.5015673981191222, |
|
"grad_norm": 1.1461318580041029, |
|
"learning_rate": 1.0475819158237426e-05, |
|
"loss": 0.2435, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.5057471264367817, |
|
"grad_norm": 0.9171413538232467, |
|
"learning_rate": 1.0339933702512978e-05, |
|
"loss": 0.253, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.509926854754441, |
|
"grad_norm": 0.7285731725801685, |
|
"learning_rate": 1.0203985373914056e-05, |
|
"loss": 0.1548, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.5141065830721003, |
|
"grad_norm": 0.8652478932288088, |
|
"learning_rate": 1.0067999316938348e-05, |
|
"loss": 0.204, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.5182863113897597, |
|
"grad_norm": 1.38915125451696, |
|
"learning_rate": 9.932000683061654e-06, |
|
"loss": 0.2442, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.522466039707419, |
|
"grad_norm": 0.9268619467285734, |
|
"learning_rate": 9.79601462608595e-06, |
|
"loss": 0.2297, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.5266457680250783, |
|
"grad_norm": 0.7880807499102447, |
|
"learning_rate": 9.660066297487024e-06, |
|
"loss": 0.181, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.5308254963427377, |
|
"grad_norm": 0.754337068341988, |
|
"learning_rate": 9.524180841762577e-06, |
|
"loss": 0.1641, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.5350052246603971, |
|
"grad_norm": 1.1050970913178217, |
|
"learning_rate": 9.388383391781576e-06, |
|
"loss": 0.2412, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.5391849529780565, |
|
"grad_norm": 0.911580865548536, |
|
"learning_rate": 9.252699064135759e-06, |
|
"loss": 0.2092, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.5433646812957158, |
|
"grad_norm": 0.8679286773035769, |
|
"learning_rate": 9.117152954494195e-06, |
|
"loss": 0.1902, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.5475444096133751, |
|
"grad_norm": 0.76185165053938, |
|
"learning_rate": 8.981770132961649e-06, |
|
"loss": 0.2015, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.5517241379310345, |
|
"grad_norm": 0.6153250846939452, |
|
"learning_rate": 8.846575639441732e-06, |
|
"loss": 0.1301, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.5559038662486938, |
|
"grad_norm": 0.8129974861108137, |
|
"learning_rate": 8.711594479005614e-06, |
|
"loss": 0.1688, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.5600835945663531, |
|
"grad_norm": 0.8932210165728961, |
|
"learning_rate": 8.576851617267151e-06, |
|
"loss": 0.142, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.5642633228840125, |
|
"grad_norm": 1.2480488477623177, |
|
"learning_rate": 8.442371975765368e-06, |
|
"loss": 0.2117, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.5684430512016719, |
|
"grad_norm": 1.2436168746130591, |
|
"learning_rate": 8.308180427355062e-06, |
|
"loss": 0.2376, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.5726227795193313, |
|
"grad_norm": 0.8002711459729407, |
|
"learning_rate": 8.174301791606384e-06, |
|
"loss": 0.1827, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.5768025078369906, |
|
"grad_norm": 0.8475588556028547, |
|
"learning_rate": 8.040760830214334e-06, |
|
"loss": 0.1658, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.58098223615465, |
|
"grad_norm": 0.8639184920307346, |
|
"learning_rate": 7.907582242418916e-06, |
|
"loss": 0.17, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.5851619644723093, |
|
"grad_norm": 0.8598440958255673, |
|
"learning_rate": 7.774790660436857e-06, |
|
"loss": 0.2112, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.5893416927899686, |
|
"grad_norm": 0.8792127016276399, |
|
"learning_rate": 7.642410644905726e-06, |
|
"loss": 0.2157, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.593521421107628, |
|
"grad_norm": 0.5310794775676358, |
|
"learning_rate": 7.5104666803413015e-06, |
|
"loss": 0.0995, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.5977011494252874, |
|
"grad_norm": 0.6031042659903776, |
|
"learning_rate": 7.378983170608982e-06, |
|
"loss": 0.1226, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.6018808777429467, |
|
"grad_norm": 0.9322591026745743, |
|
"learning_rate": 7.24798443441015e-06, |
|
"loss": 0.2269, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.6060606060606061, |
|
"grad_norm": 0.7650887090451826, |
|
"learning_rate": 7.117494700784292e-06, |
|
"loss": 0.1972, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.6102403343782654, |
|
"grad_norm": 1.08702051590229, |
|
"learning_rate": 6.9875381046276605e-06, |
|
"loss": 0.2312, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.6144200626959248, |
|
"grad_norm": 1.1938077188160876, |
|
"learning_rate": 6.8581386822293765e-06, |
|
"loss": 0.1944, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.6185997910135841, |
|
"grad_norm": 0.8286648541039914, |
|
"learning_rate": 6.729320366825785e-06, |
|
"loss": 0.2012, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.6227795193312434, |
|
"grad_norm": 1.0448385340552901, |
|
"learning_rate": 6.601106984173835e-06, |
|
"loss": 0.176, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.6269592476489029, |
|
"grad_norm": 0.6431098205020193, |
|
"learning_rate": 6.473522248144359e-06, |
|
"loss": 0.1611, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.6311389759665622, |
|
"grad_norm": 0.6902192399005279, |
|
"learning_rate": 6.34658975633605e-06, |
|
"loss": 0.1571, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.6353187042842215, |
|
"grad_norm": 1.4126319469067181, |
|
"learning_rate": 6.220332985710936e-06, |
|
"loss": 0.2253, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.6394984326018809, |
|
"grad_norm": 0.9250333234508288, |
|
"learning_rate": 6.094775288252157e-06, |
|
"loss": 0.1659, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.6436781609195402, |
|
"grad_norm": 1.0996139187104108, |
|
"learning_rate": 5.9699398866448846e-06, |
|
"loss": 0.1927, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.6478578892371996, |
|
"grad_norm": 0.6600170395773419, |
|
"learning_rate": 5.845849869981137e-06, |
|
"loss": 0.1504, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.6520376175548589, |
|
"grad_norm": 0.8091133996097659, |
|
"learning_rate": 5.722528189489294e-06, |
|
"loss": 0.1728, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.6562173458725182, |
|
"grad_norm": 0.6787441434180406, |
|
"learning_rate": 5.599997654289129e-06, |
|
"loss": 0.1306, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.6603970741901777, |
|
"grad_norm": 0.8602639720659184, |
|
"learning_rate": 5.478280927173145e-06, |
|
"loss": 0.168, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.664576802507837, |
|
"grad_norm": 0.6826930291022159, |
|
"learning_rate": 5.357400520414898e-06, |
|
"loss": 0.1205, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.6687565308254964, |
|
"grad_norm": 0.6701217983786535, |
|
"learning_rate": 5.237378791605249e-06, |
|
"loss": 0.1452, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.6729362591431557, |
|
"grad_norm": 0.7578669897505922, |
|
"learning_rate": 5.11823793951719e-06, |
|
"loss": 0.1626, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.677115987460815, |
|
"grad_norm": 0.8367106457596803, |
|
"learning_rate": 5.000000000000003e-06, |
|
"loss": 0.1981, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.6812957157784744, |
|
"grad_norm": 0.8324385169673411, |
|
"learning_rate": 4.882686841903627e-06, |
|
"loss": 0.1945, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.6854754440961337, |
|
"grad_norm": 0.8336564812001488, |
|
"learning_rate": 4.766320163033882e-06, |
|
"loss": 0.1497, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.6896551724137931, |
|
"grad_norm": 0.9106572698316981, |
|
"learning_rate": 4.6509214861392785e-06, |
|
"loss": 0.2122, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.6938349007314525, |
|
"grad_norm": 0.9899598996363259, |
|
"learning_rate": 4.5365121549302916e-06, |
|
"loss": 0.291, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.6980146290491118, |
|
"grad_norm": 0.7526211809216892, |
|
"learning_rate": 4.423113330131708e-06, |
|
"loss": 0.1398, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.7021943573667712, |
|
"grad_norm": 0.580108623854109, |
|
"learning_rate": 4.310745985568779e-06, |
|
"loss": 0.1228, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.7063740856844305, |
|
"grad_norm": 0.5902701942035865, |
|
"learning_rate": 4.19943090428802e-06, |
|
"loss": 0.122, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.7105538140020898, |
|
"grad_norm": 0.8908115982207944, |
|
"learning_rate": 4.0891886747132356e-06, |
|
"loss": 0.1679, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.7147335423197492, |
|
"grad_norm": 0.7610727588905307, |
|
"learning_rate": 3.9800396868375675e-06, |
|
"loss": 0.1331, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.7189132706374086, |
|
"grad_norm": 0.9084312218170052, |
|
"learning_rate": 3.872004128452231e-06, |
|
"loss": 0.2037, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.723092998955068, |
|
"grad_norm": 0.7951214412363438, |
|
"learning_rate": 3.7651019814126656e-06, |
|
"loss": 0.152, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.7272727272727273, |
|
"grad_norm": 1.0007664865334354, |
|
"learning_rate": 3.659353017942754e-06, |
|
"loss": 0.199, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.7314524555903866, |
|
"grad_norm": 0.8000807402307464, |
|
"learning_rate": 3.5547767969778355e-06, |
|
"loss": 0.1874, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.735632183908046, |
|
"grad_norm": 0.7056309894011483, |
|
"learning_rate": 3.4513926605471504e-06, |
|
"loss": 0.1408, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.7398119122257053, |
|
"grad_norm": 1.0154810770219924, |
|
"learning_rate": 3.3492197301964145e-06, |
|
"loss": 0.1516, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.7439916405433646, |
|
"grad_norm": 0.6774580884476967, |
|
"learning_rate": 3.248276903451171e-06, |
|
"loss": 0.1389, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.7481713688610241, |
|
"grad_norm": 1.383727265413641, |
|
"learning_rate": 3.1485828503215588e-06, |
|
"loss": 0.1652, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.7523510971786834, |
|
"grad_norm": 0.9084226985349156, |
|
"learning_rate": 3.0501560098492056e-06, |
|
"loss": 0.22, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.7565308254963428, |
|
"grad_norm": 0.7904229164202767, |
|
"learning_rate": 2.9530145866967897e-06, |
|
"loss": 0.1688, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.7607105538140021, |
|
"grad_norm": 0.9537175407913363, |
|
"learning_rate": 2.8571765477809645e-06, |
|
"loss": 0.2203, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.7648902821316614, |
|
"grad_norm": 0.9045213538470349, |
|
"learning_rate": 2.7626596189492983e-06, |
|
"loss": 0.2235, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.7690700104493208, |
|
"grad_norm": 0.8437761162278906, |
|
"learning_rate": 2.669481281701739e-06, |
|
"loss": 0.2405, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.7732497387669801, |
|
"grad_norm": 0.9170366332680835, |
|
"learning_rate": 2.5776587699573007e-06, |
|
"loss": 0.2356, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.7774294670846394, |
|
"grad_norm": 0.859807092495294, |
|
"learning_rate": 2.487209066866565e-06, |
|
"loss": 0.1581, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.7816091954022989, |
|
"grad_norm": 0.7541718313410231, |
|
"learning_rate": 2.398148901670521e-06, |
|
"loss": 0.1735, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.7857889237199582, |
|
"grad_norm": 0.5298691809105464, |
|
"learning_rate": 2.3104947466063785e-06, |
|
"loss": 0.1247, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.7899686520376176, |
|
"grad_norm": 0.5509131740535222, |
|
"learning_rate": 2.224262813860962e-06, |
|
"loss": 0.1135, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.7941483803552769, |
|
"grad_norm": 1.211014988349712, |
|
"learning_rate": 2.1394690525721275e-06, |
|
"loss": 0.2396, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.7983281086729362, |
|
"grad_norm": 0.8304883104248448, |
|
"learning_rate": 2.0561291458788736e-06, |
|
"loss": 0.1495, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.8025078369905956, |
|
"grad_norm": 0.6575918108532034, |
|
"learning_rate": 1.9742585080206754e-06, |
|
"loss": 0.1705, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.8066875653082549, |
|
"grad_norm": 0.6483828691185768, |
|
"learning_rate": 1.8938722814864863e-06, |
|
"loss": 0.1179, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.8108672936259144, |
|
"grad_norm": 0.7874326139345168, |
|
"learning_rate": 1.8149853342140644e-06, |
|
"loss": 0.1911, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.8150470219435737, |
|
"grad_norm": 0.9396325288901851, |
|
"learning_rate": 1.7376122568400533e-06, |
|
"loss": 0.2313, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.819226750261233, |
|
"grad_norm": 0.8980422324440791, |
|
"learning_rate": 1.6617673600013295e-06, |
|
"loss": 0.2637, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.8234064785788924, |
|
"grad_norm": 0.8393671732445989, |
|
"learning_rate": 1.587464671688187e-06, |
|
"loss": 0.2008, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.8275862068965517, |
|
"grad_norm": 0.9628013968768121, |
|
"learning_rate": 1.5147179346497665e-06, |
|
"loss": 0.1803, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.831765935214211, |
|
"grad_norm": 0.8055867006150553, |
|
"learning_rate": 1.443540603852227e-06, |
|
"loss": 0.2058, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.8359456635318704, |
|
"grad_norm": 0.7402053267386335, |
|
"learning_rate": 1.373945843990192e-06, |
|
"loss": 0.1678, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.8401253918495298, |
|
"grad_norm": 0.6345259422046625, |
|
"learning_rate": 1.3059465270518469e-06, |
|
"loss": 0.1499, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.8443051201671892, |
|
"grad_norm": 0.9278886576312714, |
|
"learning_rate": 1.2395552299381742e-06, |
|
"loss": 0.1376, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.8484848484848485, |
|
"grad_norm": 0.7305237779713204, |
|
"learning_rate": 1.1747842321367886e-06, |
|
"loss": 0.1689, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.8526645768025078, |
|
"grad_norm": 0.785339323897186, |
|
"learning_rate": 1.1116455134507665e-06, |
|
"loss": 0.1515, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.8568443051201672, |
|
"grad_norm": 0.8232817926726907, |
|
"learning_rate": 1.0501507517829012e-06, |
|
"loss": 0.2148, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.8610240334378265, |
|
"grad_norm": 0.920951874634867, |
|
"learning_rate": 9.903113209758098e-07, |
|
"loss": 0.1981, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.8652037617554859, |
|
"grad_norm": 0.778697980092621, |
|
"learning_rate": 9.321382887082564e-07, |
|
"loss": 0.1697, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.8693834900731452, |
|
"grad_norm": 0.552864168373498, |
|
"learning_rate": 8.756424144481313e-07, |
|
"loss": 0.1197, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.8735632183908046, |
|
"grad_norm": 0.7721394559949915, |
|
"learning_rate": 8.208341474624071e-07, |
|
"loss": 0.1636, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.877742946708464, |
|
"grad_norm": 0.5887750742792935, |
|
"learning_rate": 7.677236248844855e-07, |
|
"loss": 0.1238, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.8819226750261233, |
|
"grad_norm": 1.1706298717117896, |
|
"learning_rate": 7.163206698392744e-07, |
|
"loss": 0.2685, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.8861024033437827, |
|
"grad_norm": 0.7156481787290525, |
|
"learning_rate": 6.666347896263326e-07, |
|
"loss": 0.1272, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.890282131661442, |
|
"grad_norm": 0.9148297160556185, |
|
"learning_rate": 6.186751739614405e-07, |
|
"loss": 0.1878, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.8944618599791013, |
|
"grad_norm": 0.8443233125008259, |
|
"learning_rate": 5.724506932769014e-07, |
|
"loss": 0.1872, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.8986415882967607, |
|
"grad_norm": 0.7906265849992707, |
|
"learning_rate": 5.279698970809011e-07, |
|
"loss": 0.1672, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.9028213166144201, |
|
"grad_norm": 0.8000045890229907, |
|
"learning_rate": 4.852410123762164e-07, |
|
"loss": 0.1357, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.9070010449320794, |
|
"grad_norm": 0.7692285277924558, |
|
"learning_rate": 4.4427194213859216e-07, |
|
"loss": 0.1733, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.9111807732497388, |
|
"grad_norm": 0.6828595942343869, |
|
"learning_rate": 4.0507026385502747e-07, |
|
"loss": 0.1377, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.9153605015673981, |
|
"grad_norm": 0.7616205791512801, |
|
"learning_rate": 3.6764322812226416e-07, |
|
"loss": 0.1743, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.9195402298850575, |
|
"grad_norm": 0.6920015184607431, |
|
"learning_rate": 3.319977573057642e-07, |
|
"loss": 0.178, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.9237199582027168, |
|
"grad_norm": 0.8599083603091006, |
|
"learning_rate": 2.9814044425935605e-07, |
|
"loss": 0.211, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.9278996865203761, |
|
"grad_norm": 0.8372366140139196, |
|
"learning_rate": 2.6607755110584886e-07, |
|
"loss": 0.1693, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.9320794148380356, |
|
"grad_norm": 0.740216908527879, |
|
"learning_rate": 2.3581500807882462e-07, |
|
"loss": 0.1677, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.9362591431556949, |
|
"grad_norm": 0.7486951708296801, |
|
"learning_rate": 2.0735841242578992e-07, |
|
"loss": 0.1367, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.9404388714733543, |
|
"grad_norm": 0.6059460975199559, |
|
"learning_rate": 1.8071302737293294e-07, |
|
"loss": 0.1287, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.9446185997910136, |
|
"grad_norm": 0.6725131427516452, |
|
"learning_rate": 1.558837811516667e-07, |
|
"loss": 0.1355, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.9487983281086729, |
|
"grad_norm": 0.89103460281586, |
|
"learning_rate": 1.3287526608711132e-07, |
|
"loss": 0.2224, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.9529780564263323, |
|
"grad_norm": 0.7294258012671165, |
|
"learning_rate": 1.1169173774871478e-07, |
|
"loss": 0.1247, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.9571577847439916, |
|
"grad_norm": 0.5563178146719318, |
|
"learning_rate": 9.233711416316571e-08, |
|
"loss": 0.1229, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.9613375130616509, |
|
"grad_norm": 1.2052590543331665, |
|
"learning_rate": 7.481497508972313e-08, |
|
"loss": 0.1689, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.9655172413793104, |
|
"grad_norm": 0.8195585207091408, |
|
"learning_rate": 5.912856135812051e-08, |
|
"loss": 0.1942, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.9696969696969697, |
|
"grad_norm": 2.0024730819109773, |
|
"learning_rate": 4.528077426915412e-08, |
|
"loss": 0.1719, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.9738766980146291, |
|
"grad_norm": 0.6177697556146066, |
|
"learning_rate": 3.327417505806785e-08, |
|
"loss": 0.1286, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.9780564263322884, |
|
"grad_norm": 2.319773776301439, |
|
"learning_rate": 2.311098442083659e-08, |
|
"loss": 0.2415, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.9822361546499477, |
|
"grad_norm": 1.1389478132400794, |
|
"learning_rate": 1.4793082103435885e-08, |
|
"loss": 0.1913, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.9864158829676071, |
|
"grad_norm": 0.8823489571757573, |
|
"learning_rate": 8.322006554171147e-09, |
|
"loss": 0.2037, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.9905956112852664, |
|
"grad_norm": 0.6469295501938342, |
|
"learning_rate": 3.698954639129726e-09, |
|
"loss": 0.1321, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.9947753396029259, |
|
"grad_norm": 0.8129747939307076, |
|
"learning_rate": 9.24781420816867e-10, |
|
"loss": 0.194, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.9989550679205852, |
|
"grad_norm": 0.9261130483396426, |
|
"learning_rate": 0.0, |
|
"loss": 0.1793, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.9989550679205852, |
|
"step": 239, |
|
"total_flos": 449784335171584.0, |
|
"train_loss": 0.26206765498451606, |
|
"train_runtime": 2412.5124, |
|
"train_samples_per_second": 12.688, |
|
"train_steps_per_second": 0.099 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 239, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": false, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 449784335171584.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |