|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 19.92814371257485, |
|
"eval_steps": 500, |
|
"global_step": 3328, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.059880239520958084, |
|
"grad_norm": 6.431809425354004, |
|
"learning_rate": 1.1976047904191617e-05, |
|
"loss": 1.1211, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.11976047904191617, |
|
"grad_norm": 2.930279493331909, |
|
"learning_rate": 2.3952095808383234e-05, |
|
"loss": 0.6469, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.17964071856287425, |
|
"grad_norm": 2.0707571506500244, |
|
"learning_rate": 3.592814371257485e-05, |
|
"loss": 0.3656, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.23952095808383234, |
|
"grad_norm": 1.0708367824554443, |
|
"learning_rate": 4.790419161676647e-05, |
|
"loss": 0.2645, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.2994011976047904, |
|
"grad_norm": 0.8269807696342468, |
|
"learning_rate": 5.988023952095808e-05, |
|
"loss": 0.2082, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.3592814371257485, |
|
"grad_norm": 0.9306111931800842, |
|
"learning_rate": 7.18562874251497e-05, |
|
"loss": 0.1856, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.41916167664670656, |
|
"grad_norm": 2.0250155925750732, |
|
"learning_rate": 8.383233532934131e-05, |
|
"loss": 0.1663, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.47904191616766467, |
|
"grad_norm": 1.3040319681167603, |
|
"learning_rate": 9.580838323353294e-05, |
|
"loss": 0.1629, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.5389221556886228, |
|
"grad_norm": 0.7521260380744934, |
|
"learning_rate": 0.00010778443113772456, |
|
"loss": 0.1357, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.5988023952095808, |
|
"grad_norm": 1.0798639059066772, |
|
"learning_rate": 0.00011976047904191617, |
|
"loss": 0.1219, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.6586826347305389, |
|
"grad_norm": 1.1869535446166992, |
|
"learning_rate": 0.0001317365269461078, |
|
"loss": 0.1231, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.718562874251497, |
|
"grad_norm": 0.6623774766921997, |
|
"learning_rate": 0.0001437125748502994, |
|
"loss": 0.1056, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.7784431137724551, |
|
"grad_norm": 0.7135199308395386, |
|
"learning_rate": 0.00015568862275449103, |
|
"loss": 0.0958, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.8383233532934131, |
|
"grad_norm": 0.5620641112327576, |
|
"learning_rate": 0.00016766467065868263, |
|
"loss": 0.1049, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.8982035928143712, |
|
"grad_norm": 0.9924416542053223, |
|
"learning_rate": 0.00017964071856287425, |
|
"loss": 0.094, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.9580838323353293, |
|
"grad_norm": 1.8146198987960815, |
|
"learning_rate": 0.00019161676646706587, |
|
"loss": 0.0982, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.0179640718562875, |
|
"grad_norm": 0.9982231259346008, |
|
"learning_rate": 0.00019999955550902674, |
|
"loss": 0.0987, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.0778443113772456, |
|
"grad_norm": 0.7121732831001282, |
|
"learning_rate": 0.0001999916535572039, |
|
"loss": 0.0865, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.1377245508982037, |
|
"grad_norm": 0.7211266160011292, |
|
"learning_rate": 0.00019997387492660495, |
|
"loss": 0.0815, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.1976047904191618, |
|
"grad_norm": 0.7090721130371094, |
|
"learning_rate": 0.00019994622137331462, |
|
"loss": 0.078, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.2574850299401197, |
|
"grad_norm": 0.6617732644081116, |
|
"learning_rate": 0.00019990869562881356, |
|
"loss": 0.0751, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.3173652694610778, |
|
"grad_norm": 0.9563634395599365, |
|
"learning_rate": 0.00019986130139970835, |
|
"loss": 0.077, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.377245508982036, |
|
"grad_norm": 0.4591192901134491, |
|
"learning_rate": 0.00019980404336736554, |
|
"loss": 0.0742, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.437125748502994, |
|
"grad_norm": 0.3945522606372833, |
|
"learning_rate": 0.00019973692718744902, |
|
"loss": 0.073, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.4970059880239521, |
|
"grad_norm": 0.5244777202606201, |
|
"learning_rate": 0.0001996599594893617, |
|
"loss": 0.0689, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.55688622754491, |
|
"grad_norm": 0.5341875553131104, |
|
"learning_rate": 0.00019957314787559036, |
|
"loss": 0.069, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.6167664670658684, |
|
"grad_norm": 0.33649489283561707, |
|
"learning_rate": 0.00019947650092095494, |
|
"loss": 0.0651, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.6766467065868262, |
|
"grad_norm": 0.48326146602630615, |
|
"learning_rate": 0.00019937002817176146, |
|
"loss": 0.0664, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.7365269461077846, |
|
"grad_norm": 0.4517434239387512, |
|
"learning_rate": 0.00019925374014485912, |
|
"loss": 0.0669, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 1.7964071856287425, |
|
"grad_norm": 0.529296338558197, |
|
"learning_rate": 0.00019912764832660156, |
|
"loss": 0.0665, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.8562874251497006, |
|
"grad_norm": 0.5424647927284241, |
|
"learning_rate": 0.00019899176517171208, |
|
"loss": 0.0598, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.9161676646706587, |
|
"grad_norm": 0.3814735412597656, |
|
"learning_rate": 0.0001988461041020537, |
|
"loss": 0.054, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 1.9760479041916168, |
|
"grad_norm": 0.48093119263648987, |
|
"learning_rate": 0.0001986906795053032, |
|
"loss": 0.0553, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 2.035928143712575, |
|
"grad_norm": 0.5698864459991455, |
|
"learning_rate": 0.00019852550673353004, |
|
"loss": 0.0586, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 2.095808383233533, |
|
"grad_norm": 0.839526355266571, |
|
"learning_rate": 0.00019835060210168017, |
|
"loss": 0.0565, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 2.155688622754491, |
|
"grad_norm": 0.5425819754600525, |
|
"learning_rate": 0.00019816598288596412, |
|
"loss": 0.0573, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 2.215568862275449, |
|
"grad_norm": 0.2914687693119049, |
|
"learning_rate": 0.00019797166732215076, |
|
"loss": 0.052, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 2.2754491017964074, |
|
"grad_norm": 0.5953413248062134, |
|
"learning_rate": 0.0001977676746037662, |
|
"loss": 0.054, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 2.3353293413173652, |
|
"grad_norm": 0.42740774154663086, |
|
"learning_rate": 0.0001975540248801977, |
|
"loss": 0.0554, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 2.3952095808383236, |
|
"grad_norm": 0.37728944420814514, |
|
"learning_rate": 0.00019733073925470352, |
|
"loss": 0.0513, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 2.4550898203592815, |
|
"grad_norm": 0.475818932056427, |
|
"learning_rate": 0.00019709783978232842, |
|
"loss": 0.0542, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 2.5149700598802394, |
|
"grad_norm": 0.6117169260978699, |
|
"learning_rate": 0.00019685534946772536, |
|
"loss": 0.0554, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 2.5748502994011977, |
|
"grad_norm": 0.34023669362068176, |
|
"learning_rate": 0.00019660329226288273, |
|
"loss": 0.0535, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 2.6347305389221556, |
|
"grad_norm": 0.4431716501712799, |
|
"learning_rate": 0.00019634169306475917, |
|
"loss": 0.0496, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 2.694610778443114, |
|
"grad_norm": 0.5750408172607422, |
|
"learning_rate": 0.00019607057771282372, |
|
"loss": 0.0505, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 2.754491017964072, |
|
"grad_norm": 0.45190536975860596, |
|
"learning_rate": 0.00019578997298650395, |
|
"loss": 0.0438, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 2.81437125748503, |
|
"grad_norm": 0.37500932812690735, |
|
"learning_rate": 0.00019549990660254073, |
|
"loss": 0.049, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 2.874251497005988, |
|
"grad_norm": 0.35688552260398865, |
|
"learning_rate": 0.00019520040721225033, |
|
"loss": 0.0507, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 2.934131736526946, |
|
"grad_norm": 0.3029610216617584, |
|
"learning_rate": 0.00019489150439869465, |
|
"loss": 0.0497, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 2.9940119760479043, |
|
"grad_norm": 0.4592169523239136, |
|
"learning_rate": 0.000194573228673759, |
|
"loss": 0.0504, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 3.053892215568862, |
|
"grad_norm": 0.3194161355495453, |
|
"learning_rate": 0.0001942456114751382, |
|
"loss": 0.0481, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 3.1137724550898205, |
|
"grad_norm": 0.5735427141189575, |
|
"learning_rate": 0.00019390868516323156, |
|
"loss": 0.0463, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 3.1736526946107784, |
|
"grad_norm": 0.251126766204834, |
|
"learning_rate": 0.00019356248301794632, |
|
"loss": 0.0466, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 3.2335329341317367, |
|
"grad_norm": 0.2263738214969635, |
|
"learning_rate": 0.00019320703923541045, |
|
"loss": 0.0475, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 3.2934131736526946, |
|
"grad_norm": 0.3312239348888397, |
|
"learning_rate": 0.00019284238892459485, |
|
"loss": 0.0451, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 3.3532934131736525, |
|
"grad_norm": 0.27982234954833984, |
|
"learning_rate": 0.00019246856810384576, |
|
"loss": 0.0489, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 3.413173652694611, |
|
"grad_norm": 0.34601065516471863, |
|
"learning_rate": 0.0001920856136973266, |
|
"loss": 0.0481, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 3.4730538922155687, |
|
"grad_norm": 0.4402002692222595, |
|
"learning_rate": 0.0001916935635313711, |
|
"loss": 0.0437, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 3.532934131736527, |
|
"grad_norm": 0.6538058519363403, |
|
"learning_rate": 0.00019129245633074703, |
|
"loss": 0.0478, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 3.592814371257485, |
|
"grad_norm": 0.36621013283729553, |
|
"learning_rate": 0.0001908823317148309, |
|
"loss": 0.0436, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 3.6526946107784433, |
|
"grad_norm": 0.36551448702812195, |
|
"learning_rate": 0.00019046323019369474, |
|
"loss": 0.044, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 3.712574850299401, |
|
"grad_norm": 0.4750276505947113, |
|
"learning_rate": 0.0001900351931641047, |
|
"loss": 0.0453, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 3.772455089820359, |
|
"grad_norm": 0.6237766742706299, |
|
"learning_rate": 0.00018959826290543207, |
|
"loss": 0.0466, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 3.8323353293413174, |
|
"grad_norm": 0.45154473185539246, |
|
"learning_rate": 0.00018915248257547713, |
|
"loss": 0.0448, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 3.8922155688622757, |
|
"grad_norm": 0.3980957865715027, |
|
"learning_rate": 0.0001886978962062062, |
|
"loss": 0.0502, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 3.9520958083832336, |
|
"grad_norm": 0.3399762213230133, |
|
"learning_rate": 0.00018823454869940242, |
|
"loss": 0.046, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 4.0119760479041915, |
|
"grad_norm": 0.3906604051589966, |
|
"learning_rate": 0.00018776248582223063, |
|
"loss": 0.0416, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 4.07185628742515, |
|
"grad_norm": 0.35462263226509094, |
|
"learning_rate": 0.00018728175420271658, |
|
"loss": 0.0385, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 4.131736526946108, |
|
"grad_norm": 0.43025287985801697, |
|
"learning_rate": 0.00018679240132514136, |
|
"loss": 0.042, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 4.191616766467066, |
|
"grad_norm": 0.31433719396591187, |
|
"learning_rate": 0.0001862944755253511, |
|
"loss": 0.0381, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 4.251497005988024, |
|
"grad_norm": 0.545958936214447, |
|
"learning_rate": 0.00018578802598598256, |
|
"loss": 0.0402, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 4.311377245508982, |
|
"grad_norm": 0.283829003572464, |
|
"learning_rate": 0.00018527310273160522, |
|
"loss": 0.0409, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 4.37125748502994, |
|
"grad_norm": 0.3808535635471344, |
|
"learning_rate": 0.00018474975662377996, |
|
"loss": 0.0411, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 4.431137724550898, |
|
"grad_norm": 0.25326570868492126, |
|
"learning_rate": 0.00018421803935603532, |
|
"loss": 0.0431, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 4.491017964071856, |
|
"grad_norm": 0.6720901131629944, |
|
"learning_rate": 0.0001836780034487614, |
|
"loss": 0.0436, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 4.550898203592815, |
|
"grad_norm": 0.4183627665042877, |
|
"learning_rate": 0.00018312970224402224, |
|
"loss": 0.0408, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 4.610778443113772, |
|
"grad_norm": 0.2710108160972595, |
|
"learning_rate": 0.00018257318990028682, |
|
"loss": 0.0427, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 4.6706586826347305, |
|
"grad_norm": 0.38718074560165405, |
|
"learning_rate": 0.00018200852138707975, |
|
"loss": 0.0467, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 4.730538922155689, |
|
"grad_norm": 0.2398432046175003, |
|
"learning_rate": 0.00018143575247955135, |
|
"loss": 0.0406, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 4.790419161676647, |
|
"grad_norm": 0.3367202877998352, |
|
"learning_rate": 0.0001808549397529689, |
|
"loss": 0.0407, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 4.850299401197605, |
|
"grad_norm": 0.30790024995803833, |
|
"learning_rate": 0.00018026614057712795, |
|
"loss": 0.0404, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 4.910179640718563, |
|
"grad_norm": 0.266544908285141, |
|
"learning_rate": 0.00017966941311068594, |
|
"loss": 0.0373, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 4.970059880239521, |
|
"grad_norm": 0.2760760188102722, |
|
"learning_rate": 0.00017906481629541743, |
|
"loss": 0.0376, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 5.029940119760479, |
|
"grad_norm": 0.23295895755290985, |
|
"learning_rate": 0.00017845240985039218, |
|
"loss": 0.0375, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 5.089820359281437, |
|
"grad_norm": 0.3514467477798462, |
|
"learning_rate": 0.0001778322542660764, |
|
"loss": 0.037, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 5.149700598802395, |
|
"grad_norm": 0.3003256618976593, |
|
"learning_rate": 0.0001772044107983577, |
|
"loss": 0.0379, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 5.209580838323353, |
|
"grad_norm": 0.24236004054546356, |
|
"learning_rate": 0.0001765689414624947, |
|
"loss": 0.0399, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 5.269461077844311, |
|
"grad_norm": 0.22592489421367645, |
|
"learning_rate": 0.00017592590902699138, |
|
"loss": 0.0388, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 5.3293413173652695, |
|
"grad_norm": 0.21647202968597412, |
|
"learning_rate": 0.00017527537700739708, |
|
"loss": 0.0409, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 5.389221556886228, |
|
"grad_norm": 0.37862035632133484, |
|
"learning_rate": 0.00017461740966003282, |
|
"loss": 0.0429, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 5.449101796407185, |
|
"grad_norm": 0.29174819588661194, |
|
"learning_rate": 0.00017395207197564442, |
|
"loss": 0.0402, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 5.508982035928144, |
|
"grad_norm": 0.2290957272052765, |
|
"learning_rate": 0.00017327942967298285, |
|
"loss": 0.0372, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 5.568862275449102, |
|
"grad_norm": 0.23877893388271332, |
|
"learning_rate": 0.0001725995491923131, |
|
"loss": 0.0348, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 5.62874251497006, |
|
"grad_norm": 0.3068605959415436, |
|
"learning_rate": 0.00017191249768885137, |
|
"loss": 0.0434, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 5.688622754491018, |
|
"grad_norm": 0.2592070698738098, |
|
"learning_rate": 0.00017121834302613186, |
|
"loss": 0.0349, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 5.748502994011976, |
|
"grad_norm": 0.2851567268371582, |
|
"learning_rate": 0.00017051715376930363, |
|
"loss": 0.0394, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 5.808383233532934, |
|
"grad_norm": 0.3089054226875305, |
|
"learning_rate": 0.00016980899917835783, |
|
"loss": 0.0356, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 5.868263473053892, |
|
"grad_norm": 0.26863303780555725, |
|
"learning_rate": 0.00016909394920128695, |
|
"loss": 0.0366, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 5.92814371257485, |
|
"grad_norm": 0.3612504005432129, |
|
"learning_rate": 0.00016837207446717518, |
|
"loss": 0.0375, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 5.9880239520958085, |
|
"grad_norm": 0.24680382013320923, |
|
"learning_rate": 0.00016764344627922254, |
|
"loss": 0.0419, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 6.047904191616767, |
|
"grad_norm": 0.2882663905620575, |
|
"learning_rate": 0.0001669081366077016, |
|
"loss": 0.0337, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 6.107784431137724, |
|
"grad_norm": 0.3996296226978302, |
|
"learning_rate": 0.00016616621808284864, |
|
"loss": 0.0371, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 6.167664670658683, |
|
"grad_norm": 0.19218039512634277, |
|
"learning_rate": 0.00016541776398768966, |
|
"loss": 0.0374, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 6.227544910179641, |
|
"grad_norm": 0.21666273474693298, |
|
"learning_rate": 0.00016466284825080184, |
|
"loss": 0.0319, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 6.287425149700598, |
|
"grad_norm": 0.267782986164093, |
|
"learning_rate": 0.00016390154543901122, |
|
"loss": 0.0291, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 6.347305389221557, |
|
"grad_norm": 0.30645301938056946, |
|
"learning_rate": 0.00016313393075002736, |
|
"loss": 0.0331, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 6.407185628742515, |
|
"grad_norm": 0.29111963510513306, |
|
"learning_rate": 0.00016236008000501573, |
|
"loss": 0.0328, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 6.467065868263473, |
|
"grad_norm": 1.8472002744674683, |
|
"learning_rate": 0.00016158006964110837, |
|
"loss": 0.0392, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 6.526946107784431, |
|
"grad_norm": 0.2733491063117981, |
|
"learning_rate": 0.0001607939767038539, |
|
"loss": 0.0355, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 6.586826347305389, |
|
"grad_norm": 0.3126026391983032, |
|
"learning_rate": 0.00016000187883960734, |
|
"loss": 0.0347, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 6.6467065868263475, |
|
"grad_norm": 0.1978524774312973, |
|
"learning_rate": 0.00015920385428786057, |
|
"loss": 0.0335, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 6.706586826347305, |
|
"grad_norm": 0.181430846452713, |
|
"learning_rate": 0.0001583999818735142, |
|
"loss": 0.0318, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 6.766467065868263, |
|
"grad_norm": 0.29239514470100403, |
|
"learning_rate": 0.00015759034099909158, |
|
"loss": 0.034, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 6.826347305389222, |
|
"grad_norm": 0.22587193548679352, |
|
"learning_rate": 0.00015677501163689604, |
|
"loss": 0.0361, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 6.88622754491018, |
|
"grad_norm": 0.20451228320598602, |
|
"learning_rate": 0.00015595407432111134, |
|
"loss": 0.032, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 6.946107784431137, |
|
"grad_norm": 0.26598894596099854, |
|
"learning_rate": 0.00015512761013984711, |
|
"loss": 0.0341, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 7.005988023952096, |
|
"grad_norm": 0.34581083059310913, |
|
"learning_rate": 0.00015429570072712925, |
|
"loss": 0.0351, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 7.065868263473054, |
|
"grad_norm": 0.31990233063697815, |
|
"learning_rate": 0.00015345842825483666, |
|
"loss": 0.0369, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 7.125748502994012, |
|
"grad_norm": 0.17922641336917877, |
|
"learning_rate": 0.00015261587542458443, |
|
"loss": 0.0288, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 7.18562874251497, |
|
"grad_norm": 0.3264837861061096, |
|
"learning_rate": 0.0001517681254595554, |
|
"loss": 0.0299, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 7.245508982035928, |
|
"grad_norm": 1.1214922666549683, |
|
"learning_rate": 0.00015091526209627944, |
|
"loss": 0.0292, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 7.3053892215568865, |
|
"grad_norm": 0.3472778797149658, |
|
"learning_rate": 0.0001500573695763625, |
|
"loss": 0.031, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 7.365269461077844, |
|
"grad_norm": 0.3422839641571045, |
|
"learning_rate": 0.00014919453263816565, |
|
"loss": 0.0312, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 7.425149700598802, |
|
"grad_norm": 0.1827375441789627, |
|
"learning_rate": 0.0001483268365084351, |
|
"loss": 0.0329, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 7.485029940119761, |
|
"grad_norm": 0.265622079372406, |
|
"learning_rate": 0.0001474543668938837, |
|
"loss": 0.0288, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 7.544910179640718, |
|
"grad_norm": 0.56901615858078, |
|
"learning_rate": 0.0001465772099727256, |
|
"loss": 0.0322, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 7.604790419161676, |
|
"grad_norm": 0.2764129638671875, |
|
"learning_rate": 0.00014569545238616368, |
|
"loss": 0.029, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 7.664670658682635, |
|
"grad_norm": 0.3648919463157654, |
|
"learning_rate": 0.00014480918122983174, |
|
"loss": 0.0316, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 7.724550898203593, |
|
"grad_norm": 0.25707149505615234, |
|
"learning_rate": 0.00014391848404519158, |
|
"loss": 0.033, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 7.7844311377245505, |
|
"grad_norm": 0.4237244427204132, |
|
"learning_rate": 0.0001430234488108861, |
|
"loss": 0.0331, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 7.844311377245509, |
|
"grad_norm": 0.6553322672843933, |
|
"learning_rate": 0.00014212416393404915, |
|
"loss": 0.0293, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 7.904191616766467, |
|
"grad_norm": 0.4958610236644745, |
|
"learning_rate": 0.00014122071824157318, |
|
"loss": 0.0333, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 7.9640718562874255, |
|
"grad_norm": 0.24589641392230988, |
|
"learning_rate": 0.0001403132009713354, |
|
"loss": 0.0339, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 8.023952095808383, |
|
"grad_norm": 0.4254026710987091, |
|
"learning_rate": 0.0001394017017633831, |
|
"loss": 0.0361, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 8.08383233532934, |
|
"grad_norm": 0.1916535496711731, |
|
"learning_rate": 0.00013848631065107973, |
|
"loss": 0.0318, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 8.1437125748503, |
|
"grad_norm": 0.3515986204147339, |
|
"learning_rate": 0.00013756711805221168, |
|
"loss": 0.0327, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 8.203592814371257, |
|
"grad_norm": 0.2998508810997009, |
|
"learning_rate": 0.00013664421476005723, |
|
"loss": 0.0337, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 8.263473053892216, |
|
"grad_norm": 0.3755238354206085, |
|
"learning_rate": 0.0001357176919344186, |
|
"loss": 0.0282, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 8.323353293413174, |
|
"grad_norm": 0.28876182436943054, |
|
"learning_rate": 0.0001347876410926175, |
|
"loss": 0.0279, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 8.383233532934131, |
|
"grad_norm": 0.18108633160591125, |
|
"learning_rate": 0.00013385415410045555, |
|
"loss": 0.0244, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 8.44311377245509, |
|
"grad_norm": 0.24347344040870667, |
|
"learning_rate": 0.00013291732316314024, |
|
"loss": 0.0268, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 8.502994011976048, |
|
"grad_norm": 0.18795382976531982, |
|
"learning_rate": 0.00013197724081617736, |
|
"loss": 0.0266, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 8.562874251497005, |
|
"grad_norm": 0.2501884698867798, |
|
"learning_rate": 0.00013103399991623078, |
|
"loss": 0.0323, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 8.622754491017965, |
|
"grad_norm": 0.24649246037006378, |
|
"learning_rate": 0.00013008769363195052, |
|
"loss": 0.0282, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 8.682634730538922, |
|
"grad_norm": 0.2907567322254181, |
|
"learning_rate": 0.00012913841543477005, |
|
"loss": 0.0261, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 8.74251497005988, |
|
"grad_norm": 0.4828208386898041, |
|
"learning_rate": 0.00012818625908967367, |
|
"loss": 0.0301, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 8.802395209580839, |
|
"grad_norm": 0.2910805642604828, |
|
"learning_rate": 0.0001272313186459348, |
|
"loss": 0.0265, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 8.862275449101796, |
|
"grad_norm": 0.18343299627304077, |
|
"learning_rate": 0.00012627368842782626, |
|
"loss": 0.0274, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 8.922155688622755, |
|
"grad_norm": 0.2735447883605957, |
|
"learning_rate": 0.00012531346302530348, |
|
"loss": 0.0314, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 8.982035928143713, |
|
"grad_norm": 0.5026355385780334, |
|
"learning_rate": 0.0001243507372846613, |
|
"loss": 0.0234, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 9.04191616766467, |
|
"grad_norm": 0.3467860519886017, |
|
"learning_rate": 0.00012338560629916544, |
|
"loss": 0.0271, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 9.10179640718563, |
|
"grad_norm": 0.2949310839176178, |
|
"learning_rate": 0.00012241816539965996, |
|
"loss": 0.0289, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 9.161676646706587, |
|
"grad_norm": 0.3735937476158142, |
|
"learning_rate": 0.00012144851014515055, |
|
"loss": 0.0268, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 9.221556886227544, |
|
"grad_norm": 0.26456427574157715, |
|
"learning_rate": 0.000120476736313366, |
|
"loss": 0.0295, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 9.281437125748504, |
|
"grad_norm": 0.2651910185813904, |
|
"learning_rate": 0.0001195029398912976, |
|
"loss": 0.0271, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 9.341317365269461, |
|
"grad_norm": 0.310340017080307, |
|
"learning_rate": 0.00011852721706571803, |
|
"loss": 0.0301, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 9.401197604790418, |
|
"grad_norm": 0.23329517245292664, |
|
"learning_rate": 0.00011754966421368066, |
|
"loss": 0.0281, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 9.461077844311378, |
|
"grad_norm": 0.21749205887317657, |
|
"learning_rate": 0.00011657037789299961, |
|
"loss": 0.0263, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 9.520958083832335, |
|
"grad_norm": 0.4612361490726471, |
|
"learning_rate": 0.00011558945483271253, |
|
"loss": 0.0298, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 9.580838323353294, |
|
"grad_norm": 0.19917744398117065, |
|
"learning_rate": 0.00011460699192352608, |
|
"loss": 0.0276, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 9.640718562874252, |
|
"grad_norm": 0.22414059937000275, |
|
"learning_rate": 0.00011362308620824549, |
|
"loss": 0.0261, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 9.70059880239521, |
|
"grad_norm": 0.3830195963382721, |
|
"learning_rate": 0.00011263783487218924, |
|
"loss": 0.025, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 9.760479041916168, |
|
"grad_norm": 0.2302185744047165, |
|
"learning_rate": 0.00011165133523358955, |
|
"loss": 0.0197, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 9.820359281437126, |
|
"grad_norm": 0.22359217703342438, |
|
"learning_rate": 0.00011066368473397977, |
|
"loss": 0.0263, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 9.880239520958083, |
|
"grad_norm": 0.29255324602127075, |
|
"learning_rate": 0.0001096749809285695, |
|
"loss": 0.0252, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 9.940119760479043, |
|
"grad_norm": 0.1755807250738144, |
|
"learning_rate": 0.0001086853214766088, |
|
"loss": 0.024, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"grad_norm": 0.3928946554660797, |
|
"learning_rate": 0.00010769480413174162, |
|
"loss": 0.0237, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 10.059880239520957, |
|
"grad_norm": 0.2258201539516449, |
|
"learning_rate": 0.0001067035267323504, |
|
"loss": 0.0263, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 10.119760479041917, |
|
"grad_norm": 0.24732491374015808, |
|
"learning_rate": 0.00010571158719189192, |
|
"loss": 0.0273, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 10.179640718562874, |
|
"grad_norm": 0.2847275733947754, |
|
"learning_rate": 0.00010471908348922596, |
|
"loss": 0.0263, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 10.239520958083832, |
|
"grad_norm": 0.1886613517999649, |
|
"learning_rate": 0.00010372611365893758, |
|
"loss": 0.0285, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 10.29940119760479, |
|
"grad_norm": 0.3526057302951813, |
|
"learning_rate": 0.00010273277578165341, |
|
"loss": 0.0273, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 10.359281437125748, |
|
"grad_norm": 0.3312857449054718, |
|
"learning_rate": 0.00010173916797435405, |
|
"loss": 0.0259, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 10.419161676646706, |
|
"grad_norm": 0.3997904360294342, |
|
"learning_rate": 0.00010074538838068243, |
|
"loss": 0.0234, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 10.479041916167665, |
|
"grad_norm": 0.29786816239356995, |
|
"learning_rate": 9.975153516124972e-05, |
|
"loss": 0.027, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 10.538922155688622, |
|
"grad_norm": 0.37553417682647705, |
|
"learning_rate": 9.875770648393936e-05, |
|
"loss": 0.0282, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 10.598802395209582, |
|
"grad_norm": 0.20235197246074677, |
|
"learning_rate": 9.776400051421078e-05, |
|
"loss": 0.0211, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 10.658682634730539, |
|
"grad_norm": 0.20965729653835297, |
|
"learning_rate": 9.677051540540286e-05, |
|
"loss": 0.0271, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 10.718562874251496, |
|
"grad_norm": 0.29233694076538086, |
|
"learning_rate": 9.577734928903899e-05, |
|
"loss": 0.025, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 10.778443113772456, |
|
"grad_norm": 0.258211225271225, |
|
"learning_rate": 9.47846002651341e-05, |
|
"loss": 0.0245, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 10.838323353293413, |
|
"grad_norm": 0.19290903210639954, |
|
"learning_rate": 9.379236639250473e-05, |
|
"loss": 0.0221, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 10.89820359281437, |
|
"grad_norm": 0.2504674196243286, |
|
"learning_rate": 9.28007456790833e-05, |
|
"loss": 0.0232, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 10.95808383233533, |
|
"grad_norm": 0.2591340243816376, |
|
"learning_rate": 9.180983607223757e-05, |
|
"loss": 0.0247, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 11.017964071856287, |
|
"grad_norm": 0.3230155408382416, |
|
"learning_rate": 9.081973544909557e-05, |
|
"loss": 0.0212, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 11.077844311377245, |
|
"grad_norm": 0.21772272884845734, |
|
"learning_rate": 8.983054160687801e-05, |
|
"loss": 0.0249, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 11.137724550898204, |
|
"grad_norm": 0.31186336278915405, |
|
"learning_rate": 8.88423522532383e-05, |
|
"loss": 0.0254, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 11.197604790419161, |
|
"grad_norm": 0.20252284407615662, |
|
"learning_rate": 8.785526499661137e-05, |
|
"loss": 0.0233, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 11.25748502994012, |
|
"grad_norm": 0.2314423769712448, |
|
"learning_rate": 8.686937733657264e-05, |
|
"loss": 0.025, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 11.317365269461078, |
|
"grad_norm": 0.16507509350776672, |
|
"learning_rate": 8.588478665420717e-05, |
|
"loss": 0.0253, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 11.377245508982035, |
|
"grad_norm": 0.3258945941925049, |
|
"learning_rate": 8.490159020249107e-05, |
|
"loss": 0.0219, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 11.437125748502995, |
|
"grad_norm": 0.20880267024040222, |
|
"learning_rate": 8.391988509668534e-05, |
|
"loss": 0.0248, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 11.497005988023952, |
|
"grad_norm": 0.4254096448421478, |
|
"learning_rate": 8.293976830474312e-05, |
|
"loss": 0.0272, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 11.55688622754491, |
|
"grad_norm": 0.15054860711097717, |
|
"learning_rate": 8.196133663773191e-05, |
|
"loss": 0.0202, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 11.616766467065869, |
|
"grad_norm": 0.18735750019550323, |
|
"learning_rate": 8.09846867402709e-05, |
|
"loss": 0.0249, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 11.676646706586826, |
|
"grad_norm": 0.1882217973470688, |
|
"learning_rate": 8.000991508098492e-05, |
|
"loss": 0.0204, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 11.736526946107784, |
|
"grad_norm": 0.23175270855426788, |
|
"learning_rate": 7.903711794297572e-05, |
|
"loss": 0.0231, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 11.796407185628743, |
|
"grad_norm": 0.18652957677841187, |
|
"learning_rate": 7.806639141431178e-05, |
|
"loss": 0.0223, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 11.8562874251497, |
|
"grad_norm": 0.4685545563697815, |
|
"learning_rate": 7.709783137853692e-05, |
|
"loss": 0.0246, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 11.91616766467066, |
|
"grad_norm": 0.2099016010761261, |
|
"learning_rate": 7.613153350519969e-05, |
|
"loss": 0.0224, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 11.976047904191617, |
|
"grad_norm": 0.23307521641254425, |
|
"learning_rate": 7.516759324040338e-05, |
|
"loss": 0.0209, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 12.035928143712574, |
|
"grad_norm": 0.8399657607078552, |
|
"learning_rate": 7.42061057973785e-05, |
|
"loss": 0.0215, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 12.095808383233534, |
|
"grad_norm": 0.389076292514801, |
|
"learning_rate": 7.324716614707793e-05, |
|
"loss": 0.0227, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 12.155688622754491, |
|
"grad_norm": 0.25040480494499207, |
|
"learning_rate": 7.229086900879629e-05, |
|
"loss": 0.0231, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 12.215568862275449, |
|
"grad_norm": 0.2006189376115799, |
|
"learning_rate": 7.133730884081395e-05, |
|
"loss": 0.022, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 12.275449101796408, |
|
"grad_norm": 0.24044956266880035, |
|
"learning_rate": 7.038657983106702e-05, |
|
"loss": 0.0196, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 12.335329341317365, |
|
"grad_norm": 0.15739458799362183, |
|
"learning_rate": 6.943877588784382e-05, |
|
"loss": 0.0232, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 12.395209580838323, |
|
"grad_norm": 0.18294060230255127, |
|
"learning_rate": 6.849399063050912e-05, |
|
"loss": 0.0193, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 12.455089820359282, |
|
"grad_norm": 0.17852729558944702, |
|
"learning_rate": 6.755231738025693e-05, |
|
"loss": 0.0225, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 12.51497005988024, |
|
"grad_norm": 0.1694345921278, |
|
"learning_rate": 6.661384915089268e-05, |
|
"loss": 0.0181, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 12.574850299401197, |
|
"grad_norm": 0.2561972141265869, |
|
"learning_rate": 6.567867863964561e-05, |
|
"loss": 0.0197, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 12.634730538922156, |
|
"grad_norm": 0.16482268273830414, |
|
"learning_rate": 6.474689821801295e-05, |
|
"loss": 0.0179, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 12.694610778443113, |
|
"grad_norm": 1.6696979999542236, |
|
"learning_rate": 6.38185999226356e-05, |
|
"loss": 0.0197, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 12.754491017964071, |
|
"grad_norm": 0.22506001591682434, |
|
"learning_rate": 6.289387544620736e-05, |
|
"loss": 0.0218, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 12.81437125748503, |
|
"grad_norm": 0.23911543190479279, |
|
"learning_rate": 6.197281612841795e-05, |
|
"loss": 0.0217, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 12.874251497005988, |
|
"grad_norm": 0.20184627175331116, |
|
"learning_rate": 6.10555129469309e-05, |
|
"loss": 0.0168, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 12.934131736526947, |
|
"grad_norm": 0.17834153771400452, |
|
"learning_rate": 6.014205650839737e-05, |
|
"loss": 0.019, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 12.994011976047904, |
|
"grad_norm": 0.2051975578069687, |
|
"learning_rate": 5.923253703950617e-05, |
|
"loss": 0.0192, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 13.053892215568862, |
|
"grad_norm": 0.21173317730426788, |
|
"learning_rate": 5.832704437807195e-05, |
|
"loss": 0.0165, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 13.113772455089821, |
|
"grad_norm": 0.4522579610347748, |
|
"learning_rate": 5.742566796416129e-05, |
|
"loss": 0.0185, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 13.173652694610778, |
|
"grad_norm": 0.22313633561134338, |
|
"learning_rate": 5.652849683125823e-05, |
|
"loss": 0.0172, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 13.233532934131736, |
|
"grad_norm": 0.2275288850069046, |
|
"learning_rate": 5.563561959747019e-05, |
|
"loss": 0.0242, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 13.293413173652695, |
|
"grad_norm": 0.15697140991687775, |
|
"learning_rate": 5.4747124456774445e-05, |
|
"loss": 0.0207, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 13.353293413173652, |
|
"grad_norm": 0.1576000303030014, |
|
"learning_rate": 5.38630991703069e-05, |
|
"loss": 0.0217, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 13.41317365269461, |
|
"grad_norm": 0.42864474654197693, |
|
"learning_rate": 5.2983631057693484e-05, |
|
"loss": 0.0201, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 13.47305389221557, |
|
"grad_norm": 0.24114832282066345, |
|
"learning_rate": 5.210880698842514e-05, |
|
"loss": 0.0187, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 13.532934131736527, |
|
"grad_norm": 0.2340080291032791, |
|
"learning_rate": 5.123871337327724e-05, |
|
"loss": 0.0202, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 13.592814371257486, |
|
"grad_norm": 0.22986619174480438, |
|
"learning_rate": 5.037343615577448e-05, |
|
"loss": 0.0211, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 13.652694610778443, |
|
"grad_norm": 0.09167616814374924, |
|
"learning_rate": 4.951306080370165e-05, |
|
"loss": 0.0177, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 13.7125748502994, |
|
"grad_norm": 0.1504812389612198, |
|
"learning_rate": 4.865767230066166e-05, |
|
"loss": 0.0153, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 13.77245508982036, |
|
"grad_norm": 0.16760465502738953, |
|
"learning_rate": 4.78073551376812e-05, |
|
"loss": 0.0177, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 13.832335329341317, |
|
"grad_norm": 0.1385156363248825, |
|
"learning_rate": 4.6962193304865335e-05, |
|
"loss": 0.0183, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 13.892215568862275, |
|
"grad_norm": 0.5168209075927734, |
|
"learning_rate": 4.6122270283100944e-05, |
|
"loss": 0.0184, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 13.952095808383234, |
|
"grad_norm": 0.25558820366859436, |
|
"learning_rate": 4.528766903581149e-05, |
|
"loss": 0.0171, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 14.011976047904191, |
|
"grad_norm": 0.2101954072713852, |
|
"learning_rate": 4.44584720007618e-05, |
|
"loss": 0.022, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 14.071856287425149, |
|
"grad_norm": 0.3249596655368805, |
|
"learning_rate": 4.363476108191552e-05, |
|
"loss": 0.0176, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 14.131736526946108, |
|
"grad_norm": 0.48448604345321655, |
|
"learning_rate": 4.2816617641344934e-05, |
|
"loss": 0.0189, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 14.191616766467066, |
|
"grad_norm": 0.3032410144805908, |
|
"learning_rate": 4.200412249119463e-05, |
|
"loss": 0.0176, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 14.251497005988025, |
|
"grad_norm": 0.2361571490764618, |
|
"learning_rate": 4.119735588569899e-05, |
|
"loss": 0.0165, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 14.311377245508982, |
|
"grad_norm": 0.2228900045156479, |
|
"learning_rate": 4.039639751325525e-05, |
|
"loss": 0.0178, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 14.37125748502994, |
|
"grad_norm": 0.25487351417541504, |
|
"learning_rate": 3.960132648855226e-05, |
|
"loss": 0.0191, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 14.431137724550899, |
|
"grad_norm": 0.13682517409324646, |
|
"learning_rate": 3.881222134475589e-05, |
|
"loss": 0.0161, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 14.491017964071856, |
|
"grad_norm": 0.29112255573272705, |
|
"learning_rate": 3.802916002575192e-05, |
|
"loss": 0.0152, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 14.550898203592814, |
|
"grad_norm": 0.1447286605834961, |
|
"learning_rate": 3.725221987844721e-05, |
|
"loss": 0.0208, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 14.610778443113773, |
|
"grad_norm": 0.2142002284526825, |
|
"learning_rate": 3.6481477645129624e-05, |
|
"loss": 0.0183, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 14.67065868263473, |
|
"grad_norm": 0.47556108236312866, |
|
"learning_rate": 3.571700945588789e-05, |
|
"loss": 0.0179, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 14.730538922155688, |
|
"grad_norm": 0.11879216134548187, |
|
"learning_rate": 3.495889082109185e-05, |
|
"loss": 0.0189, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 14.790419161676647, |
|
"grad_norm": 0.18499957025051117, |
|
"learning_rate": 3.4207196623933857e-05, |
|
"loss": 0.0171, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 14.850299401197605, |
|
"grad_norm": 0.3223067820072174, |
|
"learning_rate": 3.346200111303226e-05, |
|
"loss": 0.0165, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 14.910179640718562, |
|
"grad_norm": 0.2720371186733246, |
|
"learning_rate": 3.272337789509751e-05, |
|
"loss": 0.0175, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 14.970059880239521, |
|
"grad_norm": 0.20944693684577942, |
|
"learning_rate": 3.199139992766155e-05, |
|
"loss": 0.0195, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 15.029940119760479, |
|
"grad_norm": 0.18262909352779388, |
|
"learning_rate": 3.1266139511871465e-05, |
|
"loss": 0.015, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 15.089820359281438, |
|
"grad_norm": 0.09555590897798538, |
|
"learning_rate": 3.0547668285348105e-05, |
|
"loss": 0.0163, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 15.149700598802395, |
|
"grad_norm": 0.2287481278181076, |
|
"learning_rate": 2.9836057215109746e-05, |
|
"loss": 0.0184, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 15.209580838323353, |
|
"grad_norm": 0.18416175246238708, |
|
"learning_rate": 2.9131376590562587e-05, |
|
"loss": 0.0161, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 15.269461077844312, |
|
"grad_norm": 0.18038851022720337, |
|
"learning_rate": 2.843369601655792e-05, |
|
"loss": 0.0149, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 15.32934131736527, |
|
"grad_norm": 0.2322443276643753, |
|
"learning_rate": 2.7743084406516693e-05, |
|
"loss": 0.0178, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 15.389221556886227, |
|
"grad_norm": 0.1812848597764969, |
|
"learning_rate": 2.705960997562277e-05, |
|
"loss": 0.0191, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 15.449101796407186, |
|
"grad_norm": 0.1792038083076477, |
|
"learning_rate": 2.6383340234084875e-05, |
|
"loss": 0.0132, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 15.508982035928144, |
|
"grad_norm": 0.14055480062961578, |
|
"learning_rate": 2.5714341980468437e-05, |
|
"loss": 0.0146, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 15.568862275449101, |
|
"grad_norm": 0.2022690773010254, |
|
"learning_rate": 2.5052681295097225e-05, |
|
"loss": 0.0156, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 15.62874251497006, |
|
"grad_norm": 0.11712924391031265, |
|
"learning_rate": 2.439842353352667e-05, |
|
"loss": 0.0174, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 15.688622754491018, |
|
"grad_norm": 0.3119458258152008, |
|
"learning_rate": 2.3751633320088064e-05, |
|
"loss": 0.0188, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 15.748502994011975, |
|
"grad_norm": 0.31999191641807556, |
|
"learning_rate": 2.311237454150539e-05, |
|
"loss": 0.0149, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 15.808383233532934, |
|
"grad_norm": 0.21171946823596954, |
|
"learning_rate": 2.248071034058492e-05, |
|
"loss": 0.0166, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 15.868263473053892, |
|
"grad_norm": 0.15118949115276337, |
|
"learning_rate": 2.185670310997835e-05, |
|
"loss": 0.0195, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 15.928143712574851, |
|
"grad_norm": 0.16680358350276947, |
|
"learning_rate": 2.124041448601979e-05, |
|
"loss": 0.0135, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 15.988023952095809, |
|
"grad_norm": 0.16885513067245483, |
|
"learning_rate": 2.0631905342637782e-05, |
|
"loss": 0.0154, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 16.047904191616766, |
|
"grad_norm": 0.2569000720977783, |
|
"learning_rate": 2.0031235785342407e-05, |
|
"loss": 0.0168, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 16.107784431137723, |
|
"grad_norm": 0.3208532929420471, |
|
"learning_rate": 1.9438465145288374e-05, |
|
"loss": 0.0154, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 16.16766467065868, |
|
"grad_norm": 0.1462489366531372, |
|
"learning_rate": 1.885365197341461e-05, |
|
"loss": 0.0157, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 16.227544910179642, |
|
"grad_norm": 0.1316727101802826, |
|
"learning_rate": 1.827685403466087e-05, |
|
"loss": 0.0138, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 16.2874251497006, |
|
"grad_norm": 0.17914234101772308, |
|
"learning_rate": 1.7708128302261994e-05, |
|
"loss": 0.0147, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 16.347305389221557, |
|
"grad_norm": 0.18030117452144623, |
|
"learning_rate": 1.7147530952120337e-05, |
|
"loss": 0.0149, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 16.407185628742514, |
|
"grad_norm": 0.18496276438236237, |
|
"learning_rate": 1.659511735725716e-05, |
|
"loss": 0.016, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 16.46706586826347, |
|
"grad_norm": 0.20014838874340057, |
|
"learning_rate": 1.605094208234288e-05, |
|
"loss": 0.0119, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 16.526946107784433, |
|
"grad_norm": 0.2455081343650818, |
|
"learning_rate": 1.5515058878307665e-05, |
|
"loss": 0.0129, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 16.58682634730539, |
|
"grad_norm": 0.40518850088119507, |
|
"learning_rate": 1.4987520677032174e-05, |
|
"loss": 0.0145, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 16.646706586826348, |
|
"grad_norm": 0.2330215722322464, |
|
"learning_rate": 1.4468379586119074e-05, |
|
"loss": 0.0178, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 16.706586826347305, |
|
"grad_norm": 0.29919859766960144, |
|
"learning_rate": 1.3957686883746191e-05, |
|
"loss": 0.0129, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 16.766467065868262, |
|
"grad_norm": 0.1759548783302307, |
|
"learning_rate": 1.3455493013601561e-05, |
|
"loss": 0.0137, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 16.82634730538922, |
|
"grad_norm": 0.21709783375263214, |
|
"learning_rate": 1.2961847579900777e-05, |
|
"loss": 0.0154, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 16.88622754491018, |
|
"grad_norm": 0.12408145517110825, |
|
"learning_rate": 1.2476799342487255e-05, |
|
"loss": 0.0147, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 16.94610778443114, |
|
"grad_norm": 0.17193816602230072, |
|
"learning_rate": 1.2000396212016218e-05, |
|
"loss": 0.0161, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 17.005988023952096, |
|
"grad_norm": 0.2605287730693817, |
|
"learning_rate": 1.1532685245222075e-05, |
|
"loss": 0.0142, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 17.065868263473053, |
|
"grad_norm": 0.11747428774833679, |
|
"learning_rate": 1.1073712640270506e-05, |
|
"loss": 0.0131, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 17.12574850299401, |
|
"grad_norm": 0.3511081337928772, |
|
"learning_rate": 1.0623523732195284e-05, |
|
"loss": 0.0147, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 17.18562874251497, |
|
"grad_norm": 0.3589492440223694, |
|
"learning_rate": 1.0182162988420163e-05, |
|
"loss": 0.0166, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 17.24550898203593, |
|
"grad_norm": 0.14912162721157074, |
|
"learning_rate": 9.749674004366727e-06, |
|
"loss": 0.0148, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 17.305389221556887, |
|
"grad_norm": 0.4481550455093384, |
|
"learning_rate": 9.326099499148177e-06, |
|
"loss": 0.0126, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 17.365269461077844, |
|
"grad_norm": 0.3854505121707916, |
|
"learning_rate": 8.911481311349834e-06, |
|
"loss": 0.0149, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 17.4251497005988, |
|
"grad_norm": 0.11779692023992538, |
|
"learning_rate": 8.505860394896414e-06, |
|
"loss": 0.0133, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 17.48502994011976, |
|
"grad_norm": 0.16993847489356995, |
|
"learning_rate": 8.109276815006938e-06, |
|
"loss": 0.0116, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 17.54491017964072, |
|
"grad_norm": 0.3170551657676697, |
|
"learning_rate": 7.721769744237184e-06, |
|
"loss": 0.0159, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 17.604790419161677, |
|
"grad_norm": 0.5289223194122314, |
|
"learning_rate": 7.343377458610446e-06, |
|
"loss": 0.0171, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 17.664670658682635, |
|
"grad_norm": 0.2612517476081848, |
|
"learning_rate": 6.9741373338368165e-06, |
|
"loss": 0.0161, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 17.724550898203592, |
|
"grad_norm": 0.1158524826169014, |
|
"learning_rate": 6.614085841621442e-06, |
|
"loss": 0.0135, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 17.78443113772455, |
|
"grad_norm": 0.3021811246871948, |
|
"learning_rate": 6.263258546061978e-06, |
|
"loss": 0.0161, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 17.84431137724551, |
|
"grad_norm": 0.4671183228492737, |
|
"learning_rate": 5.921690100135713e-06, |
|
"loss": 0.0111, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 17.904191616766468, |
|
"grad_norm": 0.12684480845928192, |
|
"learning_rate": 5.589414242276869e-06, |
|
"loss": 0.0118, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 17.964071856287426, |
|
"grad_norm": 0.18783032894134521, |
|
"learning_rate": 5.266463793043896e-06, |
|
"loss": 0.0108, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 18.023952095808383, |
|
"grad_norm": 0.3168010413646698, |
|
"learning_rate": 4.952870651877739e-06, |
|
"loss": 0.0149, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 18.08383233532934, |
|
"grad_norm": 1.2831743955612183, |
|
"learning_rate": 4.648665793950968e-06, |
|
"loss": 0.0119, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 18.143712574850298, |
|
"grad_norm": 0.11012054234743118, |
|
"learning_rate": 4.353879267108119e-06, |
|
"loss": 0.0141, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 18.20359281437126, |
|
"grad_norm": 0.41468945145606995, |
|
"learning_rate": 4.068540188897796e-06, |
|
"loss": 0.0118, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 18.263473053892216, |
|
"grad_norm": 0.16896358132362366, |
|
"learning_rate": 3.792676743696588e-06, |
|
"loss": 0.0139, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 18.323353293413174, |
|
"grad_norm": 0.21275334060192108, |
|
"learning_rate": 3.5263161799251154e-06, |
|
"loss": 0.0118, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 18.38323353293413, |
|
"grad_norm": 0.15216277539730072, |
|
"learning_rate": 3.2694848073565596e-06, |
|
"loss": 0.0117, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 18.44311377245509, |
|
"grad_norm": 0.15968835353851318, |
|
"learning_rate": 3.0222079945179895e-06, |
|
"loss": 0.0144, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 18.50299401197605, |
|
"grad_norm": 0.2130011022090912, |
|
"learning_rate": 2.7845101661844618e-06, |
|
"loss": 0.0114, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 18.562874251497007, |
|
"grad_norm": 0.14197607338428497, |
|
"learning_rate": 2.5564148009665778e-06, |
|
"loss": 0.0142, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 18.622754491017965, |
|
"grad_norm": 0.7464102506637573, |
|
"learning_rate": 2.3379444289913342e-06, |
|
"loss": 0.0135, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 18.682634730538922, |
|
"grad_norm": 0.21782080829143524, |
|
"learning_rate": 2.1291206296767063e-06, |
|
"loss": 0.0146, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 18.74251497005988, |
|
"grad_norm": 0.21821217238903046, |
|
"learning_rate": 1.9299640296001288e-06, |
|
"loss": 0.0112, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 18.802395209580837, |
|
"grad_norm": 0.2201797068119049, |
|
"learning_rate": 1.7404943004611596e-06, |
|
"loss": 0.0155, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 18.862275449101798, |
|
"grad_norm": 0.4531762897968292, |
|
"learning_rate": 1.5607301571383459e-06, |
|
"loss": 0.0129, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 18.922155688622755, |
|
"grad_norm": 0.2320559322834015, |
|
"learning_rate": 1.390689355840691e-06, |
|
"loss": 0.0152, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 18.982035928143713, |
|
"grad_norm": 0.12046198546886444, |
|
"learning_rate": 1.2303886923537677e-06, |
|
"loss": 0.0129, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 19.04191616766467, |
|
"grad_norm": 0.23176096379756927, |
|
"learning_rate": 1.0798440003807474e-06, |
|
"loss": 0.013, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 19.101796407185628, |
|
"grad_norm": 0.15658603608608246, |
|
"learning_rate": 9.390701499783827e-07, |
|
"loss": 0.0102, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 19.161676646706585, |
|
"grad_norm": 0.12516404688358307, |
|
"learning_rate": 8.080810460882493e-07, |
|
"loss": 0.0117, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 19.221556886227546, |
|
"grad_norm": 0.1989312320947647, |
|
"learning_rate": 6.868896271632785e-07, |
|
"loss": 0.0121, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 19.281437125748504, |
|
"grad_norm": 0.15833425521850586, |
|
"learning_rate": 5.755078638897459e-07, |
|
"loss": 0.0145, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 19.34131736526946, |
|
"grad_norm": 0.13037511706352234, |
|
"learning_rate": 4.739467580048618e-07, |
|
"loss": 0.0106, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 19.40119760479042, |
|
"grad_norm": 0.3514297306537628, |
|
"learning_rate": 3.8221634121013983e-07, |
|
"loss": 0.0104, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 19.461077844311376, |
|
"grad_norm": 0.1234828531742096, |
|
"learning_rate": 3.003256741803906e-07, |
|
"loss": 0.0137, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 19.520958083832337, |
|
"grad_norm": 0.1495758444070816, |
|
"learning_rate": 2.2828284566890345e-07, |
|
"loss": 0.013, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 19.580838323353294, |
|
"grad_norm": 0.5322373509407043, |
|
"learning_rate": 1.6609497170834155e-07, |
|
"loss": 0.0136, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 19.64071856287425, |
|
"grad_norm": 0.15904094278812408, |
|
"learning_rate": 1.1376819490795942e-07, |
|
"loss": 0.0151, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 19.70059880239521, |
|
"grad_norm": 0.43996843695640564, |
|
"learning_rate": 7.130768384679965e-08, |
|
"loss": 0.0125, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 19.760479041916167, |
|
"grad_norm": 0.13913209736347198, |
|
"learning_rate": 3.8717632563201046e-08, |
|
"loss": 0.0188, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 19.820359281437124, |
|
"grad_norm": 0.6390590667724609, |
|
"learning_rate": 1.600126014051906e-08, |
|
"loss": 0.0132, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 19.880239520958085, |
|
"grad_norm": 0.11968772113323212, |
|
"learning_rate": 3.1608103891689335e-09, |
|
"loss": 0.0118, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 19.92814371257485, |
|
"step": 3328, |
|
"total_flos": 4.678773863650698e+17, |
|
"train_loss": 0.03960529512663085, |
|
"train_runtime": 3740.453, |
|
"train_samples_per_second": 56.943, |
|
"train_steps_per_second": 0.89 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 3328, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 20, |
|
"save_steps": 10000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 4.678773863650698e+17, |
|
"train_batch_size": 64, |
|
"trial_name": null, |
|
"trial_params": null |
|
}