{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 19.92814371257485,
  "eval_steps": 500,
  "global_step": 3328,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.059880239520958084, "grad_norm": 4.727615833282471, "learning_rate": 1.1976047904191617e-05, "loss": 1.062, "step": 10 },
    { "epoch": 0.11976047904191617, "grad_norm": 2.2185091972351074, "learning_rate": 2.3952095808383234e-05, "loss": 0.6786, "step": 20 },
    { "epoch": 0.17964071856287425, "grad_norm": 1.7475155591964722, "learning_rate": 3.592814371257485e-05, "loss": 0.3714, "step": 30 },
    { "epoch": 0.23952095808383234, "grad_norm": 0.7705522179603577, "learning_rate": 4.790419161676647e-05, "loss": 0.2662, "step": 40 },
    { "epoch": 0.2994011976047904, "grad_norm": 0.5850515365600586, "learning_rate": 5.988023952095808e-05, "loss": 0.2129, "step": 50 },
    { "epoch": 0.3592814371257485, "grad_norm": 0.5091089010238647, "learning_rate": 7.18562874251497e-05, "loss": 0.1831, "step": 60 },
    { "epoch": 0.41916167664670656, "grad_norm": 0.8252758383750916, "learning_rate": 8.383233532934131e-05, "loss": 0.1614, "step": 70 },
    { "epoch": 0.47904191616766467, "grad_norm": 0.8221730589866638, "learning_rate": 9.580838323353294e-05, "loss": 0.1408, "step": 80 },
    { "epoch": 0.5389221556886228, "grad_norm": 0.6895884871482849, "learning_rate": 0.00010778443113772456, "loss": 0.1253, "step": 90 },
    { "epoch": 0.5988023952095808, "grad_norm": 1.0663549900054932, "learning_rate": 0.00011976047904191617, "loss": 0.1049, "step": 100 },
    { "epoch": 0.6586826347305389, "grad_norm": 0.4257476031780243, "learning_rate": 0.0001317365269461078, "loss": 0.1017, "step": 110 },
    { "epoch": 0.718562874251497, "grad_norm": 0.46928706765174866, "learning_rate": 0.0001437125748502994, "loss": 0.0887, "step": 120 },
    { "epoch": 0.7784431137724551, "grad_norm": 0.40188413858413696, "learning_rate": 0.00015568862275449103, "loss": 0.0821, "step": 130 },
    { "epoch": 0.8383233532934131, "grad_norm": 0.44656357169151306, "learning_rate": 0.00016766467065868263, "loss": 0.0802, "step": 140 },
    { "epoch": 0.8982035928143712, "grad_norm": 0.8075553178787231, "learning_rate": 0.00017964071856287425, "loss": 0.0768, "step": 150 },
    { "epoch": 0.9580838323353293, "grad_norm": 0.7961329817771912, "learning_rate": 0.00019161676646706587, "loss": 0.0788, "step": 160 },
    { "epoch": 1.0179640718562875, "grad_norm": 0.5878146886825562, "learning_rate": 0.00019999955550902674, "loss": 0.0751, "step": 170 },
    { "epoch": 1.0778443113772456, "grad_norm": 0.4572954773902893, "learning_rate": 0.0001999916535572039, "loss": 0.0684, "step": 180 },
    { "epoch": 1.1377245508982037, "grad_norm": 0.3957609236240387, "learning_rate": 0.00019997387492660495, "loss": 0.0651, "step": 190 },
    { "epoch": 1.1976047904191618, "grad_norm": 0.3396591544151306, "learning_rate": 0.00019994622137331462, "loss": 0.0655, "step": 200 },
    { "epoch": 1.2574850299401197, "grad_norm": 0.4531398415565491, "learning_rate": 0.00019990869562881356, "loss": 0.0648, "step": 210 },
    { "epoch": 1.3173652694610778, "grad_norm": 0.36353209614753723, "learning_rate": 0.00019986130139970835, "loss": 0.0619, "step": 220 },
    { "epoch": 1.377245508982036, "grad_norm": 0.392230361700058, "learning_rate": 0.00019980404336736554, "loss": 0.0583, "step": 230 },
    { "epoch": 1.437125748502994, "grad_norm": 0.34288108348846436, "learning_rate": 0.00019973692718744902, "loss": 0.0599, "step": 240 },
    { "epoch": 1.4970059880239521, "grad_norm": 0.40226370096206665, "learning_rate": 0.0001996599594893617, "loss": 0.0538, "step": 250 },
    { "epoch": 1.55688622754491, "grad_norm": 0.26641878485679626, "learning_rate": 0.00019957314787559036, "loss": 0.0557, "step": 260 },
    { "epoch": 1.6167664670658684, "grad_norm": 0.4017570912837982, "learning_rate": 0.00019947650092095494, "loss": 0.0514, "step": 270 },
    { "epoch": 1.6766467065868262, "grad_norm": 0.2945254445075989, "learning_rate": 0.00019937002817176146, "loss": 0.0506, "step": 280 },
    { "epoch": 1.7365269461077846, "grad_norm": 0.2502083480358124, "learning_rate": 0.00019925374014485912, "loss": 0.0518, "step": 290 },
    { "epoch": 1.7964071856287425, "grad_norm": 0.3017183840274811, "learning_rate": 0.00019912764832660156, "loss": 0.0541, "step": 300 },
    { "epoch": 1.8562874251497006, "grad_norm": 0.23154252767562866, "learning_rate": 0.00019899176517171208, "loss": 0.0501, "step": 310 },
    { "epoch": 1.9161676646706587, "grad_norm": 0.2425275295972824, "learning_rate": 0.0001988461041020537, "loss": 0.0437, "step": 320 },
    { "epoch": 1.9760479041916168, "grad_norm": 0.2978982925415039, "learning_rate": 0.0001986906795053032, "loss": 0.045, "step": 330 },
    { "epoch": 2.035928143712575, "grad_norm": 0.3514959514141083, "learning_rate": 0.00019852550673353004, "loss": 0.0492, "step": 340 },
    { "epoch": 2.095808383233533, "grad_norm": 0.488469660282135, "learning_rate": 0.00019835060210168017, "loss": 0.0479, "step": 350 },
    { "epoch": 2.155688622754491, "grad_norm": 0.26303964853286743, "learning_rate": 0.00019816598288596412, "loss": 0.0475, "step": 360 },
    { "epoch": 2.215568862275449, "grad_norm": 0.25606536865234375, "learning_rate": 0.00019797166732215076, "loss": 0.0413, "step": 370 },
    { "epoch": 2.2754491017964074, "grad_norm": 0.291185200214386, "learning_rate": 0.0001977676746037662, "loss": 0.0443, "step": 380 },
    { "epoch": 2.3353293413173652, "grad_norm": 0.314779669046402, "learning_rate": 0.0001975540248801977, "loss": 0.044, "step": 390 },
    { "epoch": 2.3952095808383236, "grad_norm": 0.259378582239151, "learning_rate": 0.00019733073925470352, "loss": 0.0413, "step": 400 },
    { "epoch": 2.4550898203592815, "grad_norm": 0.5142641067504883, "learning_rate": 0.00019709783978232842, "loss": 0.0453, "step": 410 },
    { "epoch": 2.5149700598802394, "grad_norm": 0.33504176139831543, "learning_rate": 0.00019685534946772536, "loss": 0.0438, "step": 420 },
    { "epoch": 2.5748502994011977, "grad_norm": 0.33974334597587585, "learning_rate": 0.00019660329226288273, "loss": 0.0436, "step": 430 },
    { "epoch": 2.6347305389221556, "grad_norm": 0.29948851466178894, "learning_rate": 0.00019634169306475917, "loss": 0.0404, "step": 440 },
    { "epoch": 2.694610778443114, "grad_norm": 0.2677200436592102, "learning_rate": 0.00019607057771282372, "loss": 0.0423, "step": 450 },
    { "epoch": 2.754491017964072, "grad_norm": 0.2578097879886627, "learning_rate": 0.00019578997298650395, "loss": 0.0385, "step": 460 },
    { "epoch": 2.81437125748503, "grad_norm": 0.2699326276779175, "learning_rate": 0.00019549990660254073, "loss": 0.0402, "step": 470 },
    { "epoch": 2.874251497005988, "grad_norm": 0.22369785606861115, "learning_rate": 0.00019520040721225033, "loss": 0.0402, "step": 480 },
    { "epoch": 2.934131736526946, "grad_norm": 0.23244518041610718, "learning_rate": 0.00019489150439869465, "loss": 0.0393, "step": 490 },
    { "epoch": 2.9940119760479043, "grad_norm": 0.2809353768825531, "learning_rate": 0.000194573228673759, "loss": 0.0402, "step": 500 },
    { "epoch": 3.053892215568862, "grad_norm": 0.1485118865966797, "learning_rate": 0.0001942456114751382, "loss": 0.0364, "step": 510 },
    { "epoch": 3.1137724550898205, "grad_norm": 0.35376912355422974, "learning_rate": 0.00019390868516323156, "loss": 0.0358, "step": 520 },
    { "epoch": 3.1736526946107784, "grad_norm": 0.24453511834144592, "learning_rate": 0.00019356248301794632, "loss": 0.0366, "step": 530 },
    { "epoch": 3.2335329341317367, "grad_norm": 0.2331007570028305, "learning_rate": 0.00019320703923541045, "loss": 0.0387, "step": 540 },
    { "epoch": 3.2934131736526946, "grad_norm": 0.2162063866853714, "learning_rate": 0.00019284238892459485, "loss": 0.0373, "step": 550 },
    { "epoch": 3.3532934131736525, "grad_norm": 0.22156478464603424, "learning_rate": 0.00019246856810384576, "loss": 0.042, "step": 560 },
    { "epoch": 3.413173652694611, "grad_norm": 0.31770089268684387, "learning_rate": 0.0001920856136973266, "loss": 0.0421, "step": 570 },
    { "epoch": 3.4730538922155687, "grad_norm": 0.37347620725631714, "learning_rate": 0.0001916935635313711, "loss": 0.0354, "step": 580 },
    { "epoch": 3.532934131736527, "grad_norm": 0.2527390420436859, "learning_rate": 0.00019129245633074703, "loss": 0.0378, "step": 590 },
    { "epoch": 3.592814371257485, "grad_norm": 0.2797776162624359, "learning_rate": 0.0001908823317148309, "loss": 0.039, "step": 600 },
    { "epoch": 3.6526946107784433, "grad_norm": 0.3039669692516327, "learning_rate": 0.00019046323019369474, "loss": 0.0376, "step": 610 },
    { "epoch": 3.712574850299401, "grad_norm": 0.3194757103919983, "learning_rate": 0.0001900351931641047, "loss": 0.0364, "step": 620 },
    { "epoch": 3.772455089820359, "grad_norm": 0.29081788659095764, "learning_rate": 0.00018959826290543207, "loss": 0.0387, "step": 630 },
    { "epoch": 3.8323353293413174, "grad_norm": 0.2967931628227234, "learning_rate": 0.00018915248257547713, "loss": 0.0363, "step": 640 },
    { "epoch": 3.8922155688622757, "grad_norm": 0.29823440313339233, "learning_rate": 0.0001886978962062062, "loss": 0.0413, "step": 650 },
    { "epoch": 3.9520958083832336, "grad_norm": 0.2680460810661316, "learning_rate": 0.00018823454869940242, "loss": 0.0381, "step": 660 },
    { "epoch": 4.0119760479041915, "grad_norm": 0.18794329464435577, "learning_rate": 0.00018776248582223063, "loss": 0.035, "step": 670 },
    { "epoch": 4.07185628742515, "grad_norm": 0.31890541315078735, "learning_rate": 0.00018728175420271658, "loss": 0.0309, "step": 680 },
    { "epoch": 4.131736526946108, "grad_norm": 0.2308373749256134, "learning_rate": 0.00018679240132514136, "loss": 0.0338, "step": 690 },
    { "epoch": 4.191616766467066, "grad_norm": 0.21351340413093567, "learning_rate": 0.0001862944755253511, "loss": 0.0319, "step": 700 },
    { "epoch": 4.251497005988024, "grad_norm": 0.22991211712360382, "learning_rate": 0.00018578802598598256, "loss": 0.0339, "step": 710 },
    { "epoch": 4.311377245508982, "grad_norm": 0.1862100064754486, "learning_rate": 0.00018527310273160522, "loss": 0.0323, "step": 720 },
    { "epoch": 4.37125748502994, "grad_norm": 0.34272634983062744, "learning_rate": 0.00018474975662377996, "loss": 0.0324, "step": 730 },
    { "epoch": 4.431137724550898, "grad_norm": 0.3091452419757843, "learning_rate": 0.00018421803935603532, "loss": 0.0338, "step": 740 },
    { "epoch": 4.491017964071856, "grad_norm": 0.31789442896842957, "learning_rate": 0.0001836780034487614, "loss": 0.0343, "step": 750 },
    { "epoch": 4.550898203592815, "grad_norm": 0.46529826521873474, "learning_rate": 0.00018312970224402224, "loss": 0.0359, "step": 760 },
    { "epoch": 4.610778443113772, "grad_norm": 0.3199298679828644, "learning_rate": 0.00018257318990028682, "loss": 0.0375, "step": 770 },
    { "epoch": 4.6706586826347305, "grad_norm": 0.30249571800231934, "learning_rate": 0.00018200852138707975, "loss": 0.039, "step": 780 },
    { "epoch": 4.730538922155689, "grad_norm": 0.18588168919086456, "learning_rate": 0.00018143575247955135, "loss": 0.0338, "step": 790 },
    { "epoch": 4.790419161676647, "grad_norm": 0.2602488100528717, "learning_rate": 0.0001808549397529689, "loss": 0.034, "step": 800 },
    { "epoch": 4.850299401197605, "grad_norm": 0.3416588306427002, "learning_rate": 0.00018026614057712795, "loss": 0.0321, "step": 810 },
    { "epoch": 4.910179640718563, "grad_norm": 0.31246596574783325, "learning_rate": 0.00017966941311068594, "loss": 0.0313, "step": 820 },
    { "epoch": 4.970059880239521, "grad_norm": 0.20728591084480286, "learning_rate": 0.00017906481629541743, "loss": 0.0315, "step": 830 },
    { "epoch": 5.029940119760479, "grad_norm": 0.2376144826412201, "learning_rate": 0.00017845240985039218, "loss": 0.0301, "step": 840 },
    { "epoch": 5.089820359281437, "grad_norm": 0.2364414483308792, "learning_rate": 0.0001778322542660764, "loss": 0.0299, "step": 850 },
    { "epoch": 5.149700598802395, "grad_norm": 0.22445429861545563, "learning_rate": 0.0001772044107983577, "loss": 0.0296, "step": 860 },
    { "epoch": 5.209580838323353, "grad_norm": 0.2109883427619934, "learning_rate": 0.0001765689414624947, "loss": 0.0326, "step": 870 },
    { "epoch": 5.269461077844311, "grad_norm": 0.17161297798156738, "learning_rate": 0.00017592590902699138, "loss": 0.0321, "step": 880 },
    { "epoch": 5.3293413173652695, "grad_norm": 0.18407276272773743, "learning_rate": 0.00017527537700739708, "loss": 0.0339, "step": 890 },
    { "epoch": 5.389221556886228, "grad_norm": 0.1970146745443344, "learning_rate": 0.00017461740966003282, "loss": 0.0356, "step": 900 },
    { "epoch": 5.449101796407185, "grad_norm": 0.1647009700536728, "learning_rate": 0.00017395207197564442, "loss": 0.0303, "step": 910 },
    { "epoch": 5.508982035928144, "grad_norm": 0.17965902388095856, "learning_rate": 0.00017327942967298285, "loss": 0.0273, "step": 920 },
    { "epoch": 5.568862275449102, "grad_norm": 0.2345510870218277, "learning_rate": 0.0001725995491923131, "loss": 0.0274, "step": 930 },
    { "epoch": 5.62874251497006, "grad_norm": 0.33345136046409607, "learning_rate": 0.00017191249768885137, "loss": 0.0341, "step": 940 },
    { "epoch": 5.688622754491018, "grad_norm": 0.1947910636663437, "learning_rate": 0.00017121834302613186, "loss": 0.029, "step": 950 },
    { "epoch": 5.748502994011976, "grad_norm": 0.17892029881477356, "learning_rate": 0.00017051715376930363, "loss": 0.0343, "step": 960 },
    { "epoch": 5.808383233532934, "grad_norm": 0.174368754029274, "learning_rate": 0.00016980899917835783, "loss": 0.0299, "step": 970 },
    { "epoch": 5.868263473053892, "grad_norm": 0.24912439286708832, "learning_rate": 0.00016909394920128695, "loss": 0.0295, "step": 980 },
    { "epoch": 5.92814371257485, "grad_norm": 0.2845669984817505, "learning_rate": 0.00016837207446717518, "loss": 0.0289, "step": 990 },
    { "epoch": 5.9880239520958085, "grad_norm": 0.22086086869239807, "learning_rate": 0.00016764344627922254, "loss": 0.0327, "step": 1000 },
    { "epoch": 6.047904191616767, "grad_norm": 0.3092862665653229, "learning_rate": 0.0001669081366077016, "loss": 0.026, "step": 1010 },
    { "epoch": 6.107784431137724, "grad_norm": 0.3457212746143341, "learning_rate": 0.00016616621808284864, "loss": 0.0291, "step": 1020 },
    { "epoch": 6.167664670658683, "grad_norm": 0.19416464865207672, "learning_rate": 0.00016541776398768966, "loss": 0.0297, "step": 1030 },
    { "epoch": 6.227544910179641, "grad_norm": 0.24748080968856812, "learning_rate": 0.00016466284825080184, "loss": 0.028, "step": 1040 },
    { "epoch": 6.287425149700598, "grad_norm": 0.20852451026439667, "learning_rate": 0.00016390154543901122, "loss": 0.0237, "step": 1050 },
    { "epoch": 6.347305389221557, "grad_norm": 0.2635754346847534, "learning_rate": 0.00016313393075002736, "loss": 0.0267, "step": 1060 },
    { "epoch": 6.407185628742515, "grad_norm": 0.28253108263015747, "learning_rate": 0.00016236008000501573, "loss": 0.0257, "step": 1070 },
    { "epoch": 6.467065868263473, "grad_norm": 0.22715069353580475, "learning_rate": 0.00016158006964110837, "loss": 0.0308, "step": 1080 },
    { "epoch": 6.526946107784431, "grad_norm": 0.2551535964012146, "learning_rate": 0.0001607939767038539, "loss": 0.0265, "step": 1090 },
    { "epoch": 6.586826347305389, "grad_norm": 0.1504376381635666, "learning_rate": 0.00016000187883960734, "loss": 0.0264, "step": 1100 },
    { "epoch": 6.6467065868263475, "grad_norm": 0.3545721173286438, "learning_rate": 0.00015920385428786057, "loss": 0.026, "step": 1110 },
    { "epoch": 6.706586826347305, "grad_norm": 0.16215580701828003, "learning_rate": 0.0001583999818735142, "loss": 0.024, "step": 1120 },
    { "epoch": 6.766467065868263, "grad_norm": 0.24395804107189178, "learning_rate": 0.00015759034099909158, "loss": 0.0275, "step": 1130 },
    { "epoch": 6.826347305389222, "grad_norm": 0.18488508462905884, "learning_rate": 0.00015677501163689604, "loss": 0.0299, "step": 1140 },
    { "epoch": 6.88622754491018, "grad_norm": 0.1932033747434616, "learning_rate": 0.00015595407432111134, "loss": 0.0259, "step": 1150 },
    { "epoch": 6.946107784431137, "grad_norm": 0.19903336465358734, "learning_rate": 0.00015512761013984711, "loss": 0.0278, "step": 1160 },
    { "epoch": 7.005988023952096, "grad_norm": 0.24045361578464508, "learning_rate": 0.00015429570072712925, "loss": 0.029, "step": 1170 },
    { "epoch": 7.065868263473054, "grad_norm": 0.24332760274410248, "learning_rate": 0.00015345842825483666, "loss": 0.0271, "step": 1180 },
    { "epoch": 7.125748502994012, "grad_norm": 0.2306133508682251, "learning_rate": 0.00015261587542458443, "loss": 0.0232, "step": 1190 },
    { "epoch": 7.18562874251497, "grad_norm": 0.2224716693162918, "learning_rate": 0.0001517681254595554, "loss": 0.0246, "step": 1200 },
    { "epoch": 7.245508982035928, "grad_norm": 0.2902887463569641, "learning_rate": 0.00015091526209627944, "loss": 0.024, "step": 1210 },
    { "epoch": 7.3053892215568865, "grad_norm": 0.35143372416496277, "learning_rate": 0.0001500573695763625, "loss": 0.0248, "step": 1220 },
    { "epoch": 7.365269461077844, "grad_norm": 0.3402245342731476, "learning_rate": 0.00014919453263816565, "loss": 0.0256, "step": 1230 },
    { "epoch": 7.425149700598802, "grad_norm": 0.17631883919239044, "learning_rate": 0.0001483268365084351, "loss": 0.0274, "step": 1240 },
    { "epoch": 7.485029940119761, "grad_norm": 0.1852467656135559, "learning_rate": 0.0001474543668938837, "loss": 0.0242, "step": 1250 },
    { "epoch": 7.544910179640718, "grad_norm": 0.20840848982334137, "learning_rate": 0.0001465772099727256, "loss": 0.027, "step": 1260 },
    { "epoch": 7.604790419161676, "grad_norm": 0.19811859726905823, "learning_rate": 0.00014569545238616368, "loss": 0.0229, "step": 1270 },
    { "epoch": 7.664670658682635, "grad_norm": 0.15369650721549988, "learning_rate": 0.00014480918122983174, "loss": 0.0266, "step": 1280 },
    { "epoch": 7.724550898203593, "grad_norm": 0.4044421315193176, "learning_rate": 0.00014391848404519158, "loss": 0.0251, "step": 1290 },
    { "epoch": 7.7844311377245505, "grad_norm": 0.17981642484664917, "learning_rate": 0.0001430234488108861, "loss": 0.0248, "step": 1300 },
    { "epoch": 7.844311377245509, "grad_norm": 0.2915557324886322, "learning_rate": 0.00014212416393404915, "loss": 0.0231, "step": 1310 },
    { "epoch": 7.904191616766467, "grad_norm": 0.20533527433872223, "learning_rate": 0.00014122071824157318, "loss": 0.0257, "step": 1320 },
    { "epoch": 7.9640718562874255, "grad_norm": 0.15892498195171356, "learning_rate": 0.0001403132009713354, "loss": 0.0253, "step": 1330 },
    { "epoch": 8.023952095808383, "grad_norm": 0.34935975074768066, "learning_rate": 0.0001394017017633831, "loss": 0.0261, "step": 1340 },
    { "epoch": 8.08383233532934, "grad_norm": 0.1838160902261734, "learning_rate": 0.00013848631065107973, "loss": 0.0231, "step": 1350 },
    { "epoch": 8.1437125748503, "grad_norm": 0.24909768998622894, "learning_rate": 0.00013756711805221168, "loss": 0.0256, "step": 1360 },
    { "epoch": 8.203592814371257, "grad_norm": 0.2749064266681671, "learning_rate": 0.00013664421476005723, "loss": 0.0258, "step": 1370 },
    { "epoch": 8.263473053892216, "grad_norm": 0.24632593989372253, "learning_rate": 0.0001357176919344186, "loss": 0.023, "step": 1380 },
    { "epoch": 8.323353293413174, "grad_norm": 0.22254560887813568, "learning_rate": 0.0001347876410926175, "loss": 0.0249, "step": 1390 },
    { "epoch": 8.383233532934131, "grad_norm": 0.1739770621061325, "learning_rate": 0.00013385415410045555, "loss": 0.0215, "step": 1400 },
    { "epoch": 8.44311377245509, "grad_norm": 0.15595784783363342, "learning_rate": 0.00013291732316314024, "loss": 0.023, "step": 1410 },
    { "epoch": 8.502994011976048, "grad_norm": 0.12900565564632416, "learning_rate": 0.00013197724081617736, "loss": 0.0227, "step": 1420 },
    { "epoch": 8.562874251497005, "grad_norm": 0.18408159911632538, "learning_rate": 0.00013103399991623078, "loss": 0.0249, "step": 1430 },
    { "epoch": 8.622754491017965, "grad_norm": 0.3930126130580902, "learning_rate": 0.00013008769363195052, "loss": 0.0235, "step": 1440 },
    { "epoch": 8.682634730538922, "grad_norm": 0.22931550443172455, "learning_rate": 0.00012913841543477005, "loss": 0.02, "step": 1450 },
    { "epoch": 8.74251497005988, "grad_norm": 0.1572323441505432, "learning_rate": 0.00012818625908967367, "loss": 0.0244, "step": 1460 },
    { "epoch": 8.802395209580839, "grad_norm": 0.09360220283269882, "learning_rate": 0.0001272313186459348, "loss": 0.021, "step": 1470 },
    { "epoch": 8.862275449101796, "grad_norm": 0.17760924994945526, "learning_rate": 0.00012627368842782626, "loss": 0.0221, "step": 1480 },
    { "epoch": 8.922155688622755, "grad_norm": 0.18415001034736633, "learning_rate": 0.00012531346302530348, "loss": 0.0255, "step": 1490 },
    { "epoch": 8.982035928143713, "grad_norm": 0.479499489068985, "learning_rate": 0.0001243507372846613, "loss": 0.0207, "step": 1500 },
    { "epoch": 9.04191616766467, "grad_norm": 0.22728319466114044, "learning_rate": 0.00012338560629916544, "loss": 0.0253, "step": 1510 },
    { "epoch": 9.10179640718563, "grad_norm": 0.1806812733411789, "learning_rate": 0.00012241816539965996, "loss": 0.0255, "step": 1520 },
    { "epoch": 9.161676646706587, "grad_norm": 0.26954516768455505, "learning_rate": 0.00012144851014515055, "loss": 0.0219, "step": 1530 },
    { "epoch": 9.221556886227544, "grad_norm": 0.30244171619415283, "learning_rate": 0.000120476736313366, "loss": 0.0253, "step": 1540 },
    { "epoch": 9.281437125748504, "grad_norm": 0.1826360821723938, "learning_rate": 0.0001195029398912976, "loss": 0.022, "step": 1550 },
    { "epoch": 9.341317365269461, "grad_norm": 0.22383596003055573, "learning_rate": 0.00011852721706571803, "loss": 0.0253, "step": 1560 },
    { "epoch": 9.401197604790418, "grad_norm": 0.1451132595539093, "learning_rate": 0.00011754966421368066, "loss": 0.0227, "step": 1570 },
    { "epoch": 9.461077844311378, "grad_norm": 0.1684962958097458, "learning_rate": 0.00011657037789299961, "loss": 0.022, "step": 1580 },
    { "epoch": 9.520958083832335, "grad_norm": 0.18602758646011353, "learning_rate": 0.00011558945483271253, "loss": 0.0238, "step": 1590 },
    { "epoch": 9.580838323353294, "grad_norm": 0.1612151712179184, "learning_rate": 0.00011460699192352608, "loss": 0.0229, "step": 1600 },
    { "epoch": 9.640718562874252, "grad_norm": 0.1520804762840271, "learning_rate": 0.00011362308620824549, "loss": 0.0215, "step": 1610 },
    { "epoch": 9.70059880239521, "grad_norm": 0.23648953437805176, "learning_rate": 0.00011263783487218924, "loss": 0.0205, "step": 1620 },
    { "epoch": 9.760479041916168, "grad_norm": 0.17972023785114288, "learning_rate": 0.00011165133523358955, "loss": 0.0171, "step": 1630 },
    { "epoch": 9.820359281437126, "grad_norm": 0.20409125089645386, "learning_rate": 0.00011066368473397977, "loss": 0.0229, "step": 1640 },
    { "epoch": 9.880239520958083, "grad_norm": 0.1429627388715744, "learning_rate": 0.0001096749809285695, "loss": 0.0212, "step": 1650 },
    { "epoch": 9.940119760479043, "grad_norm": 0.20382557809352875, "learning_rate": 0.0001086853214766088, "loss": 0.019, "step": 1660 },
    { "epoch": 10.0, "grad_norm": 0.25044798851013184, "learning_rate": 0.00010769480413174162, "loss": 0.0192, "step": 1670 },
    { "epoch": 10.059880239520957, "grad_norm": 0.18214142322540283, "learning_rate": 0.0001067035267323504, "loss": 0.0219, "step": 1680 },
    { "epoch": 10.119760479041917, "grad_norm": 0.17343035340309143, "learning_rate": 0.00010571158719189192, "loss": 0.0224, "step": 1690 },
    { "epoch": 10.179640718562874, "grad_norm": 0.1327686607837677, "learning_rate": 0.00010471908348922596, "loss": 0.0221, "step": 1700 },
    { "epoch": 10.239520958083832, "grad_norm": 0.22235077619552612, "learning_rate": 0.00010372611365893758, "loss": 0.0234, "step": 1710 },
    { "epoch": 10.29940119760479, "grad_norm": 0.2747216522693634, "learning_rate": 0.00010273277578165341, "loss": 0.0222, "step": 1720 },
    { "epoch": 10.359281437125748, "grad_norm": 0.22264912724494934, "learning_rate": 0.00010173916797435405, "loss": 0.0211, "step": 1730 },
    { "epoch": 10.419161676646706, "grad_norm": 0.6474366784095764, "learning_rate": 0.00010074538838068243, "loss": 0.0212, "step": 1740 },
    { "epoch": 10.479041916167665, "grad_norm": 0.24274206161499023, "learning_rate": 9.975153516124972e-05, "loss": 0.0214, "step": 1750 },
    { "epoch": 10.538922155688622, "grad_norm": 0.19830718636512756, "learning_rate": 9.875770648393936e-05, "loss": 0.0233, "step": 1760 },
    { "epoch": 10.598802395209582, "grad_norm": 0.12125121057033539, "learning_rate": 9.776400051421078e-05, "loss": 0.0194, "step": 1770 },
    { "epoch": 10.658682634730539, "grad_norm": 0.16296449303627014, "learning_rate": 9.677051540540286e-05, "loss": 0.0225, "step": 1780 },
    { "epoch": 10.718562874251496, "grad_norm": 0.24490657448768616, "learning_rate": 9.577734928903899e-05, "loss": 0.0213, "step": 1790 },
    { "epoch": 10.778443113772456, "grad_norm": 0.16158710420131683, "learning_rate": 9.47846002651341e-05, "loss": 0.0199, "step": 1800 },
    { "epoch": 10.838323353293413, "grad_norm": 0.13847924768924713, "learning_rate": 9.379236639250473e-05, "loss": 0.0185, "step": 1810 },
    { "epoch": 10.89820359281437, "grad_norm": 0.18506354093551636, "learning_rate": 9.28007456790833e-05, "loss": 0.0207, "step": 1820 },
    { "epoch": 10.95808383233533, "grad_norm": 0.1724933683872223, "learning_rate": 9.180983607223757e-05, "loss": 0.022, "step": 1830 },
    { "epoch": 11.017964071856287, "grad_norm": 0.2975943088531494, "learning_rate": 9.081973544909557e-05, "loss": 0.0179, "step": 1840 },
    { "epoch": 11.077844311377245, "grad_norm": 0.12449675798416138, "learning_rate": 8.983054160687801e-05, "loss": 0.0211, "step": 1850 },
    { "epoch": 11.137724550898204, "grad_norm": 0.15286274254322052, "learning_rate": 8.88423522532383e-05, "loss": 0.0212, "step": 1860 },
    { "epoch": 11.197604790419161, "grad_norm": 0.17604878544807434, "learning_rate": 8.785526499661137e-05, "loss": 0.02, "step": 1870 },
    { "epoch": 11.25748502994012, "grad_norm": 0.33563774824142456, "learning_rate": 8.686937733657264e-05, "loss": 0.0222, "step": 1880 },
    { "epoch": 11.317365269461078, "grad_norm": 0.1683143824338913, "learning_rate": 8.588478665420717e-05, "loss": 0.0215, "step": 1890 },
    { "epoch": 11.377245508982035, "grad_norm": 0.36611148715019226, "learning_rate": 8.490159020249107e-05, "loss": 0.0203, "step": 1900 },
    { "epoch": 11.437125748502995, "grad_norm": 0.13477054238319397, "learning_rate": 8.391988509668534e-05, "loss": 0.0204, "step": 1910 },
    { "epoch": 11.497005988023952, "grad_norm": 0.25305667519569397, "learning_rate": 8.293976830474312e-05, "loss": 0.0222, "step": 1920 },
    { "epoch": 11.55688622754491, "grad_norm": 0.13116030395030975, "learning_rate": 8.196133663773191e-05, "loss": 0.0173, "step": 1930 },
    { "epoch": 11.616766467065869, "grad_norm": 0.15626917779445648, "learning_rate": 8.09846867402709e-05, "loss": 0.0208, "step": 1940 },
    { "epoch": 11.676646706586826, "grad_norm": 0.13971462845802307, "learning_rate": 8.000991508098492e-05, "loss": 0.0177, "step": 1950 },
    { "epoch": 11.736526946107784, "grad_norm": 0.26388251781463623, "learning_rate": 7.903711794297572e-05, "loss": 0.019, "step": 1960 },
    { "epoch": 11.796407185628743, "grad_norm": 0.17708484828472137, "learning_rate": 7.806639141431178e-05, "loss": 0.0189, "step": 1970 },
    { "epoch": 11.8562874251497, "grad_norm": 0.2096002697944641, "learning_rate": 7.709783137853692e-05, "loss": 0.0204, "step": 1980 },
    { "epoch": 11.91616766467066, "grad_norm": 0.14451850950717926, "learning_rate": 7.613153350519969e-05, "loss": 0.0173, "step": 1990 },
    { "epoch": 11.976047904191617, "grad_norm": 0.12203036993741989, "learning_rate": 7.516759324040338e-05, "loss": 0.0168, "step": 2000 },
    { "epoch": 12.035928143712574, "grad_norm": 0.14412252604961395, "learning_rate": 7.42061057973785e-05, "loss": 0.0185, "step": 2010 },
    { "epoch": 12.095808383233534, "grad_norm": 0.27144360542297363, "learning_rate": 7.324716614707793e-05, "loss": 0.0195, "step": 2020 },
    { "epoch": 12.155688622754491, "grad_norm": 0.15820185840129852, "learning_rate": 7.229086900879629e-05, "loss": 0.0191, "step": 2030 },
    { "epoch": 12.215568862275449, "grad_norm": 0.16200768947601318, "learning_rate": 7.133730884081395e-05, "loss": 0.0193, "step": 2040 },
    { "epoch": 12.275449101796408, "grad_norm": 0.18649928271770477, "learning_rate": 7.038657983106702e-05, "loss": 0.0159, "step": 2050 },
    { "epoch": 12.335329341317365, "grad_norm": 0.17984548211097717, "learning_rate": 6.943877588784382e-05, "loss": 0.0199, "step": 2060 },
    { "epoch": 12.395209580838323, "grad_norm": 0.16168512403964996, "learning_rate": 6.849399063050912e-05, "loss": 0.0148, "step": 2070 },
    { "epoch": 12.455089820359282, "grad_norm": 0.14747169613838196, "learning_rate": 6.755231738025693e-05, "loss": 0.0189, "step": 2080 },
    { "epoch": 12.51497005988024, "grad_norm": 0.1350451409816742, "learning_rate": 6.661384915089268e-05, "loss": 0.0142, "step": 2090 },
    { "epoch": 12.574850299401197, "grad_norm": 0.12176797538995743, "learning_rate": 6.567867863964561e-05, "loss": 0.018, "step": 2100 },
    { "epoch": 12.634730538922156, "grad_norm": 0.12629830837249756, "learning_rate": 6.474689821801295e-05, "loss": 0.0151, "step": 2110 },
    { "epoch": 12.694610778443113, "grad_norm": 0.2103053331375122, "learning_rate": 6.38185999226356e-05, "loss": 0.0148, "step": 2120 },
    { "epoch": 12.754491017964071, "grad_norm": 0.18460889160633087, "learning_rate": 6.289387544620736e-05, "loss": 0.0172, "step": 2130 },
    { "epoch": 12.81437125748503, "grad_norm": 0.22776320576667786, "learning_rate": 6.197281612841795e-05, "loss": 0.0175, "step": 2140 },
    { "epoch": 12.874251497005988, "grad_norm": 0.1890844702720642, "learning_rate": 6.10555129469309e-05, "loss": 0.0154, "step": 2150 },
    { "epoch": 12.934131736526947, "grad_norm": 0.17597252130508423, "learning_rate": 6.014205650839737e-05, "loss": 0.0151, "step": 2160 },
    { "epoch": 12.994011976047904, "grad_norm": 0.15856590867042542, "learning_rate": 5.923253703950617e-05, "loss": 0.0154, "step": 2170 },
    { "epoch": 13.053892215568862, "grad_norm": 0.22898879647254944, "learning_rate": 5.832704437807195e-05, "loss": 0.0137, "step": 2180 },
    { "epoch": 13.113772455089821, "grad_norm": 0.34772729873657227, "learning_rate": 5.742566796416129e-05, "loss": 0.0144, "step": 2190 },
    { "epoch": 13.173652694610778, "grad_norm": 0.18779855966567993, "learning_rate": 5.652849683125823e-05, "loss": 0.0146, "step": 2200 },
    { "epoch": 13.233532934131736, "grad_norm": 0.17576445639133453, "learning_rate": 5.563561959747019e-05, "loss": 0.0196, "step": 2210 },
    { "epoch": 13.293413173652695, "grad_norm": 0.15298061072826385, "learning_rate": 5.4747124456774445e-05, "loss": 0.0178, "step": 2220 },
    { "epoch": 13.353293413173652, "grad_norm": 0.14338500797748566, "learning_rate": 5.38630991703069e-05, "loss": 0.0179, "step": 2230 },
    { "epoch": 13.41317365269461, "grad_norm": 0.18957915902137756, "learning_rate": 5.2983631057693484e-05, "loss": 0.0165, "step": 2240 },
    { "epoch": 13.47305389221557, "grad_norm": 0.19262787699699402, "learning_rate": 5.210880698842514e-05, "loss": 0.0139, "step": 2250 },
    { "epoch": 13.532934131736527, "grad_norm": 0.3551245331764221, "learning_rate": 5.123871337327724e-05, "loss": 0.0172, "step": 2260 },
    { "epoch": 13.592814371257486, "grad_norm": 0.21090568602085114, "learning_rate": 5.037343615577448e-05, "loss": 0.0159, "step": 2270 },
    { "epoch": 13.652694610778443, "grad_norm": 0.10332461446523666, "learning_rate": 4.951306080370165e-05, "loss": 0.0147, "step": 2280 },
    { "epoch": 13.7125748502994, "grad_norm": 0.12004910409450531, "learning_rate": 4.865767230066166e-05, "loss": 0.0132, "step": 2290 },
    { "epoch": 13.77245508982036, "grad_norm": 0.17986303567886353, "learning_rate": 4.78073551376812e-05, "loss": 0.0142, "step": 2300 },
    { "epoch": 13.832335329341317, "grad_norm": 0.17033332586288452, "learning_rate": 4.6962193304865335e-05, "loss": 0.0145, "step": 2310 },
    { "epoch": 13.892215568862275, "grad_norm": 0.5156183838844299, "learning_rate": 4.6122270283100944e-05, "loss": 0.0162, "step": 2320 },
    { "epoch": 13.952095808383234, "grad_norm": 0.47306326031684875, "learning_rate": 4.528766903581149e-05, "loss": 0.0151, "step": 2330 },
    { "epoch": 14.011976047904191, "grad_norm": 0.10242114216089249, "learning_rate": 4.44584720007618e-05, "loss": 0.0197, "step": 2340 },
    { "epoch": 14.071856287425149, "grad_norm": 0.13059602677822113, "learning_rate": 4.363476108191552e-05, "loss": 0.0144, "step": 2350 },
    { "epoch": 14.131736526946108, "grad_norm": 0.1200447678565979, "learning_rate": 4.2816617641344934e-05, "loss": 0.0158, "step": 2360 },
    { "epoch": 14.191616766467066, "grad_norm": 0.15896056592464447, "learning_rate": 4.200412249119463e-05, "loss": 0.0135, "step": 2370 },
    { "epoch": 14.251497005988025, "grad_norm": 0.18628185987472534, "learning_rate": 4.119735588569899e-05, "loss": 0.0131, "step": 2380 },
    { "epoch": 14.311377245508982, "grad_norm": 0.1670549213886261, "learning_rate": 4.039639751325525e-05, "loss": 0.0145, "step": 2390 },
    { "epoch": 14.37125748502994, "grad_norm": 0.2762540280818939, "learning_rate": 3.960132648855226e-05, "loss": 0.0167, "step": 2400 },
    { "epoch": 14.431137724550899, "grad_norm": 0.09425695985555649, "learning_rate": 3.881222134475589e-05, "loss": 0.0133, "step": 2410 },
    { "epoch": 14.491017964071856, "grad_norm": 0.176064133644104, "learning_rate": 3.802916002575192e-05, "loss": 0.0124, "step": 2420 },
    { "epoch": 14.550898203592814, "grad_norm": 0.19551633298397064, "learning_rate": 3.725221987844721e-05, "loss": 0.0177, "step": 2430 },
    { "epoch": 14.610778443113773, "grad_norm": 0.20848046243190765, "learning_rate": 3.6481477645129624e-05, "loss": 0.0154, "step": 2440 },
    { "epoch": 14.67065868263473, "grad_norm": 0.2565270960330963, "learning_rate": 3.571700945588789e-05, "loss": 0.015, "step": 2450 },
    { "epoch": 14.730538922155688, "grad_norm": 0.13702988624572754, "learning_rate": 3.495889082109185e-05, "loss": 0.0157, "step": 2460 },
    { "epoch": 14.790419161676647, "grad_norm": 0.5474858283996582, "learning_rate": 3.4207196623933857e-05, "loss": 0.0143, "step": 2470 },
    { "epoch": 14.850299401197605, "grad_norm": 0.1655379682779312, "learning_rate": 3.346200111303226e-05, "loss": 0.0152, "step": 2480 },
    { "epoch": 14.910179640718562, "grad_norm": 0.13053831458091736, "learning_rate": 3.272337789509751e-05, "loss": 0.0139, "step": 2490 },
    { "epoch": 14.970059880239521, "grad_norm": 0.1283467561006546, "learning_rate": 3.199139992766155e-05, "loss": 0.0161, "step": 2500 },
    { "epoch": 15.029940119760479, "grad_norm": 0.13677707314491272, "learning_rate": 3.1266139511871465e-05, "loss": 0.0126, "step": 2510 },
    { "epoch": 15.089820359281438, "grad_norm": 0.10139447450637817, "learning_rate": 3.0547668285348105e-05, "loss": 0.0143, "step": 2520 },
    { "epoch": 15.149700598802395, "grad_norm": 0.14132164418697357, "learning_rate": 2.9836057215109746e-05, "loss": 0.0149, "step": 2530 },
    { "epoch": 15.209580838323353, "grad_norm": 0.13676463067531586, "learning_rate": 2.9131376590562587e-05, "loss": 0.0131, "step": 2540 },
    { "epoch": 15.269461077844312, "grad_norm": 0.20659254491329193, "learning_rate": 2.843369601655792e-05, "loss": 0.0125, "step": 2550 },
    { "epoch": 15.32934131736527, "grad_norm": 0.13563022017478943, "learning_rate": 2.7743084406516693e-05, "loss": 0.0147, "step": 2560 },
    { "epoch": 15.389221556886227, "grad_norm": 0.13761834800243378, "learning_rate": 2.705960997562277e-05, "loss": 0.0159, "step": 2570 },
    { "epoch": 15.449101796407186, "grad_norm": 0.11985321342945099, "learning_rate": 2.6383340234084875e-05, "loss": 0.0111, "step": 2580 },
    { "epoch": 15.508982035928144, "grad_norm": 0.09939990192651749, "learning_rate": 2.5714341980468437e-05, "loss": 0.0122, "step": 2590 },
    { "epoch": 15.568862275449101, "grad_norm": 0.19175921380519867, "learning_rate": 2.5052681295097225e-05, "loss": 0.0134, "step": 2600 },
    { "epoch": 15.62874251497006, "grad_norm": 0.11813875287771225, "learning_rate": 2.439842353352667e-05, "loss": 0.015, "step": 2610 },
    { "epoch": 15.688622754491018, "grad_norm": 0.16614750027656555, "learning_rate": 2.3751633320088064e-05, "loss": 0.0147, "step": 2620 },
    { "epoch": 15.748502994011975, "grad_norm": 0.2053995579481125, "learning_rate": 2.311237454150539e-05, "loss": 0.0122, "step": 2630 },
    { "epoch": 15.808383233532934, "grad_norm": 0.21074947714805603, "learning_rate": 2.248071034058492e-05, "loss": 0.0133, "step": 2640 },
    { "epoch": 15.868263473053892, "grad_norm": 0.18404161930084229, "learning_rate": 2.185670310997835e-05, "loss": 0.0155, "step": 2650 },
    { "epoch": 15.928143712574851, "grad_norm": 0.12167193740606308, "learning_rate": 2.124041448601979e-05, "loss": 0.0113, "step": 2660 },
    { "epoch": 15.988023952095809, "grad_norm": 0.19261722266674042, "learning_rate": 2.0631905342637782e-05, "loss": 0.0124, "step": 2670 },
    { "epoch": 16.047904191616766, "grad_norm": 0.152290940284729, "learning_rate": 2.0031235785342407e-05, "loss": 0.014, "step": 2680 },
    { "epoch": 16.107784431137723, "grad_norm": 0.13786451518535614, "learning_rate": 1.9438465145288374e-05, "loss": 0.0125, "step": 2690 },
    { "epoch": 16.16766467065868, "grad_norm": 0.11830327659845352, "learning_rate": 1.885365197341461e-05, "loss": 0.013, "step": 2700 },
    { "epoch": 16.227544910179642, "grad_norm": 0.1224045529961586, "learning_rate": 1.827685403466087e-05, "loss": 0.0105, "step": 2710 },
    { "epoch": 16.2874251497006, "grad_norm": 0.12195927649736404, "learning_rate": 1.7708128302261994e-05, "loss": 0.0117, "step": 2720 },
    { "epoch": 16.347305389221557, "grad_norm": 0.1412719041109085, "learning_rate": 1.7147530952120337e-05, "loss": 0.0131, "step": 2730 },
    { "epoch": 16.407185628742514, "grad_norm": 0.1535654515028, "learning_rate": 1.659511735725716e-05, "loss": 0.013, "step": 2740 },
    { "epoch": 16.46706586826347, "grad_norm": 0.3097449839115143, "learning_rate": 1.605094208234288e-05, "loss": 0.0105, "step": 2750 },
    { "epoch": 16.526946107784433, "grad_norm": 0.1134740337729454, "learning_rate": 1.5515058878307665e-05, "loss": 0.0107, "step": 2760 },
    { "epoch": 16.58682634730539, "grad_norm": 0.23593778908252716, "learning_rate": 1.4987520677032174e-05, "loss": 0.0117, "step": 2770 },
    { "epoch": 16.646706586826348, "grad_norm": 0.1333216279745102, "learning_rate": 1.4468379586119074e-05, "loss": 0.0151, "step": 2780 },
    { "epoch": 16.706586826347305, "grad_norm": 0.2893410623073578, "learning_rate": 1.3957686883746191e-05, "loss": 0.0101, "step": 2790 },
    { "epoch": 16.766467065868262, "grad_norm": 0.12125154584646225, "learning_rate": 1.3455493013601561e-05, "loss": 0.0115, "step": 2800 },
    { "epoch": 16.82634730538922, "grad_norm": 0.3616254925727844, "learning_rate": 1.2961847579900777e-05, "loss": 0.0133, "step": 2810 },
    { "epoch": 16.88622754491018, "grad_norm": 0.1681334674358368, "learning_rate": 1.2476799342487255e-05, "loss": 0.0126, "step": 2820 },
    { "epoch": 16.94610778443114, "grad_norm": 0.14123430848121643, "learning_rate": 1.2000396212016218e-05, "loss": 0.0146, "step": 2830 },
    { "epoch": 17.005988023952096, "grad_norm": 0.15791960060596466, "learning_rate": 1.1532685245222075e-05, "loss": 0.0113, "step": 2840 },
    { "epoch": 17.065868263473053, "grad_norm": 0.20897255837917328, "learning_rate": 1.1073712640270506e-05, "loss": 0.0111, "step": 2850 },
    { "epoch": 17.12574850299401, "grad_norm": 0.07108011096715927, "learning_rate": 1.0623523732195284e-05, "loss": 0.0123, "step": 2860 },
    { "epoch": 17.18562874251497, "grad_norm": 0.2679906189441681, "learning_rate": 1.0182162988420163e-05, "loss": 0.0145, "step": 2870 },
    { "epoch": 17.24550898203593, "grad_norm": 0.11328988522291183, "learning_rate": 9.749674004366727e-06, "loss": 0.0122, "step": 2880 },
    { "epoch": 17.305389221556887, "grad_norm": 0.3787294924259186, "learning_rate": 9.326099499148177e-06, "loss": 0.0103, "step": 2890 },
    { "epoch": 17.365269461077844, "grad_norm": 0.2220940738916397, "learning_rate": 8.911481311349834e-06, "loss": 0.0123, "step": 2900 },
    { "epoch": 17.4251497005988, "grad_norm": 0.12199822813272476, "learning_rate": 8.505860394896414e-06, "loss": 0.011, "step": 2910 },
    { "epoch": 17.48502994011976, "grad_norm": 0.10359202325344086, "learning_rate": 8.109276815006938e-06, "loss": 0.0102, "step": 2920 },
    { "epoch": 17.54491017964072, "grad_norm": 0.49252983927726746, "learning_rate": 7.721769744237184e-06, "loss": 0.014, "step": 2930 },
    { "epoch": 17.604790419161677, "grad_norm": 0.3275662064552307, "learning_rate": 7.343377458610446e-06, "loss": 0.0148, "step": 2940 },
    { "epoch": 17.664670658682635, "grad_norm": 0.7134996056556702, "learning_rate": 6.9741373338368165e-06, "loss": 0.0129, "step": 2950 },
    { "epoch": 17.724550898203592, "grad_norm": 0.10740635544061661, "learning_rate": 6.614085841621442e-06, "loss": 0.0111, "step": 2960 },
    { "epoch": 17.78443113772455, "grad_norm": 0.13902905583381653, "learning_rate": 6.263258546061978e-06, "loss": 0.0124, "step": 2970 },
    { "epoch": 17.84431137724551, "grad_norm": 0.2780143916606903, "learning_rate": 5.921690100135713e-06, "loss": 0.009, "step": 2980 },
    { "epoch": 17.904191616766468, "grad_norm": 0.11749708652496338, "learning_rate": 5.589414242276869e-06, "loss": 0.0098, "step": 2990 },
    { "epoch": 17.964071856287426, "grad_norm": 0.45455411076545715, "learning_rate": 5.266463793043896e-06, "loss": 0.0087, "step": 3000 },
    { "epoch": 18.023952095808383, "grad_norm": 0.22297538816928864, "learning_rate": 4.952870651877739e-06, "loss": 0.0114, "step": 3010 },
    { "epoch": 18.08383233532934, "grad_norm": 0.5311406254768372, "learning_rate": 4.648665793950968e-06, "loss": 0.0093, "step": 3020 },
    { "epoch": 18.143712574850298, "grad_norm": 0.20427249372005463, "learning_rate": 4.353879267108119e-06, "loss": 0.0123, "step": 3030 },
    { "epoch": 18.20359281437126, "grad_norm": 0.2326945662498474, "learning_rate": 4.068540188897796e-06, "loss": 0.0099, "step": 3040 },
    { "epoch": 18.263473053892216, "grad_norm": 0.14544923603534698, "learning_rate": 3.792676743696588e-06, "loss": 0.0123, "step": 3050 },
    { "epoch": 18.323353293413174, "grad_norm": 0.1755136400461197, "learning_rate": 3.5263161799251154e-06, "loss": 0.0095, "step": 3060 },
    { "epoch": 18.38323353293413, "grad_norm": 0.10239781439304352, "learning_rate": 3.2694848073565596e-06, "loss": 0.0095, "step": 3070 },
    { "epoch": 18.44311377245509, "grad_norm": 0.13217875361442566, "learning_rate": 3.0222079945179895e-06, "loss": 0.0111, "step": 3080 },
    { "epoch": 18.50299401197605, "grad_norm": 0.11932236701250076, "learning_rate": 2.7845101661844618e-06, "loss": 0.0099, "step": 3090 },
    { "epoch": 18.562874251497007, "grad_norm": 0.12827470898628235, "learning_rate": 2.5564148009665778e-06, "loss": 0.0125, "step": 3100 },
    { "epoch": 18.622754491017965, "grad_norm": 0.44992345571517944, "learning_rate": 2.3379444289913342e-06, "loss": 0.0114, "step": 3110 },
    { "epoch": 18.682634730538922, "grad_norm": 0.22988931834697723, "learning_rate": 2.1291206296767063e-06, "loss": 0.0121, "step": 3120 },
    { "epoch": 18.74251497005988, "grad_norm": 0.2384101152420044, "learning_rate": 1.9299640296001288e-06, "loss": 0.0094, "step": 3130 },
    { "epoch": 18.802395209580837, "grad_norm": 0.10543084144592285, "learning_rate": 1.7404943004611596e-06, "loss": 0.013, "step": 3140 },
    { "epoch": 18.862275449101798, "grad_norm": 0.12687674164772034, "learning_rate": 1.5607301571383459e-06, "loss": 0.0112, "step": 3150 },
    { "epoch": 18.922155688622755, "grad_norm": 0.26404881477355957, "learning_rate": 1.390689355840691e-06, "loss": 0.0133, "step": 3160 },
    { "epoch": 18.982035928143713, "grad_norm": 0.11557133495807648, "learning_rate": 1.2303886923537677e-06, "loss": 0.0107, "step": 3170 },
    { "epoch": 19.04191616766467, "grad_norm": 0.10522342473268509, "learning_rate": 1.0798440003807474e-06, "loss": 0.0107, "step": 3180 },
    { "epoch": 19.101796407185628, "grad_norm": 0.1134905070066452, "learning_rate": 9.390701499783827e-07, "loss": 0.0086, "step": 3190 },
    { "epoch": 19.161676646706585, "grad_norm": 0.1318759173154831, "learning_rate": 8.080810460882493e-07, "loss": 0.0107, "step": 3200 },
    { "epoch": 19.221556886227546, "grad_norm": 0.11792134493589401, "learning_rate": 6.868896271632785e-07, "loss": 0.0099, "step": 3210 },
    { "epoch": 19.281437125748504, "grad_norm": 0.18841320276260376, "learning_rate": 5.755078638897459e-07, "loss": 0.0118, "step": 3220 },
    { "epoch": 19.34131736526946, "grad_norm": 0.1288711428642273, "learning_rate": 4.739467580048618e-07, "loss": 0.0092, "step": 3230 },
    { "epoch": 19.40119760479042, "grad_norm": 0.3439689874649048, "learning_rate": 3.8221634121013983e-07, "loss": 0.0083, "step": 3240 },
    { "epoch": 19.461077844311376, "grad_norm": 0.07992304116487503, "learning_rate": 3.003256741803906e-07, "loss": 0.012, "step": 3250 },
    { "epoch": 19.520958083832337, "grad_norm": 0.15533998608589172, "learning_rate": 2.2828284566890345e-07, "loss": 0.0108, "step": 3260 },
    { "epoch": 19.580838323353294, "grad_norm": 0.3759068548679352, "learning_rate": 1.6609497170834155e-07, "loss": 0.0112, "step": 3270 },
    { "epoch": 19.64071856287425, "grad_norm": 0.12365289032459259, "learning_rate": 1.1376819490795942e-07, "loss": 0.0135, "step": 3280 },
    { "epoch": 19.70059880239521, "grad_norm": 0.12051455676555634, "learning_rate": 7.130768384679965e-08, "loss": 0.01, "step": 3290 },
    { "epoch": 19.760479041916167, "grad_norm": 0.10926169902086258, "learning_rate": 3.8717632563201046e-08, "loss": 0.0152, "step": 3300 },
    { "epoch": 19.820359281437124, "grad_norm": 0.15941037237644196, "learning_rate": 1.600126014051906e-08, "loss": 0.0109, "step": 3310 },
    { "epoch": 19.880239520958085, "grad_norm": 0.07696834206581116, "learning_rate": 3.1608103891689335e-09, "loss": 0.0093, "step": 3320 },
    { "epoch": 19.92814371257485, "step": 3328, "total_flos": 4.678773863650698e+17, "train_loss": 0.034036972624912426, "train_runtime": 3742.5882, "train_samples_per_second": 56.91, "train_steps_per_second": 0.889 }
  ],
  "logging_steps": 10,
  "max_steps": 3328,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 20,
  "save_steps": 10000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4.678773863650698e+17,
  "train_batch_size": 64,
  "trial_name": null,
  "trial_params": null
}