|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 19.997354497354497, |
|
"eval_steps": 500, |
|
"global_step": 7559, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.026455026455026454, |
|
"grad_norm": 6.989788055419922, |
|
"learning_rate": 5.291005291005291e-06, |
|
"loss": 1.1645, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.05291005291005291, |
|
"grad_norm": 5.150631427764893, |
|
"learning_rate": 1.0582010582010582e-05, |
|
"loss": 0.9316, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.07936507936507936, |
|
"grad_norm": 3.7024333477020264, |
|
"learning_rate": 1.5873015873015872e-05, |
|
"loss": 0.5019, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.10582010582010581, |
|
"grad_norm": 0.8457810878753662, |
|
"learning_rate": 2.1164021164021164e-05, |
|
"loss": 0.3289, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.13227513227513227, |
|
"grad_norm": 1.1393985748291016, |
|
"learning_rate": 2.6455026455026456e-05, |
|
"loss": 0.2681, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.15873015873015872, |
|
"grad_norm": 1.797629714012146, |
|
"learning_rate": 3.1746031746031745e-05, |
|
"loss": 0.2378, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.18518518518518517, |
|
"grad_norm": 1.5710021257400513, |
|
"learning_rate": 3.7037037037037037e-05, |
|
"loss": 0.1902, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.21164021164021163, |
|
"grad_norm": 2.3919057846069336, |
|
"learning_rate": 4.232804232804233e-05, |
|
"loss": 0.1454, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.23809523809523808, |
|
"grad_norm": 1.9616649150848389, |
|
"learning_rate": 4.761904761904762e-05, |
|
"loss": 0.1157, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.26455026455026454, |
|
"grad_norm": 2.282738208770752, |
|
"learning_rate": 5.291005291005291e-05, |
|
"loss": 0.1006, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.291005291005291, |
|
"grad_norm": 0.6997108459472656, |
|
"learning_rate": 5.82010582010582e-05, |
|
"loss": 0.0969, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.31746031746031744, |
|
"grad_norm": 1.1011691093444824, |
|
"learning_rate": 6.349206349206349e-05, |
|
"loss": 0.0777, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.3439153439153439, |
|
"grad_norm": 1.2826348543167114, |
|
"learning_rate": 6.878306878306878e-05, |
|
"loss": 0.0701, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.37037037037037035, |
|
"grad_norm": 1.0125218629837036, |
|
"learning_rate": 7.407407407407407e-05, |
|
"loss": 0.0781, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.3968253968253968, |
|
"grad_norm": 1.344835877418518, |
|
"learning_rate": 7.936507936507937e-05, |
|
"loss": 0.0717, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.42328042328042326, |
|
"grad_norm": 0.7368573546409607, |
|
"learning_rate": 8.465608465608466e-05, |
|
"loss": 0.0681, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.4497354497354497, |
|
"grad_norm": 0.8334409594535828, |
|
"learning_rate": 8.994708994708995e-05, |
|
"loss": 0.0652, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.47619047619047616, |
|
"grad_norm": 0.9617886543273926, |
|
"learning_rate": 9.523809523809524e-05, |
|
"loss": 0.0591, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.5026455026455027, |
|
"grad_norm": 1.6967134475708008, |
|
"learning_rate": 0.00010052910052910055, |
|
"loss": 0.0603, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.5291005291005291, |
|
"grad_norm": 1.2042906284332275, |
|
"learning_rate": 0.00010582010582010582, |
|
"loss": 0.0569, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.5555555555555556, |
|
"grad_norm": 0.8804681897163391, |
|
"learning_rate": 0.00011111111111111112, |
|
"loss": 0.0572, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.582010582010582, |
|
"grad_norm": 0.5275452136993408, |
|
"learning_rate": 0.0001164021164021164, |
|
"loss": 0.0564, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.6084656084656085, |
|
"grad_norm": 0.8496862053871155, |
|
"learning_rate": 0.0001216931216931217, |
|
"loss": 0.0559, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.6349206349206349, |
|
"grad_norm": 0.9702103137969971, |
|
"learning_rate": 0.00012698412698412698, |
|
"loss": 0.0575, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.6613756613756614, |
|
"grad_norm": 0.5400204658508301, |
|
"learning_rate": 0.00013227513227513228, |
|
"loss": 0.0492, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.6878306878306878, |
|
"grad_norm": 0.7467755079269409, |
|
"learning_rate": 0.00013756613756613756, |
|
"loss": 0.0486, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.7142857142857143, |
|
"grad_norm": 0.7477474808692932, |
|
"learning_rate": 0.00014285714285714287, |
|
"loss": 0.0491, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.7407407407407407, |
|
"grad_norm": 0.9659186005592346, |
|
"learning_rate": 0.00014814814814814815, |
|
"loss": 0.0455, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.7671957671957672, |
|
"grad_norm": 0.8414769172668457, |
|
"learning_rate": 0.00015343915343915345, |
|
"loss": 0.0465, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.7936507936507936, |
|
"grad_norm": 0.690024733543396, |
|
"learning_rate": 0.00015873015873015873, |
|
"loss": 0.0506, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.8201058201058201, |
|
"grad_norm": 0.6750884056091309, |
|
"learning_rate": 0.00016402116402116404, |
|
"loss": 0.0525, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.8465608465608465, |
|
"grad_norm": 0.8909515142440796, |
|
"learning_rate": 0.00016931216931216931, |
|
"loss": 0.0417, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.873015873015873, |
|
"grad_norm": 0.6845270395278931, |
|
"learning_rate": 0.00017460317460317462, |
|
"loss": 0.0468, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.8994708994708994, |
|
"grad_norm": 0.5316764116287231, |
|
"learning_rate": 0.0001798941798941799, |
|
"loss": 0.0525, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.9259259259259259, |
|
"grad_norm": 0.7101507186889648, |
|
"learning_rate": 0.0001851851851851852, |
|
"loss": 0.0456, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.9523809523809523, |
|
"grad_norm": 0.6450228095054626, |
|
"learning_rate": 0.00019047619047619048, |
|
"loss": 0.0528, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.9788359788359788, |
|
"grad_norm": 0.5121487975120544, |
|
"learning_rate": 0.0001957671957671958, |
|
"loss": 0.0486, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.0052910052910053, |
|
"grad_norm": 0.8493865728378296, |
|
"learning_rate": 0.0001999999617210638, |
|
"loss": 0.0459, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.0317460317460316, |
|
"grad_norm": 0.6720256805419922, |
|
"learning_rate": 0.00019999862196137396, |
|
"loss": 0.0435, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.0582010582010581, |
|
"grad_norm": 0.43235063552856445, |
|
"learning_rate": 0.00019999536828417959, |
|
"loss": 0.0449, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.0846560846560847, |
|
"grad_norm": 0.5137812495231628, |
|
"learning_rate": 0.00019999020075175426, |
|
"loss": 0.0452, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.1111111111111112, |
|
"grad_norm": 0.6568519473075867, |
|
"learning_rate": 0.00019998311946300173, |
|
"loss": 0.0442, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.1375661375661377, |
|
"grad_norm": 1.1188125610351562, |
|
"learning_rate": 0.0001999741245534538, |
|
"loss": 0.0484, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.164021164021164, |
|
"grad_norm": 0.6816080808639526, |
|
"learning_rate": 0.00019996321619526804, |
|
"loss": 0.0443, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.1904761904761905, |
|
"grad_norm": 0.41056472063064575, |
|
"learning_rate": 0.00019995039459722423, |
|
"loss": 0.0462, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.216931216931217, |
|
"grad_norm": 0.528852641582489, |
|
"learning_rate": 0.00019993566000472068, |
|
"loss": 0.0416, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.2433862433862433, |
|
"grad_norm": 0.549899697303772, |
|
"learning_rate": 0.00019991901269976911, |
|
"loss": 0.0417, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.2698412698412698, |
|
"grad_norm": 0.48012372851371765, |
|
"learning_rate": 0.00019990045300098967, |
|
"loss": 0.0396, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.2962962962962963, |
|
"grad_norm": 0.5004888772964478, |
|
"learning_rate": 0.00019987998126360453, |
|
"loss": 0.0397, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.3227513227513228, |
|
"grad_norm": 0.5583204627037048, |
|
"learning_rate": 0.00019985759787943127, |
|
"loss": 0.0437, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.3492063492063493, |
|
"grad_norm": 0.7729330062866211, |
|
"learning_rate": 0.00019983330327687533, |
|
"loss": 0.0374, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.3756613756613756, |
|
"grad_norm": 0.5652401447296143, |
|
"learning_rate": 0.0001998070979209217, |
|
"loss": 0.0402, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.402116402116402, |
|
"grad_norm": 0.7112484574317932, |
|
"learning_rate": 0.00019977898231312628, |
|
"loss": 0.0428, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.4285714285714286, |
|
"grad_norm": 0.66225665807724, |
|
"learning_rate": 0.00019974895699160597, |
|
"loss": 0.0415, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.455026455026455, |
|
"grad_norm": 0.4615730047225952, |
|
"learning_rate": 0.00019971702253102856, |
|
"loss": 0.0351, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.4814814814814814, |
|
"grad_norm": 0.5178949236869812, |
|
"learning_rate": 0.00019968317954260168, |
|
"loss": 0.0409, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.507936507936508, |
|
"grad_norm": 0.5937110781669617, |
|
"learning_rate": 0.00019964742867406126, |
|
"loss": 0.0423, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.5343915343915344, |
|
"grad_norm": 0.45062029361724854, |
|
"learning_rate": 0.00019960977060965872, |
|
"loss": 0.0404, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.560846560846561, |
|
"grad_norm": 0.41606688499450684, |
|
"learning_rate": 0.00019957020607014835, |
|
"loss": 0.038, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.5873015873015874, |
|
"grad_norm": 0.36349207162857056, |
|
"learning_rate": 0.00019952873581277324, |
|
"loss": 0.0392, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.6137566137566137, |
|
"grad_norm": 0.7711144685745239, |
|
"learning_rate": 0.0001994853606312508, |
|
"loss": 0.0393, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.6402116402116402, |
|
"grad_norm": 0.5946980714797974, |
|
"learning_rate": 0.00019944008135575772, |
|
"loss": 0.0408, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.6666666666666665, |
|
"grad_norm": 0.21440181136131287, |
|
"learning_rate": 0.00019939289885291384, |
|
"loss": 0.0399, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.693121693121693, |
|
"grad_norm": 0.48613789677619934, |
|
"learning_rate": 0.00019934381402576584, |
|
"loss": 0.0406, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.7195767195767195, |
|
"grad_norm": 0.47162196040153503, |
|
"learning_rate": 0.00019929282781376976, |
|
"loss": 0.0433, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.746031746031746, |
|
"grad_norm": 0.5799857974052429, |
|
"learning_rate": 0.00019923994119277309, |
|
"loss": 0.0412, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.7724867724867726, |
|
"grad_norm": 0.6739380955696106, |
|
"learning_rate": 0.00019918515517499606, |
|
"loss": 0.0365, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.798941798941799, |
|
"grad_norm": 0.5352742671966553, |
|
"learning_rate": 0.0001991284708090123, |
|
"loss": 0.0367, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.8253968253968254, |
|
"grad_norm": 0.6227455735206604, |
|
"learning_rate": 0.00019906988917972878, |
|
"loss": 0.0357, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.8518518518518519, |
|
"grad_norm": 0.546350359916687, |
|
"learning_rate": 0.00019900941140836497, |
|
"loss": 0.0369, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.8783068783068781, |
|
"grad_norm": 0.5400755405426025, |
|
"learning_rate": 0.00019894703865243156, |
|
"loss": 0.0391, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.9047619047619047, |
|
"grad_norm": 0.6319063305854797, |
|
"learning_rate": 0.000198882772105708, |
|
"loss": 0.0382, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.9312169312169312, |
|
"grad_norm": 0.5921104550361633, |
|
"learning_rate": 0.00019881661299822, |
|
"loss": 0.0361, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.9576719576719577, |
|
"grad_norm": 0.5720839500427246, |
|
"learning_rate": 0.00019874856259621568, |
|
"loss": 0.0419, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.9841269841269842, |
|
"grad_norm": 0.47676175832748413, |
|
"learning_rate": 0.0001986786222021416, |
|
"loss": 0.0387, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 2.0105820105820107, |
|
"grad_norm": 0.4507744312286377, |
|
"learning_rate": 0.0001986067931546176, |
|
"loss": 0.0376, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 2.037037037037037, |
|
"grad_norm": 0.5624145269393921, |
|
"learning_rate": 0.0001985330768284114, |
|
"loss": 0.0402, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 2.0634920634920633, |
|
"grad_norm": 0.3807164430618286, |
|
"learning_rate": 0.00019845747463441207, |
|
"loss": 0.0375, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 2.0899470899470898, |
|
"grad_norm": 0.579364001750946, |
|
"learning_rate": 0.0001983799880196032, |
|
"loss": 0.0384, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 2.1164021164021163, |
|
"grad_norm": 0.5431824922561646, |
|
"learning_rate": 0.00019830061846703507, |
|
"loss": 0.0374, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 2.142857142857143, |
|
"grad_norm": 0.5862318873405457, |
|
"learning_rate": 0.0001982193674957964, |
|
"loss": 0.033, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 2.1693121693121693, |
|
"grad_norm": 0.6154080629348755, |
|
"learning_rate": 0.00019813623666098523, |
|
"loss": 0.0375, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 2.195767195767196, |
|
"grad_norm": 0.48721227049827576, |
|
"learning_rate": 0.000198051227553679, |
|
"loss": 0.0351, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 2.2222222222222223, |
|
"grad_norm": 0.4979737401008606, |
|
"learning_rate": 0.00019796434180090436, |
|
"loss": 0.0388, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 2.248677248677249, |
|
"grad_norm": 0.47598177194595337, |
|
"learning_rate": 0.00019787558106560584, |
|
"loss": 0.0362, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 2.2751322751322753, |
|
"grad_norm": 0.3844626247882843, |
|
"learning_rate": 0.00019778494704661412, |
|
"loss": 0.0356, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 2.3015873015873014, |
|
"grad_norm": 0.24894066154956818, |
|
"learning_rate": 0.00019769244147861348, |
|
"loss": 0.0331, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 2.328042328042328, |
|
"grad_norm": 0.5516997575759888, |
|
"learning_rate": 0.00019759806613210853, |
|
"loss": 0.0377, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 2.3544973544973544, |
|
"grad_norm": 0.23550976812839508, |
|
"learning_rate": 0.0001975018228133904, |
|
"loss": 0.0357, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 2.380952380952381, |
|
"grad_norm": 0.24155914783477783, |
|
"learning_rate": 0.00019740371336450234, |
|
"loss": 0.0378, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 2.4074074074074074, |
|
"grad_norm": 0.278036504983902, |
|
"learning_rate": 0.00019730373966320402, |
|
"loss": 0.0335, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 2.433862433862434, |
|
"grad_norm": 0.48762863874435425, |
|
"learning_rate": 0.000197201903622936, |
|
"loss": 0.0357, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 2.4603174603174605, |
|
"grad_norm": 0.37752997875213623, |
|
"learning_rate": 0.000197098207192783, |
|
"loss": 0.0358, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 2.4867724867724865, |
|
"grad_norm": 0.49835503101348877, |
|
"learning_rate": 0.00019699265235743638, |
|
"loss": 0.0359, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 2.5132275132275135, |
|
"grad_norm": 0.453590452671051, |
|
"learning_rate": 0.00019688524113715657, |
|
"loss": 0.0367, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 2.5396825396825395, |
|
"grad_norm": 0.375345379114151, |
|
"learning_rate": 0.00019677597558773397, |
|
"loss": 0.0339, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.566137566137566, |
|
"grad_norm": 0.5068901777267456, |
|
"learning_rate": 0.0001966648578004499, |
|
"loss": 0.0363, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.5925925925925926, |
|
"grad_norm": 0.3547157347202301, |
|
"learning_rate": 0.00019655188990203647, |
|
"loss": 0.0349, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.619047619047619, |
|
"grad_norm": 0.3495565354824066, |
|
"learning_rate": 0.00019643707405463584, |
|
"loss": 0.0416, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 2.6455026455026456, |
|
"grad_norm": 0.24774406850337982, |
|
"learning_rate": 0.00019632041245575888, |
|
"loss": 0.0339, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.671957671957672, |
|
"grad_norm": 0.3105117082595825, |
|
"learning_rate": 0.00019620190733824315, |
|
"loss": 0.034, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.6984126984126986, |
|
"grad_norm": 0.2921735644340515, |
|
"learning_rate": 0.00019608156097021006, |
|
"loss": 0.0433, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.7248677248677247, |
|
"grad_norm": 0.4697803258895874, |
|
"learning_rate": 0.00019595937565502157, |
|
"loss": 0.0342, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.751322751322751, |
|
"grad_norm": 0.2624008059501648, |
|
"learning_rate": 0.00019583535373123606, |
|
"loss": 0.0325, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.7777777777777777, |
|
"grad_norm": 0.4016210734844208, |
|
"learning_rate": 0.00019570949757256355, |
|
"loss": 0.0332, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.804232804232804, |
|
"grad_norm": 0.4004408121109009, |
|
"learning_rate": 0.00019558180958782024, |
|
"loss": 0.0302, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.8306878306878307, |
|
"grad_norm": 0.17768095433712006, |
|
"learning_rate": 0.0001954522922208825, |
|
"loss": 0.0342, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.857142857142857, |
|
"grad_norm": 0.443602591753006, |
|
"learning_rate": 0.00019532094795064015, |
|
"loss": 0.0345, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.8835978835978837, |
|
"grad_norm": 0.3766569197177887, |
|
"learning_rate": 0.00019518777929094868, |
|
"loss": 0.0338, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.91005291005291, |
|
"grad_norm": 0.3146336078643799, |
|
"learning_rate": 0.00019505278879058157, |
|
"loss": 0.0329, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.9365079365079367, |
|
"grad_norm": 0.44845908880233765, |
|
"learning_rate": 0.00019491597903318125, |
|
"loss": 0.0318, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.962962962962963, |
|
"grad_norm": 0.43101152777671814, |
|
"learning_rate": 0.00019477735263720964, |
|
"loss": 0.0344, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.9894179894179893, |
|
"grad_norm": 0.3073725998401642, |
|
"learning_rate": 0.0001946369122558982, |
|
"loss": 0.0363, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 3.015873015873016, |
|
"grad_norm": 0.5444226861000061, |
|
"learning_rate": 0.00019449466057719703, |
|
"loss": 0.0334, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 3.0423280423280423, |
|
"grad_norm": 0.5659046173095703, |
|
"learning_rate": 0.00019435060032372338, |
|
"loss": 0.0319, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 3.068783068783069, |
|
"grad_norm": 0.444764643907547, |
|
"learning_rate": 0.0001942047342527097, |
|
"loss": 0.0316, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 3.0952380952380953, |
|
"grad_norm": 0.3891999125480652, |
|
"learning_rate": 0.0001940570651559507, |
|
"loss": 0.0361, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 3.121693121693122, |
|
"grad_norm": 0.3401958644390106, |
|
"learning_rate": 0.00019390759585975005, |
|
"loss": 0.0336, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 3.148148148148148, |
|
"grad_norm": 0.24082757532596588, |
|
"learning_rate": 0.00019375632922486615, |
|
"loss": 0.0319, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 3.1746031746031744, |
|
"grad_norm": 0.34355929493904114, |
|
"learning_rate": 0.00019360326814645752, |
|
"loss": 0.035, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 3.201058201058201, |
|
"grad_norm": 0.3685361444950104, |
|
"learning_rate": 0.00019344841555402731, |
|
"loss": 0.0307, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 3.2275132275132274, |
|
"grad_norm": 0.30369487404823303, |
|
"learning_rate": 0.00019329177441136723, |
|
"loss": 0.0315, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 3.253968253968254, |
|
"grad_norm": 0.27075985074043274, |
|
"learning_rate": 0.00019313334771650084, |
|
"loss": 0.0348, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 3.2804232804232805, |
|
"grad_norm": 0.2865367829799652, |
|
"learning_rate": 0.0001929731385016262, |
|
"loss": 0.0322, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 3.306878306878307, |
|
"grad_norm": 0.3830226957798004, |
|
"learning_rate": 0.00019281114983305773, |
|
"loss": 0.0312, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 3.3333333333333335, |
|
"grad_norm": 0.41300126910209656, |
|
"learning_rate": 0.00019264738481116763, |
|
"loss": 0.0339, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 3.35978835978836, |
|
"grad_norm": 0.2901158332824707, |
|
"learning_rate": 0.00019248184657032655, |
|
"loss": 0.0317, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 3.386243386243386, |
|
"grad_norm": 0.29940736293792725, |
|
"learning_rate": 0.00019231453827884352, |
|
"loss": 0.035, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 3.4126984126984126, |
|
"grad_norm": 0.36355215311050415, |
|
"learning_rate": 0.0001921454631389053, |
|
"loss": 0.0319, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 3.439153439153439, |
|
"grad_norm": 0.34896090626716614, |
|
"learning_rate": 0.00019197462438651523, |
|
"loss": 0.0333, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 3.4656084656084656, |
|
"grad_norm": 0.24769645929336548, |
|
"learning_rate": 0.00019180202529143112, |
|
"loss": 0.0305, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 3.492063492063492, |
|
"grad_norm": 0.2804192006587982, |
|
"learning_rate": 0.00019162766915710282, |
|
"loss": 0.0266, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 3.5185185185185186, |
|
"grad_norm": 0.1939479410648346, |
|
"learning_rate": 0.00019145155932060885, |
|
"loss": 0.0366, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 3.544973544973545, |
|
"grad_norm": 0.3781014084815979, |
|
"learning_rate": 0.0001912736991525927, |
|
"loss": 0.0313, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 3.571428571428571, |
|
"grad_norm": 0.28336915373802185, |
|
"learning_rate": 0.00019109409205719812, |
|
"loss": 0.0318, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 3.597883597883598, |
|
"grad_norm": 0.32090702652931213, |
|
"learning_rate": 0.00019091274147200413, |
|
"loss": 0.0295, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 3.624338624338624, |
|
"grad_norm": 0.4037337601184845, |
|
"learning_rate": 0.0001907296508679592, |
|
"loss": 0.0312, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 3.6507936507936507, |
|
"grad_norm": 0.2988503575325012, |
|
"learning_rate": 0.00019054482374931467, |
|
"loss": 0.0285, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 3.677248677248677, |
|
"grad_norm": 0.3322925567626953, |
|
"learning_rate": 0.0001903582636535579, |
|
"loss": 0.0307, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 3.7037037037037037, |
|
"grad_norm": 0.5639650821685791, |
|
"learning_rate": 0.00019016997415134443, |
|
"loss": 0.0323, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 3.7301587301587302, |
|
"grad_norm": 0.30782419443130493, |
|
"learning_rate": 0.00018997995884642967, |
|
"loss": 0.0305, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 3.7566137566137567, |
|
"grad_norm": 0.36455053091049194, |
|
"learning_rate": 0.00018978822137559988, |
|
"loss": 0.0327, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 3.7830687830687832, |
|
"grad_norm": 0.3742886185646057, |
|
"learning_rate": 0.00018959476540860267, |
|
"loss": 0.0287, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 3.8095238095238093, |
|
"grad_norm": 0.36028748750686646, |
|
"learning_rate": 0.00018939959464807672, |
|
"loss": 0.0293, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 3.835978835978836, |
|
"grad_norm": 0.34717637300491333, |
|
"learning_rate": 0.0001892027128294808, |
|
"loss": 0.0292, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 3.8624338624338623, |
|
"grad_norm": 0.32690536975860596, |
|
"learning_rate": 0.00018900412372102244, |
|
"loss": 0.0349, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 3.888888888888889, |
|
"grad_norm": 0.21768151223659515, |
|
"learning_rate": 0.0001888038311235857, |
|
"loss": 0.0295, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 3.9153439153439153, |
|
"grad_norm": 0.2899485230445862, |
|
"learning_rate": 0.00018860183887065856, |
|
"loss": 0.0287, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 3.941798941798942, |
|
"grad_norm": 0.2762286961078644, |
|
"learning_rate": 0.00018839815082825933, |
|
"loss": 0.0307, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 3.9682539682539684, |
|
"grad_norm": 0.24080318212509155, |
|
"learning_rate": 0.00018819277089486281, |
|
"loss": 0.0317, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 3.9947089947089944, |
|
"grad_norm": 0.25071361660957336, |
|
"learning_rate": 0.00018798570300132571, |
|
"loss": 0.0335, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 4.021164021164021, |
|
"grad_norm": 0.47212567925453186, |
|
"learning_rate": 0.00018777695111081132, |
|
"loss": 0.0283, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 4.0476190476190474, |
|
"grad_norm": 0.25096508860588074, |
|
"learning_rate": 0.0001875665192187136, |
|
"loss": 0.0339, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 4.074074074074074, |
|
"grad_norm": 0.41704872250556946, |
|
"learning_rate": 0.00018735441135258097, |
|
"loss": 0.0305, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 4.1005291005291005, |
|
"grad_norm": 0.3602231442928314, |
|
"learning_rate": 0.00018714063157203886, |
|
"loss": 0.0339, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 4.1269841269841265, |
|
"grad_norm": 0.21357379853725433, |
|
"learning_rate": 0.00018692518396871234, |
|
"loss": 0.0336, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 4.1534391534391535, |
|
"grad_norm": 0.37265297770500183, |
|
"learning_rate": 0.00018670807266614757, |
|
"loss": 0.0316, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 4.1798941798941796, |
|
"grad_norm": 0.32641279697418213, |
|
"learning_rate": 0.00018648930181973313, |
|
"loss": 0.0298, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 4.2063492063492065, |
|
"grad_norm": 0.28364816308021545, |
|
"learning_rate": 0.00018626887561662016, |
|
"loss": 0.0318, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 4.232804232804233, |
|
"grad_norm": 0.38192281126976013, |
|
"learning_rate": 0.00018604679827564254, |
|
"loss": 0.0321, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 4.2592592592592595, |
|
"grad_norm": 0.29732367396354675, |
|
"learning_rate": 0.00018582307404723593, |
|
"loss": 0.0302, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 4.285714285714286, |
|
"grad_norm": 0.32292482256889343, |
|
"learning_rate": 0.00018559770721335653, |
|
"loss": 0.0296, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 4.3121693121693125, |
|
"grad_norm": 0.36669158935546875, |
|
"learning_rate": 0.00018537070208739898, |
|
"loss": 0.0313, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 4.338624338624339, |
|
"grad_norm": 0.32983410358428955, |
|
"learning_rate": 0.00018514206301411407, |
|
"loss": 0.0293, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 4.365079365079365, |
|
"grad_norm": 0.38306760787963867, |
|
"learning_rate": 0.00018491179436952532, |
|
"loss": 0.0309, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 4.391534391534392, |
|
"grad_norm": 0.31832560896873474, |
|
"learning_rate": 0.00018467990056084536, |
|
"loss": 0.03, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 4.417989417989418, |
|
"grad_norm": 0.4297352731227875, |
|
"learning_rate": 0.0001844463860263916, |
|
"loss": 0.0302, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 4.444444444444445, |
|
"grad_norm": 0.22931234538555145, |
|
"learning_rate": 0.0001842112552355011, |
|
"loss": 0.0318, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 4.470899470899471, |
|
"grad_norm": 0.49042898416519165, |
|
"learning_rate": 0.00018397451268844524, |
|
"loss": 0.0353, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 4.497354497354498, |
|
"grad_norm": 0.29628482460975647, |
|
"learning_rate": 0.0001837361629163436, |
|
"loss": 0.0364, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 4.523809523809524, |
|
"grad_norm": 0.3375132977962494, |
|
"learning_rate": 0.000183496210481077, |
|
"loss": 0.0324, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 4.550264550264551, |
|
"grad_norm": 0.2872624397277832, |
|
"learning_rate": 0.00018325465997520035, |
|
"loss": 0.0284, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 4.576719576719577, |
|
"grad_norm": 0.3233286142349243, |
|
"learning_rate": 0.0001830115160218548, |
|
"loss": 0.0338, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 4.603174603174603, |
|
"grad_norm": 0.25356996059417725, |
|
"learning_rate": 0.00018276678327467923, |
|
"loss": 0.0286, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 4.62962962962963, |
|
"grad_norm": 0.21880115568637848, |
|
"learning_rate": 0.00018252046641772097, |
|
"loss": 0.0335, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 4.656084656084656, |
|
"grad_norm": 0.30065304040908813, |
|
"learning_rate": 0.0001822725701653465, |
|
"loss": 0.0294, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 4.682539682539683, |
|
"grad_norm": 0.44395706057548523, |
|
"learning_rate": 0.00018202309926215092, |
|
"loss": 0.0318, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 4.708994708994709, |
|
"grad_norm": 0.4432872235774994, |
|
"learning_rate": 0.00018177205848286736, |
|
"loss": 0.0332, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 4.735449735449736, |
|
"grad_norm": 0.3368462026119232, |
|
"learning_rate": 0.00018151945263227543, |
|
"loss": 0.0351, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 4.761904761904762, |
|
"grad_norm": 0.4848134517669678, |
|
"learning_rate": 0.00018126528654510934, |
|
"loss": 0.0281, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 4.788359788359788, |
|
"grad_norm": 0.339212566614151, |
|
"learning_rate": 0.00018100956508596537, |
|
"loss": 0.0295, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 4.814814814814815, |
|
"grad_norm": 0.37739694118499756, |
|
"learning_rate": 0.00018075229314920872, |
|
"loss": 0.0293, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 4.841269841269841, |
|
"grad_norm": 0.2847217619419098, |
|
"learning_rate": 0.00018049347565887987, |
|
"loss": 0.0291, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 4.867724867724868, |
|
"grad_norm": 0.2565479278564453, |
|
"learning_rate": 0.00018023311756860038, |
|
"loss": 0.0318, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 4.894179894179894, |
|
"grad_norm": 0.2911958396434784, |
|
"learning_rate": 0.0001799712238614779, |
|
"loss": 0.0293, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 4.920634920634921, |
|
"grad_norm": 0.35511329770088196, |
|
"learning_rate": 0.0001797077995500111, |
|
"loss": 0.0325, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 4.947089947089947, |
|
"grad_norm": 0.36546435952186584, |
|
"learning_rate": 0.00017944284967599344, |
|
"loss": 0.0288, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 4.973544973544973, |
|
"grad_norm": 0.24980072677135468, |
|
"learning_rate": 0.0001791763793104168, |
|
"loss": 0.03, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"grad_norm": 0.2070397436618805, |
|
"learning_rate": 0.00017890839355337443, |
|
"loss": 0.0334, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 5.026455026455026, |
|
"grad_norm": 0.34367257356643677, |
|
"learning_rate": 0.00017863889753396334, |
|
"loss": 0.0287, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 5.052910052910053, |
|
"grad_norm": 0.24305817484855652, |
|
"learning_rate": 0.00017836789641018606, |
|
"loss": 0.0297, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 5.079365079365079, |
|
"grad_norm": 0.28586849570274353, |
|
"learning_rate": 0.0001780953953688521, |
|
"loss": 0.0324, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 5.105820105820106, |
|
"grad_norm": 0.3378239572048187, |
|
"learning_rate": 0.00017782139962547834, |
|
"loss": 0.0308, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 5.132275132275132, |
|
"grad_norm": 0.26583924889564514, |
|
"learning_rate": 0.00017754591442418964, |
|
"loss": 0.0298, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 5.158730158730159, |
|
"grad_norm": 0.22006455063819885, |
|
"learning_rate": 0.0001772689450376181, |
|
"loss": 0.026, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 5.185185185185185, |
|
"grad_norm": 0.26729926466941833, |
|
"learning_rate": 0.00017699049676680236, |
|
"loss": 0.0304, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 5.211640211640212, |
|
"grad_norm": 0.34223079681396484, |
|
"learning_rate": 0.00017671057494108604, |
|
"loss": 0.0331, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 5.238095238095238, |
|
"grad_norm": 0.18476738035678864, |
|
"learning_rate": 0.00017642918491801582, |
|
"loss": 0.0318, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 5.264550264550264, |
|
"grad_norm": 0.2811238467693329, |
|
"learning_rate": 0.00017614633208323877, |
|
"loss": 0.029, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 5.291005291005291, |
|
"grad_norm": 0.27177298069000244, |
|
"learning_rate": 0.00017586202185039952, |
|
"loss": 0.0304, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 5.317460317460317, |
|
"grad_norm": 0.4007485508918762, |
|
"learning_rate": 0.00017557625966103623, |
|
"loss": 0.0285, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 5.343915343915344, |
|
"grad_norm": 0.2058725655078888, |
|
"learning_rate": 0.0001752890509844769, |
|
"loss": 0.0296, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 5.37037037037037, |
|
"grad_norm": 0.3283293843269348, |
|
"learning_rate": 0.00017500040131773444, |
|
"loss": 0.0304, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 5.396825396825397, |
|
"grad_norm": 0.33090895414352417, |
|
"learning_rate": 0.00017471031618540141, |
|
"loss": 0.0289, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 5.423280423280423, |
|
"grad_norm": 0.3411179780960083, |
|
"learning_rate": 0.0001744188011395445, |
|
"loss": 0.0306, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 5.449735449735449, |
|
"grad_norm": 0.3357543349266052, |
|
"learning_rate": 0.0001741258617595981, |
|
"loss": 0.0294, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 5.476190476190476, |
|
"grad_norm": 0.2584282457828522, |
|
"learning_rate": 0.0001738315036522575, |
|
"loss": 0.0301, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 5.502645502645502, |
|
"grad_norm": 0.2944985628128052, |
|
"learning_rate": 0.00017353573245137171, |
|
"loss": 0.0256, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 5.529100529100529, |
|
"grad_norm": 0.38644468784332275, |
|
"learning_rate": 0.00017323855381783551, |
|
"loss": 0.032, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 5.555555555555555, |
|
"grad_norm": 0.2968820035457611, |
|
"learning_rate": 0.00017293997343948117, |
|
"loss": 0.0304, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 5.582010582010582, |
|
"grad_norm": 0.428739458322525, |
|
"learning_rate": 0.00017263999703096957, |
|
"loss": 0.0284, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 5.608465608465608, |
|
"grad_norm": 0.28147000074386597, |
|
"learning_rate": 0.00017233863033368077, |
|
"loss": 0.0315, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 5.634920634920634, |
|
"grad_norm": 0.3461019992828369, |
|
"learning_rate": 0.00017203587911560436, |
|
"loss": 0.0313, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 5.661375661375661, |
|
"grad_norm": 0.30748844146728516, |
|
"learning_rate": 0.0001717317491712286, |
|
"loss": 0.0276, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 5.6878306878306875, |
|
"grad_norm": 0.3075932264328003, |
|
"learning_rate": 0.00017142624632143002, |
|
"loss": 0.0308, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 5.714285714285714, |
|
"grad_norm": 0.2982342839241028, |
|
"learning_rate": 0.0001711193764133617, |
|
"loss": 0.0279, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 5.7407407407407405, |
|
"grad_norm": 0.3656872510910034, |
|
"learning_rate": 0.00017081114532034147, |
|
"loss": 0.0278, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 5.767195767195767, |
|
"grad_norm": 0.29646244645118713, |
|
"learning_rate": 0.0001705015589417395, |
|
"loss": 0.0283, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 5.7936507936507935, |
|
"grad_norm": 0.2913481891155243, |
|
"learning_rate": 0.00017019062320286526, |
|
"loss": 0.0269, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 5.8201058201058204, |
|
"grad_norm": 0.25158271193504333, |
|
"learning_rate": 0.0001698783440548543, |
|
"loss": 0.0324, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 5.8465608465608465, |
|
"grad_norm": 0.34147316217422485, |
|
"learning_rate": 0.0001695647274745544, |
|
"loss": 0.0308, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 5.8730158730158735, |
|
"grad_norm": 0.23324055969715118, |
|
"learning_rate": 0.00016924977946441072, |
|
"loss": 0.0309, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 5.8994708994708995, |
|
"grad_norm": 0.375702440738678, |
|
"learning_rate": 0.0001689335060523515, |
|
"loss": 0.0298, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 5.925925925925926, |
|
"grad_norm": 0.22085419297218323, |
|
"learning_rate": 0.00016861591329167238, |
|
"loss": 0.0297, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 5.9523809523809526, |
|
"grad_norm": 0.29211702942848206, |
|
"learning_rate": 0.0001682970072609205, |
|
"loss": 0.0296, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 5.978835978835979, |
|
"grad_norm": 0.2560863494873047, |
|
"learning_rate": 0.00016797679406377837, |
|
"loss": 0.0297, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 6.005291005291006, |
|
"grad_norm": 0.22338557243347168, |
|
"learning_rate": 0.00016765527982894689, |
|
"loss": 0.0308, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 6.031746031746032, |
|
"grad_norm": 0.29270821809768677, |
|
"learning_rate": 0.00016733247071002802, |
|
"loss": 0.0249, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 6.058201058201059, |
|
"grad_norm": 0.33310195803642273, |
|
"learning_rate": 0.00016700837288540716, |
|
"loss": 0.0294, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 6.084656084656085, |
|
"grad_norm": 0.28385937213897705, |
|
"learning_rate": 0.0001666829925581348, |
|
"loss": 0.0277, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 6.111111111111111, |
|
"grad_norm": 0.2993493974208832, |
|
"learning_rate": 0.0001663563359558078, |
|
"loss": 0.0298, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 6.137566137566138, |
|
"grad_norm": 0.308386892080307, |
|
"learning_rate": 0.00016602840933045018, |
|
"loss": 0.0295, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 6.164021164021164, |
|
"grad_norm": 0.38717547059059143, |
|
"learning_rate": 0.00016569921895839354, |
|
"loss": 0.0342, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 6.190476190476191, |
|
"grad_norm": 0.2936514914035797, |
|
"learning_rate": 0.00016536877114015685, |
|
"loss": 0.0278, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 6.216931216931217, |
|
"grad_norm": 0.26422831416130066, |
|
"learning_rate": 0.00016503707220032586, |
|
"loss": 0.0304, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 6.243386243386244, |
|
"grad_norm": 0.3753129839897156, |
|
"learning_rate": 0.00016470412848743223, |
|
"loss": 0.0278, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 6.26984126984127, |
|
"grad_norm": 0.33631107211112976, |
|
"learning_rate": 0.00016436994637383166, |
|
"loss": 0.0279, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 6.296296296296296, |
|
"grad_norm": 0.2561590075492859, |
|
"learning_rate": 0.00016403453225558235, |
|
"loss": 0.0266, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 6.322751322751323, |
|
"grad_norm": 0.342010498046875, |
|
"learning_rate": 0.00016369789255232223, |
|
"loss": 0.0287, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 6.349206349206349, |
|
"grad_norm": 0.2974427044391632, |
|
"learning_rate": 0.0001633600337071463, |
|
"loss": 0.0264, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 6.375661375661376, |
|
"grad_norm": 0.24735130369663239, |
|
"learning_rate": 0.0001630209621864833, |
|
"loss": 0.025, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 6.402116402116402, |
|
"grad_norm": 0.3415727913379669, |
|
"learning_rate": 0.00016268068447997176, |
|
"loss": 0.034, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 6.428571428571429, |
|
"grad_norm": 0.21212635934352875, |
|
"learning_rate": 0.00016233920710033607, |
|
"loss": 0.0297, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 6.455026455026455, |
|
"grad_norm": 0.4277040958404541, |
|
"learning_rate": 0.00016199653658326168, |
|
"loss": 0.0305, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 6.481481481481482, |
|
"grad_norm": 0.377903014421463, |
|
"learning_rate": 0.00016165267948726987, |
|
"loss": 0.0292, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 6.507936507936508, |
|
"grad_norm": 0.3249509632587433, |
|
"learning_rate": 0.0001613076423935926, |
|
"loss": 0.027, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 6.534391534391535, |
|
"grad_norm": 0.2562958002090454, |
|
"learning_rate": 0.0001609614319060461, |
|
"loss": 0.0277, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 6.560846560846561, |
|
"grad_norm": 0.24918289482593536, |
|
"learning_rate": 0.0001606140546509049, |
|
"loss": 0.0263, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 6.587301587301587, |
|
"grad_norm": 0.24581879377365112, |
|
"learning_rate": 0.00016026551727677465, |
|
"loss": 0.0305, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 6.613756613756614, |
|
"grad_norm": 0.20334988832473755, |
|
"learning_rate": 0.0001599158264544651, |
|
"loss": 0.0273, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 6.64021164021164, |
|
"grad_norm": 0.20349211990833282, |
|
"learning_rate": 0.00015956498887686232, |
|
"loss": 0.0301, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 6.666666666666667, |
|
"grad_norm": 0.27112945914268494, |
|
"learning_rate": 0.0001592130112588007, |
|
"loss": 0.027, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 6.693121693121693, |
|
"grad_norm": 0.18712319433689117, |
|
"learning_rate": 0.00015885990033693434, |
|
"loss": 0.0262, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 6.71957671957672, |
|
"grad_norm": 0.3410026729106903, |
|
"learning_rate": 0.00015850566286960803, |
|
"loss": 0.0266, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 6.746031746031746, |
|
"grad_norm": 0.29726535081863403, |
|
"learning_rate": 0.00015815030563672813, |
|
"loss": 0.0286, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 6.772486772486772, |
|
"grad_norm": 0.20028139650821686, |
|
"learning_rate": 0.00015779383543963271, |
|
"loss": 0.0298, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 6.798941798941799, |
|
"grad_norm": 0.28673526644706726, |
|
"learning_rate": 0.00015743625910096117, |
|
"loss": 0.0258, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 6.825396825396825, |
|
"grad_norm": 0.3958338499069214, |
|
"learning_rate": 0.00015707758346452408, |
|
"loss": 0.0286, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 6.851851851851852, |
|
"grad_norm": 0.2631092369556427, |
|
"learning_rate": 0.00015671781539517175, |
|
"loss": 0.0269, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 6.878306878306878, |
|
"grad_norm": 0.24416978657245636, |
|
"learning_rate": 0.00015635696177866316, |
|
"loss": 0.0314, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 6.904761904761905, |
|
"grad_norm": 0.4131467640399933, |
|
"learning_rate": 0.000155995029521534, |
|
"loss": 0.0297, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 6.931216931216931, |
|
"grad_norm": 0.42825278639793396, |
|
"learning_rate": 0.00015563202555096453, |
|
"loss": 0.0277, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 6.957671957671957, |
|
"grad_norm": 0.2998770475387573, |
|
"learning_rate": 0.00015526795681464713, |
|
"loss": 0.027, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 6.984126984126984, |
|
"grad_norm": 0.35698017477989197, |
|
"learning_rate": 0.00015490283028065296, |
|
"loss": 0.032, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 7.01058201058201, |
|
"grad_norm": 0.3422069847583771, |
|
"learning_rate": 0.00015453665293729905, |
|
"loss": 0.029, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 7.037037037037037, |
|
"grad_norm": 0.3283518850803375, |
|
"learning_rate": 0.00015416943179301422, |
|
"loss": 0.0285, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 7.063492063492063, |
|
"grad_norm": 0.17980273067951202, |
|
"learning_rate": 0.00015380117387620506, |
|
"loss": 0.0262, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 7.08994708994709, |
|
"grad_norm": 0.22317390143871307, |
|
"learning_rate": 0.00015343188623512152, |
|
"loss": 0.0292, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 7.116402116402116, |
|
"grad_norm": 0.2997380793094635, |
|
"learning_rate": 0.0001530615759377217, |
|
"loss": 0.0271, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 7.142857142857143, |
|
"grad_norm": 0.3536098301410675, |
|
"learning_rate": 0.000152690250071537, |
|
"loss": 0.0282, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 7.169312169312169, |
|
"grad_norm": 0.23207667469978333, |
|
"learning_rate": 0.00015231791574353594, |
|
"loss": 0.0258, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 7.195767195767195, |
|
"grad_norm": 0.4378697872161865, |
|
"learning_rate": 0.00015194458007998876, |
|
"loss": 0.0262, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 7.222222222222222, |
|
"grad_norm": 0.28229495882987976, |
|
"learning_rate": 0.00015157025022633042, |
|
"loss": 0.0285, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 7.248677248677248, |
|
"grad_norm": 0.3776451647281647, |
|
"learning_rate": 0.00015119493334702432, |
|
"loss": 0.0258, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 7.275132275132275, |
|
"grad_norm": 0.37293151021003723, |
|
"learning_rate": 0.00015081863662542487, |
|
"loss": 0.0273, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 7.301587301587301, |
|
"grad_norm": 0.19970954954624176, |
|
"learning_rate": 0.00015044136726364015, |
|
"loss": 0.0262, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 7.328042328042328, |
|
"grad_norm": 0.3114972412586212, |
|
"learning_rate": 0.000150063132482394, |
|
"loss": 0.0275, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 7.354497354497354, |
|
"grad_norm": 0.2646826207637787, |
|
"learning_rate": 0.0001496839395208879, |
|
"loss": 0.0282, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 7.380952380952381, |
|
"grad_norm": 0.4848107397556305, |
|
"learning_rate": 0.0001493037956366623, |
|
"loss": 0.0276, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 7.407407407407407, |
|
"grad_norm": 0.4195409119129181, |
|
"learning_rate": 0.0001489227081054578, |
|
"loss": 0.0286, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 7.4338624338624335, |
|
"grad_norm": 0.2596321702003479, |
|
"learning_rate": 0.00014854068422107589, |
|
"loss": 0.0288, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 7.4603174603174605, |
|
"grad_norm": 0.20885036885738373, |
|
"learning_rate": 0.00014815773129523934, |
|
"loss": 0.0262, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 7.4867724867724865, |
|
"grad_norm": 0.32868385314941406, |
|
"learning_rate": 0.0001477738566574522, |
|
"loss": 0.0255, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 7.5132275132275135, |
|
"grad_norm": 0.25176772475242615, |
|
"learning_rate": 0.00014738906765485963, |
|
"loss": 0.0263, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 7.5396825396825395, |
|
"grad_norm": 0.265190988779068, |
|
"learning_rate": 0.0001470033716521072, |
|
"loss": 0.0268, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 7.5661375661375665, |
|
"grad_norm": 0.3301750123500824, |
|
"learning_rate": 0.00014661677603119993, |
|
"loss": 0.0288, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 7.592592592592593, |
|
"grad_norm": 0.2590659260749817, |
|
"learning_rate": 0.00014622928819136105, |
|
"loss": 0.0287, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 7.619047619047619, |
|
"grad_norm": 0.2190064787864685, |
|
"learning_rate": 0.00014584091554889034, |
|
"loss": 0.025, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 7.645502645502646, |
|
"grad_norm": 0.21431300044059753, |
|
"learning_rate": 0.00014545166553702222, |
|
"loss": 0.0267, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 7.671957671957672, |
|
"grad_norm": 0.25257354974746704, |
|
"learning_rate": 0.00014506154560578352, |
|
"loss": 0.0251, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 7.698412698412699, |
|
"grad_norm": 0.23436136543750763, |
|
"learning_rate": 0.0001446705632218507, |
|
"loss": 0.0259, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 7.724867724867725, |
|
"grad_norm": 0.294412225484848, |
|
"learning_rate": 0.00014427872586840725, |
|
"loss": 0.0274, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 7.751322751322752, |
|
"grad_norm": 0.29302552342414856, |
|
"learning_rate": 0.00014388604104500016, |
|
"loss": 0.0286, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 7.777777777777778, |
|
"grad_norm": 0.3188011646270752, |
|
"learning_rate": 0.00014349251626739656, |
|
"loss": 0.0304, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 7.804232804232804, |
|
"grad_norm": 0.28278741240501404, |
|
"learning_rate": 0.00014309815906743994, |
|
"loss": 0.0258, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 7.830687830687831, |
|
"grad_norm": 0.311074823141098, |
|
"learning_rate": 0.00014270297699290566, |
|
"loss": 0.0276, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 7.857142857142857, |
|
"grad_norm": 0.2844277322292328, |
|
"learning_rate": 0.000142306977607357, |
|
"loss": 0.0248, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 7.883597883597884, |
|
"grad_norm": 0.3190961182117462, |
|
"learning_rate": 0.0001419101684899998, |
|
"loss": 0.027, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 7.91005291005291, |
|
"grad_norm": 0.24902918934822083, |
|
"learning_rate": 0.00014151255723553795, |
|
"loss": 0.0262, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 7.936507936507937, |
|
"grad_norm": 0.2748497724533081, |
|
"learning_rate": 0.00014111415145402772, |
|
"loss": 0.027, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 7.962962962962963, |
|
"grad_norm": 0.24207575619220734, |
|
"learning_rate": 0.00014071495877073222, |
|
"loss": 0.0254, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 7.98941798941799, |
|
"grad_norm": 0.19475680589675903, |
|
"learning_rate": 0.00014031498682597532, |
|
"loss": 0.0257, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 8.015873015873016, |
|
"grad_norm": 0.2495572566986084, |
|
"learning_rate": 0.00013991424327499557, |
|
"loss": 0.023, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 8.042328042328043, |
|
"grad_norm": 0.20358052849769592, |
|
"learning_rate": 0.00013951273578779972, |
|
"loss": 0.029, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 8.068783068783068, |
|
"grad_norm": 0.27466002106666565, |
|
"learning_rate": 0.0001391104720490156, |
|
"loss": 0.0255, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 8.095238095238095, |
|
"grad_norm": 0.22618280351161957, |
|
"learning_rate": 0.00013870745975774559, |
|
"loss": 0.0263, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 8.121693121693122, |
|
"grad_norm": 0.23149581253528595, |
|
"learning_rate": 0.0001383037066274186, |
|
"loss": 0.0242, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 8.148148148148149, |
|
"grad_norm": 0.32176461815834045, |
|
"learning_rate": 0.0001378992203856431, |
|
"loss": 0.0237, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 8.174603174603174, |
|
"grad_norm": 0.2506023347377777, |
|
"learning_rate": 0.00013749400877405865, |
|
"loss": 0.0251, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 8.201058201058201, |
|
"grad_norm": 0.18478137254714966, |
|
"learning_rate": 0.00013708807954818819, |
|
"loss": 0.0278, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 8.227513227513228, |
|
"grad_norm": 0.27599525451660156, |
|
"learning_rate": 0.0001366814404772892, |
|
"loss": 0.0254, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 8.253968253968253, |
|
"grad_norm": 0.2510387897491455, |
|
"learning_rate": 0.00013627409934420534, |
|
"loss": 0.0249, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 8.28042328042328, |
|
"grad_norm": 0.2076123058795929, |
|
"learning_rate": 0.0001358660639452173, |
|
"loss": 0.0269, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 8.306878306878307, |
|
"grad_norm": 0.23005664348602295, |
|
"learning_rate": 0.0001354573420898936, |
|
"loss": 0.0274, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 8.333333333333334, |
|
"grad_norm": 0.1964070051908493, |
|
"learning_rate": 0.00013504794160094116, |
|
"loss": 0.0265, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 8.359788359788359, |
|
"grad_norm": 0.22786502540111542, |
|
"learning_rate": 0.0001346378703140556, |
|
"loss": 0.0253, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 8.386243386243386, |
|
"grad_norm": 0.32075852155685425, |
|
"learning_rate": 0.00013422713607777123, |
|
"loss": 0.0254, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 8.412698412698413, |
|
"grad_norm": 0.40378686785697937, |
|
"learning_rate": 0.00013381574675331082, |
|
"loss": 0.0251, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 8.43915343915344, |
|
"grad_norm": 0.29405686259269714, |
|
"learning_rate": 0.00013340371021443515, |
|
"loss": 0.0255, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 8.465608465608465, |
|
"grad_norm": 0.2700364291667938, |
|
"learning_rate": 0.0001329910343472924, |
|
"loss": 0.0275, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 8.492063492063492, |
|
"grad_norm": 0.29450562596321106, |
|
"learning_rate": 0.00013257772705026693, |
|
"loss": 0.026, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 8.518518518518519, |
|
"grad_norm": 0.3529842793941498, |
|
"learning_rate": 0.0001321637962338286, |
|
"loss": 0.025, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 8.544973544973544, |
|
"grad_norm": 0.35684412717819214, |
|
"learning_rate": 0.00013174924982038087, |
|
"loss": 0.0255, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 8.571428571428571, |
|
"grad_norm": 0.2878738343715668, |
|
"learning_rate": 0.00013133409574410943, |
|
"loss": 0.0266, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 8.597883597883598, |
|
"grad_norm": 0.2095337063074112, |
|
"learning_rate": 0.00013091834195083036, |
|
"loss": 0.0267, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 8.624338624338625, |
|
"grad_norm": 0.337873637676239, |
|
"learning_rate": 0.0001305019963978379, |
|
"loss": 0.0252, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 8.65079365079365, |
|
"grad_norm": 0.191969633102417, |
|
"learning_rate": 0.0001300850670537523, |
|
"loss": 0.028, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 8.677248677248677, |
|
"grad_norm": 0.36041873693466187, |
|
"learning_rate": 0.00012966756189836725, |
|
"loss": 0.0254, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 8.703703703703704, |
|
"grad_norm": 0.3025069832801819, |
|
"learning_rate": 0.00012924948892249706, |
|
"loss": 0.0309, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 8.73015873015873, |
|
"grad_norm": 0.2568103075027466, |
|
"learning_rate": 0.00012883085612782388, |
|
"loss": 0.0251, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 8.756613756613756, |
|
"grad_norm": 0.32407626509666443, |
|
"learning_rate": 0.0001284116715267445, |
|
"loss": 0.0248, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 8.783068783068783, |
|
"grad_norm": 0.28700342774391174, |
|
"learning_rate": 0.00012799194314221687, |
|
"loss": 0.0281, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 8.80952380952381, |
|
"grad_norm": 0.2833905816078186, |
|
"learning_rate": 0.00012757167900760678, |
|
"loss": 0.0234, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 8.835978835978835, |
|
"grad_norm": 0.24850112199783325, |
|
"learning_rate": 0.00012715088716653387, |
|
"loss": 0.0264, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 8.862433862433862, |
|
"grad_norm": 0.30938786268234253, |
|
"learning_rate": 0.00012672957567271784, |
|
"loss": 0.0246, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 8.88888888888889, |
|
"grad_norm": 0.2675461769104004, |
|
"learning_rate": 0.00012630775258982424, |
|
"loss": 0.0244, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 8.915343915343914, |
|
"grad_norm": 0.20268189907073975, |
|
"learning_rate": 0.00012588542599131013, |
|
"loss": 0.0279, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 8.941798941798941, |
|
"grad_norm": 0.21915365755558014, |
|
"learning_rate": 0.00012546260396026964, |
|
"loss": 0.0225, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 8.968253968253968, |
|
"grad_norm": 0.20165018737316132, |
|
"learning_rate": 0.00012503929458927912, |
|
"loss": 0.0229, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 8.994708994708995, |
|
"grad_norm": 0.33086565136909485, |
|
"learning_rate": 0.00012461550598024234, |
|
"loss": 0.0264, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 9.02116402116402, |
|
"grad_norm": 0.24809397757053375, |
|
"learning_rate": 0.00012419124624423548, |
|
"loss": 0.0242, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 9.047619047619047, |
|
"grad_norm": 0.24256815016269684, |
|
"learning_rate": 0.0001237665235013518, |
|
"loss": 0.0215, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 9.074074074074074, |
|
"grad_norm": 0.1913849264383316, |
|
"learning_rate": 0.00012334134588054625, |
|
"loss": 0.0237, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 9.100529100529101, |
|
"grad_norm": 0.34869614243507385, |
|
"learning_rate": 0.00012291572151947982, |
|
"loss": 0.0266, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 9.126984126984127, |
|
"grad_norm": 0.2579362690448761, |
|
"learning_rate": 0.00012248965856436394, |
|
"loss": 0.0239, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 9.153439153439153, |
|
"grad_norm": 0.27626147866249084, |
|
"learning_rate": 0.0001220631651698045, |
|
"loss": 0.027, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 9.17989417989418, |
|
"grad_norm": 0.2195921093225479, |
|
"learning_rate": 0.00012163624949864569, |
|
"loss": 0.0231, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 9.206349206349206, |
|
"grad_norm": 0.2434544414281845, |
|
"learning_rate": 0.00012120891972181387, |
|
"loss": 0.0262, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 9.232804232804233, |
|
"grad_norm": 0.31113576889038086, |
|
"learning_rate": 0.00012078118401816119, |
|
"loss": 0.0262, |
|
"step": 3490 |
|
}, |
|
{
"epoch": 9.25925925925926,
"grad_norm": 0.24409563839435577,
"learning_rate": 0.00012035305057430888,
"loss": 0.0263,
"step": 3500
},
{
"epoch": 9.285714285714286,
"grad_norm": 0.2240586131811142,
"learning_rate": 0.00011992452758449084,
"loss": 0.0243,
"step": 3510
},
{
"epoch": 9.312169312169312,
"grad_norm": 0.306912362575531,
"learning_rate": 0.00011949562325039665,
"loss": 0.0259,
"step": 3520
},
{
"epoch": 9.338624338624339,
"grad_norm": 0.18710291385650635,
"learning_rate": 0.00011906634578101447,
"loss": 0.0252,
"step": 3530
},
{
"epoch": 9.365079365079366,
"grad_norm": 0.2894376218318939,
"learning_rate": 0.0001186367033924742,
"loss": 0.0239,
"step": 3540
},
{
"epoch": 9.39153439153439,
"grad_norm": 0.29621022939682007,
"learning_rate": 0.00011820670430789004,
"loss": 0.0237,
"step": 3550
},
{
"epoch": 9.417989417989418,
"grad_norm": 0.2515980303287506,
"learning_rate": 0.00011777635675720313,
"loss": 0.0243,
"step": 3560
},
{
"epoch": 9.444444444444445,
"grad_norm": 0.27100127935409546,
"learning_rate": 0.00011734566897702412,
"loss": 0.0239,
"step": 3570
},
{
"epoch": 9.470899470899472,
"grad_norm": 0.3107968270778656,
"learning_rate": 0.00011691464921047536,
"loss": 0.0259,
"step": 3580
},
{
"epoch": 9.497354497354497,
"grad_norm": 0.2154056280851364,
"learning_rate": 0.00011648330570703331,
"loss": 0.025,
"step": 3590
},
{
"epoch": 9.523809523809524,
"grad_norm": 0.25029587745666504,
"learning_rate": 0.00011605164672237059,
"loss": 0.0253,
"step": 3600
},
{
"epoch": 9.55026455026455,
"grad_norm": 0.3177552819252014,
"learning_rate": 0.0001156196805181979,
"loss": 0.0244,
"step": 3610
},
{
"epoch": 9.576719576719576,
"grad_norm": 0.2663916051387787,
"learning_rate": 0.00011518741536210594,
"loss": 0.0231,
"step": 3620
},
{
"epoch": 9.603174603174603,
"grad_norm": 0.2276095300912857,
"learning_rate": 0.00011475485952740718,
"loss": 0.0224,
"step": 3630
},
{
"epoch": 9.62962962962963,
"grad_norm": 0.30287131667137146,
"learning_rate": 0.0001143220212929776,
"loss": 0.0248,
"step": 3640
},
{
"epoch": 9.656084656084657,
"grad_norm": 0.2559273838996887,
"learning_rate": 0.00011388890894309812,
"loss": 0.0242,
"step": 3650
},
{
"epoch": 9.682539682539682,
"grad_norm": 0.3561594486236572,
"learning_rate": 0.000113455530767296,
"loss": 0.0258,
"step": 3660
},
{
"epoch": 9.708994708994709,
"grad_norm": 0.24624326825141907,
"learning_rate": 0.0001130218950601863,
"loss": 0.0258,
"step": 3670
},
{
"epoch": 9.735449735449736,
"grad_norm": 0.1778760552406311,
"learning_rate": 0.00011258801012131313,
"loss": 0.0224,
"step": 3680
},
{
"epoch": 9.761904761904763,
"grad_norm": 0.3034067153930664,
"learning_rate": 0.00011215388425499069,
"loss": 0.0247,
"step": 3690
},
{
"epoch": 9.788359788359788,
"grad_norm": 0.1993577629327774,
"learning_rate": 0.00011171952577014448,
"loss": 0.0253,
"step": 3700
},
{
"epoch": 9.814814814814815,
"grad_norm": 0.27741366624832153,
"learning_rate": 0.00011128494298015208,
"loss": 0.0222,
"step": 3710
},
{
"epoch": 9.841269841269842,
"grad_norm": 0.2273164540529251,
"learning_rate": 0.00011085014420268423,
"loss": 0.0239,
"step": 3720
},
{
"epoch": 9.867724867724867,
"grad_norm": 0.28204792737960815,
"learning_rate": 0.00011041513775954549,
"loss": 0.0229,
"step": 3730
},
{
"epoch": 9.894179894179894,
"grad_norm": 0.18121418356895447,
"learning_rate": 0.00010997993197651508,
"loss": 0.024,
"step": 3740
},
{
"epoch": 9.920634920634921,
"grad_norm": 0.20097821950912476,
"learning_rate": 0.00010954453518318746,
"loss": 0.0233,
"step": 3750
},
{
"epoch": 9.947089947089948,
"grad_norm": 0.22093629837036133,
"learning_rate": 0.00010910895571281286,
"loss": 0.0227,
"step": 3760
},
{
"epoch": 9.973544973544973,
"grad_norm": 0.19535768032073975,
"learning_rate": 0.00010867320190213792,
"loss": 0.0225,
"step": 3770
},
{
"epoch": 10.0,
"grad_norm": 0.2424137443304062,
"learning_rate": 0.00010823728209124603,
"loss": 0.0212,
"step": 3780
},
{
"epoch": 10.026455026455027,
"grad_norm": 0.23939844965934753,
"learning_rate": 0.00010780120462339775,
"loss": 0.0249,
"step": 3790
},
{
"epoch": 10.052910052910052,
"grad_norm": 0.2456304281949997,
"learning_rate": 0.00010736497784487104,
"loss": 0.0228,
"step": 3800
},
{
"epoch": 10.079365079365079,
"grad_norm": 0.26618778705596924,
"learning_rate": 0.00010692861010480166,
"loss": 0.0202,
"step": 3810
},
{
"epoch": 10.105820105820106,
"grad_norm": 0.29209059476852417,
"learning_rate": 0.00010649210975502324,
"loss": 0.0217,
"step": 3820
},
{
"epoch": 10.132275132275133,
"grad_norm": 0.21608638763427734,
"learning_rate": 0.00010605548514990753,
"loss": 0.022,
"step": 3830
},
{
"epoch": 10.158730158730158,
"grad_norm": 0.21823786199092865,
"learning_rate": 0.00010561874464620447,
"loss": 0.0222,
"step": 3840
},
{
"epoch": 10.185185185185185,
"grad_norm": 0.21151448786258698,
"learning_rate": 0.00010518189660288209,
"loss": 0.0232,
"step": 3850
},
{
"epoch": 10.211640211640212,
"grad_norm": 0.20828621089458466,
"learning_rate": 0.00010474494938096687,
"loss": 0.0236,
"step": 3860
},
{
"epoch": 10.238095238095237,
"grad_norm": 0.29258930683135986,
"learning_rate": 0.0001043079113433834,
"loss": 0.0242,
"step": 3870
},
{
"epoch": 10.264550264550264,
"grad_norm": 0.23913761973381042,
"learning_rate": 0.00010387079085479443,
"loss": 0.0222,
"step": 3880
},
{
"epoch": 10.291005291005291,
"grad_norm": 0.3311878740787506,
"learning_rate": 0.00010343359628144082,
"loss": 0.0235,
"step": 3890
},
{
"epoch": 10.317460317460318,
"grad_norm": 0.1927531361579895,
"learning_rate": 0.00010299633599098135,
"loss": 0.0229,
"step": 3900
},
{
"epoch": 10.343915343915343,
"grad_norm": 0.18032406270503998,
"learning_rate": 0.00010255901835233254,
"loss": 0.0233,
"step": 3910
},
{
"epoch": 10.37037037037037,
"grad_norm": 0.2483786642551422,
"learning_rate": 0.00010212165173550863,
"loss": 0.0238,
"step": 3920
},
{
"epoch": 10.396825396825397,
"grad_norm": 0.20934239029884338,
"learning_rate": 0.00010168424451146119,
"loss": 0.0222,
"step": 3930
},
{
"epoch": 10.423280423280424,
"grad_norm": 0.2310916781425476,
"learning_rate": 0.00010124680505191901,
"loss": 0.0233,
"step": 3940
},
{
"epoch": 10.44973544973545,
"grad_norm": 0.2244623601436615,
"learning_rate": 0.00010080934172922784,
"loss": 0.0242,
"step": 3950
},
{
"epoch": 10.476190476190476,
"grad_norm": 0.19429625570774078,
"learning_rate": 0.00010037186291619017,
"loss": 0.0244,
"step": 3960
},
{
"epoch": 10.502645502645503,
"grad_norm": 0.3608129918575287,
"learning_rate": 9.9934376985905e-05,
"loss": 0.0258,
"step": 3970
},
{
"epoch": 10.529100529100528,
"grad_norm": 0.25932079553604126,
"learning_rate": 9.94968923116075e-05,
"loss": 0.0279,
"step": 3980
},
{
"epoch": 10.555555555555555,
"grad_norm": 0.2552861273288727,
"learning_rate": 9.90594172665088e-05,
"loss": 0.0257,
"step": 3990
},
{
"epoch": 10.582010582010582,
"grad_norm": 0.24272707104682922,
"learning_rate": 9.86219602236357e-05,
"loss": 0.0238,
"step": 4000
},
{
"epoch": 10.60846560846561,
"grad_norm": 0.33407488465309143,
"learning_rate": 9.818452955567063e-05,
"loss": 0.0243,
"step": 4010
},
{
"epoch": 10.634920634920634,
"grad_norm": 0.25362735986709595,
"learning_rate": 9.774713363479099e-05,
"loss": 0.0228,
"step": 4020
},
{
"epoch": 10.661375661375661,
"grad_norm": 0.2759397029876709,
"learning_rate": 9.730978083250923e-05,
"loss": 0.021,
"step": 4030
},
{
"epoch": 10.687830687830688,
"grad_norm": 0.21675390005111694,
"learning_rate": 9.68724795195126e-05,
"loss": 0.0213,
"step": 4040
},
{
"epoch": 10.714285714285714,
"grad_norm": 0.23203110694885254,
"learning_rate": 9.643523806550278e-05,
"loss": 0.0206,
"step": 4050
},
{
"epoch": 10.74074074074074,
"grad_norm": 0.20020541548728943,
"learning_rate": 9.59980648390358e-05,
"loss": 0.0233,
"step": 4060
},
{
"epoch": 10.767195767195767,
"grad_norm": 0.26615190505981445,
"learning_rate": 9.556096820736193e-05,
"loss": 0.0214,
"step": 4070
},
{
"epoch": 10.793650793650794,
"grad_norm": 0.22349591553211212,
"learning_rate": 9.512395653626533e-05,
"loss": 0.0241,
"step": 4080
},
{
"epoch": 10.82010582010582,
"grad_norm": 0.2637469172477722,
"learning_rate": 9.468703818990412e-05,
"loss": 0.0221,
"step": 4090
},
{
"epoch": 10.846560846560847,
"grad_norm": 0.21997188031673431,
"learning_rate": 9.425022153065034e-05,
"loss": 0.0217,
"step": 4100
},
{
"epoch": 10.873015873015873,
"grad_norm": 0.2709382474422455,
"learning_rate": 9.381351491892966e-05,
"loss": 0.0238,
"step": 4110
},
{
"epoch": 10.899470899470899,
"grad_norm": 0.29111942648887634,
"learning_rate": 9.33769267130615e-05,
"loss": 0.0222,
"step": 4120
},
{
"epoch": 10.925925925925926,
"grad_norm": 0.2430836260318756,
"learning_rate": 9.294046526909917e-05,
"loss": 0.0203,
"step": 4130
},
{
"epoch": 10.952380952380953,
"grad_norm": 0.29842931032180786,
"learning_rate": 9.250413894066974e-05,
"loss": 0.0263,
"step": 4140
},
{
"epoch": 10.97883597883598,
"grad_norm": 0.21202361583709717,
"learning_rate": 9.206795607881419e-05,
"loss": 0.0206,
"step": 4150
},
{
"epoch": 11.005291005291005,
"grad_norm": 0.2802582085132599,
"learning_rate": 9.163192503182789e-05,
"loss": 0.0239,
"step": 4160
},
{
"epoch": 11.031746031746032,
"grad_norm": 0.176875039935112,
"learning_rate": 9.119605414510021e-05,
"loss": 0.0228,
"step": 4170
},
{
"epoch": 11.058201058201059,
"grad_norm": 0.24896326661109924,
"learning_rate": 9.076035176095533e-05,
"loss": 0.0232,
"step": 4180
},
{
"epoch": 11.084656084656086,
"grad_norm": 0.17039614915847778,
"learning_rate": 9.032482621849244e-05,
"loss": 0.0202,
"step": 4190
},
{
"epoch": 11.11111111111111,
"grad_norm": 0.17327331006526947,
"learning_rate": 8.988948585342597e-05,
"loss": 0.0198,
"step": 4200
},
{
"epoch": 11.137566137566138,
"grad_norm": 0.22939947247505188,
"learning_rate": 8.945433899792614e-05,
"loss": 0.0207,
"step": 4210
},
{
"epoch": 11.164021164021165,
"grad_norm": 0.1739424169063568,
"learning_rate": 8.901939398045963e-05,
"loss": 0.0183,
"step": 4220
},
{
"epoch": 11.19047619047619,
"grad_norm": 0.20427298545837402,
"learning_rate": 8.858465912562991e-05,
"loss": 0.0205,
"step": 4230
},
{
"epoch": 11.216931216931217,
"grad_norm": 0.30859696865081787,
"learning_rate": 8.81501427540182e-05,
"loss": 0.0223,
"step": 4240
},
{
"epoch": 11.243386243386244,
"grad_norm": 0.27141624689102173,
"learning_rate": 8.771585318202397e-05,
"loss": 0.0213,
"step": 4250
},
{
"epoch": 11.26984126984127,
"grad_norm": 0.31934279203414917,
"learning_rate": 8.728179872170587e-05,
"loss": 0.0232,
"step": 4260
},
{
"epoch": 11.296296296296296,
"grad_norm": 0.35330238938331604,
"learning_rate": 8.684798768062273e-05,
"loss": 0.0212,
"step": 4270
},
{
"epoch": 11.322751322751323,
"grad_norm": 0.27975142002105713,
"learning_rate": 8.64144283616744e-05,
"loss": 0.0234,
"step": 4280
},
{
"epoch": 11.34920634920635,
"grad_norm": 0.3430609107017517,
"learning_rate": 8.598112906294287e-05,
"loss": 0.0217,
"step": 4290
},
{
"epoch": 11.375661375661375,
"grad_norm": 0.17652027308940887,
"learning_rate": 8.554809807753364e-05,
"loss": 0.023,
"step": 4300
},
{
"epoch": 11.402116402116402,
"grad_norm": 0.24938417971134186,
"learning_rate": 8.51153436934167e-05,
"loss": 0.0236,
"step": 4310
},
{
"epoch": 11.428571428571429,
"grad_norm": 0.3108935058116913,
"learning_rate": 8.468287419326808e-05,
"loss": 0.0196,
"step": 4320
},
{
"epoch": 11.455026455026456,
"grad_norm": 0.17053309082984924,
"learning_rate": 8.425069785431137e-05,
"loss": 0.0202,
"step": 4330
},
{
"epoch": 11.481481481481481,
"grad_norm": 0.21392136812210083,
"learning_rate": 8.38188229481591e-05,
"loss": 0.0247,
"step": 4340
},
{
"epoch": 11.507936507936508,
"grad_norm": 0.27775999903678894,
"learning_rate": 8.338725774065462e-05,
"loss": 0.0209,
"step": 4350
},
{
"epoch": 11.534391534391535,
"grad_norm": 0.277752161026001,
"learning_rate": 8.295601049171384e-05,
"loss": 0.0212,
"step": 4360
},
{
"epoch": 11.56084656084656,
"grad_norm": 0.23016278445720673,
"learning_rate": 8.252508945516704e-05,
"loss": 0.0217,
"step": 4370
},
{
"epoch": 11.587301587301587,
"grad_norm": 0.22993695735931396,
"learning_rate": 8.209450287860103e-05,
"loss": 0.0232,
"step": 4380
},
{
"epoch": 11.613756613756614,
"grad_norm": 0.26726651191711426,
"learning_rate": 8.166425900320126e-05,
"loss": 0.0235,
"step": 4390
},
{
"epoch": 11.640211640211641,
"grad_norm": 0.24999497830867767,
"learning_rate": 8.123436606359403e-05,
"loss": 0.0236,
"step": 4400
},
{
"epoch": 11.666666666666666,
"grad_norm": 0.20490936934947968,
"learning_rate": 8.080483228768892e-05,
"loss": 0.0224,
"step": 4410
},
{
"epoch": 11.693121693121693,
"grad_norm": 0.1914476752281189,
"learning_rate": 8.037566589652141e-05,
"loss": 0.021,
"step": 4420
},
{
"epoch": 11.71957671957672,
"grad_norm": 0.19754275679588318,
"learning_rate": 7.994687510409536e-05,
"loss": 0.0212,
"step": 4430
},
{
"epoch": 11.746031746031747,
"grad_norm": 0.19561535120010376,
"learning_rate": 7.951846811722583e-05,
"loss": 0.021,
"step": 4440
},
{
"epoch": 11.772486772486772,
"grad_norm": 0.28997474908828735,
"learning_rate": 7.909045313538222e-05,
"loss": 0.0204,
"step": 4450
},
{
"epoch": 11.798941798941799,
"grad_norm": 0.18335215747356415,
"learning_rate": 7.8662838350531e-05,
"loss": 0.0205,
"step": 4460
},
{
"epoch": 11.825396825396826,
"grad_norm": 0.2745349109172821,
"learning_rate": 7.823563194697919e-05,
"loss": 0.0206,
"step": 4470
},
{
"epoch": 11.851851851851851,
"grad_norm": 0.2598635256290436,
"learning_rate": 7.780884210121767e-05,
"loss": 0.0238,
"step": 4480
},
{
"epoch": 11.878306878306878,
"grad_norm": 0.24171608686447144,
"learning_rate": 7.73824769817645e-05,
"loss": 0.0223,
"step": 4490
},
{
"epoch": 11.904761904761905,
"grad_norm": 0.19994641840457916,
"learning_rate": 7.695654474900875e-05,
"loss": 0.0214,
"step": 4500
},
{
"epoch": 11.93121693121693,
"grad_norm": 0.22974646091461182,
"learning_rate": 7.653105355505442e-05,
"loss": 0.0201,
"step": 4510
},
{
"epoch": 11.957671957671957,
"grad_norm": 0.24392633140087128,
"learning_rate": 7.610601154356413e-05,
"loss": 0.0189,
"step": 4520
},
{
"epoch": 11.984126984126984,
"grad_norm": 0.22622373700141907,
"learning_rate": 7.568142684960342e-05,
"loss": 0.0205,
"step": 4530
},
{
"epoch": 12.010582010582011,
"grad_norm": 0.21608489751815796,
"learning_rate": 7.525730759948508e-05,
"loss": 0.0213,
"step": 4540
},
{
"epoch": 12.037037037037036,
"grad_norm": 0.20682625472545624,
"learning_rate": 7.483366191061354e-05,
"loss": 0.0202,
"step": 4550
},
{
"epoch": 12.063492063492063,
"grad_norm": 0.22226907312870026,
"learning_rate": 7.441049789132948e-05,
"loss": 0.0222,
"step": 4560
},
{
"epoch": 12.08994708994709,
"grad_norm": 0.3071605861186981,
"learning_rate": 7.398782364075485e-05,
"loss": 0.019,
"step": 4570
},
{
"epoch": 12.116402116402117,
"grad_norm": 0.22628101706504822,
"learning_rate": 7.35656472486375e-05,
"loss": 0.0229,
"step": 4580
},
{
"epoch": 12.142857142857142,
"grad_norm": 0.24755476415157318,
"learning_rate": 7.314397679519673e-05,
"loss": 0.0231,
"step": 4590
},
{
"epoch": 12.16931216931217,
"grad_norm": 0.3300286531448364,
"learning_rate": 7.272282035096831e-05,
"loss": 0.0212,
"step": 4600
},
{
"epoch": 12.195767195767196,
"grad_norm": 0.24590083956718445,
"learning_rate": 7.230218597665024e-05,
"loss": 0.0212,
"step": 4610
},
{
"epoch": 12.222222222222221,
"grad_norm": 0.21087048947811127,
"learning_rate": 7.188208172294841e-05,
"loss": 0.0202,
"step": 4620
},
{
"epoch": 12.248677248677248,
"grad_norm": 0.18404772877693176,
"learning_rate": 7.146251563042246e-05,
"loss": 0.0225,
"step": 4630
},
{
"epoch": 12.275132275132275,
"grad_norm": 0.22472181916236877,
"learning_rate": 7.104349572933187e-05,
"loss": 0.0207,
"step": 4640
},
{
"epoch": 12.301587301587302,
"grad_norm": 0.17883093655109406,
"learning_rate": 7.06250300394825e-05,
"loss": 0.0203,
"step": 4650
},
{
"epoch": 12.328042328042327,
"grad_norm": 0.23257587850093842,
"learning_rate": 7.020712657007276e-05,
"loss": 0.0199,
"step": 4660
},
{
"epoch": 12.354497354497354,
"grad_norm": 0.30548185110092163,
"learning_rate": 6.97897933195405e-05,
"loss": 0.022,
"step": 4670
},
{
"epoch": 12.380952380952381,
"grad_norm": 0.2183784395456314,
"learning_rate": 6.937303827540996e-05,
"loss": 0.0211,
"step": 4680
},
{
"epoch": 12.407407407407407,
"grad_norm": 0.22535958886146545,
"learning_rate": 6.89568694141388e-05,
"loss": 0.0233,
"step": 4690
},
{
"epoch": 12.433862433862434,
"grad_norm": 0.2497279942035675,
"learning_rate": 6.854129470096539e-05,
"loss": 0.019,
"step": 4700
},
{
"epoch": 12.46031746031746,
"grad_norm": 0.2837906777858734,
"learning_rate": 6.812632208975667e-05,
"loss": 0.0197,
"step": 4710
},
{
"epoch": 12.486772486772487,
"grad_norm": 0.1884712129831314,
"learning_rate": 6.77119595228554e-05,
"loss": 0.022,
"step": 4720
},
{
"epoch": 12.513227513227513,
"grad_norm": 0.23100075125694275,
"learning_rate": 6.72982149309286e-05,
"loss": 0.0206,
"step": 4730
},
{
"epoch": 12.53968253968254,
"grad_norm": 0.2137850522994995,
"learning_rate": 6.688509623281559e-05,
"loss": 0.0208,
"step": 4740
},
{
"epoch": 12.566137566137566,
"grad_norm": 0.2913530766963959,
"learning_rate": 6.647261133537642e-05,
"loss": 0.0212,
"step": 4750
},
{
"epoch": 12.592592592592592,
"grad_norm": 0.21118688583374023,
"learning_rate": 6.606076813334046e-05,
"loss": 0.0199,
"step": 4760
},
{
"epoch": 12.619047619047619,
"grad_norm": 0.20391783118247986,
"learning_rate": 6.564957450915556e-05,
"loss": 0.0217,
"step": 4770
},
{
"epoch": 12.645502645502646,
"grad_norm": 0.3074709177017212,
"learning_rate": 6.523903833283688e-05,
"loss": 0.023,
"step": 4780
},
{
"epoch": 12.671957671957673,
"grad_norm": 0.22795026004314423,
"learning_rate": 6.48291674618164e-05,
"loss": 0.022,
"step": 4790
},
{
"epoch": 12.698412698412698,
"grad_norm": 0.26107490062713623,
"learning_rate": 6.441996974079264e-05,
"loss": 0.0204,
"step": 4800
},
{
"epoch": 12.724867724867725,
"grad_norm": 0.33507269620895386,
"learning_rate": 6.401145300158033e-05,
"loss": 0.0238,
"step": 4810
},
{
"epoch": 12.751322751322752,
"grad_norm": 0.27531009912490845,
"learning_rate": 6.360362506296052e-05,
"loss": 0.0214,
"step": 4820
},
{
"epoch": 12.777777777777779,
"grad_norm": 0.18488064408302307,
"learning_rate": 6.319649373053116e-05,
"loss": 0.0216,
"step": 4830
},
{
"epoch": 12.804232804232804,
"grad_norm": 0.23100340366363525,
"learning_rate": 6.279006679655746e-05,
"loss": 0.0203,
"step": 4840
},
{
"epoch": 12.83068783068783,
"grad_norm": 0.19215638935565948,
"learning_rate": 6.238435203982278e-05,
"loss": 0.0202,
"step": 4850
},
{
"epoch": 12.857142857142858,
"grad_norm": 0.20781166851520538,
"learning_rate": 6.197935722547996e-05,
"loss": 0.0214,
"step": 4860
},
{
"epoch": 12.883597883597883,
"grad_norm": 0.27218201756477356,
"learning_rate": 6.157509010490243e-05,
"loss": 0.022,
"step": 4870
},
{
"epoch": 12.91005291005291,
"grad_norm": 0.2349989116191864,
"learning_rate": 6.117155841553594e-05,
"loss": 0.0194,
"step": 4880
},
{
"epoch": 12.936507936507937,
"grad_norm": 0.22253037989139557,
"learning_rate": 6.076876988075064e-05,
"loss": 0.0199,
"step": 4890
},
{
"epoch": 12.962962962962964,
"grad_norm": 0.2949942350387573,
"learning_rate": 6.0366732209693035e-05,
"loss": 0.0206,
"step": 4900
},
{
"epoch": 12.989417989417989,
"grad_norm": 0.23691920936107635,
"learning_rate": 5.9965453097138455e-05,
"loss": 0.0192,
"step": 4910
},
{
"epoch": 13.015873015873016,
"grad_norm": 0.22252142429351807,
"learning_rate": 5.9564940223344e-05,
"loss": 0.0241,
"step": 4920
},
{
"epoch": 13.042328042328043,
"grad_norm": 0.31423982977867126,
"learning_rate": 5.916520125390124e-05,
"loss": 0.0219,
"step": 4930
},
{
"epoch": 13.068783068783068,
"grad_norm": 0.29873350262641907,
"learning_rate": 5.8766243839589755e-05,
"loss": 0.0193,
"step": 4940
},
{
"epoch": 13.095238095238095,
"grad_norm": 0.16471320390701294,
"learning_rate": 5.83680756162305e-05,
"loss": 0.0213,
"step": 4950
},
{
"epoch": 13.121693121693122,
"grad_norm": 0.22079427540302277,
"learning_rate": 5.797070420453989e-05,
"loss": 0.0216,
"step": 4960
},
{
"epoch": 13.148148148148149,
"grad_norm": 0.34240761399269104,
"learning_rate": 5.7574137209983706e-05,
"loss": 0.0207,
"step": 4970
},
{
"epoch": 13.174603174603174,
"grad_norm": 0.24463868141174316,
"learning_rate": 5.717838222263165e-05,
"loss": 0.022,
"step": 4980
},
{
"epoch": 13.201058201058201,
"grad_norm": 0.27385976910591125,
"learning_rate": 5.678344681701206e-05,
"loss": 0.0184,
"step": 4990
},
{
"epoch": 13.227513227513228,
"grad_norm": 0.18755285441875458,
"learning_rate": 5.6389338551967044e-05,
"loss": 0.021,
"step": 5000
},
{
"epoch": 13.253968253968253,
"grad_norm": 0.2744396924972534,
"learning_rate": 5.5996064970507555e-05,
"loss": 0.0208,
"step": 5010
},
{
"epoch": 13.28042328042328,
"grad_norm": 0.24906226992607117,
"learning_rate": 5.56036335996692e-05,
"loss": 0.0206,
"step": 5020
},
{
"epoch": 13.306878306878307,
"grad_norm": 0.2173585295677185,
"learning_rate": 5.5212051950368296e-05,
"loss": 0.0192,
"step": 5030
},
{
"epoch": 13.333333333333334,
"grad_norm": 0.2265741527080536,
"learning_rate": 5.4821327517257756e-05,
"loss": 0.0188,
"step": 5040
},
{
"epoch": 13.359788359788359,
"grad_norm": 0.1848832368850708,
"learning_rate": 5.4431467778584e-05,
"loss": 0.0186,
"step": 5050
},
{
"epoch": 13.386243386243386,
"grad_norm": 0.18450133502483368,
"learning_rate": 5.404248019604361e-05,
"loss": 0.0193,
"step": 5060
},
{
"epoch": 13.412698412698413,
"grad_norm": 0.1744503378868103,
"learning_rate": 5.3654372214640625e-05,
"loss": 0.018,
"step": 5070
},
{
"epoch": 13.43915343915344,
"grad_norm": 0.23882748186588287,
"learning_rate": 5.326715126254396e-05,
"loss": 0.02,
"step": 5080
},
{
"epoch": 13.465608465608465,
"grad_norm": 0.23424524068832397,
"learning_rate": 5.2880824750945404e-05,
"loss": 0.0207,
"step": 5090
},
{
"epoch": 13.492063492063492,
"grad_norm": 0.3038886487483978,
"learning_rate": 5.249540007391752e-05,
"loss": 0.0198,
"step": 5100
},
{
"epoch": 13.518518518518519,
"grad_norm": 0.180315300822258,
"learning_rate": 5.2110884608272315e-05,
"loss": 0.0184,
"step": 5110
},
{
"epoch": 13.544973544973544,
"grad_norm": 0.2620660662651062,
"learning_rate": 5.172728571342008e-05,
"loss": 0.0197,
"step": 5120
},
{
"epoch": 13.571428571428571,
"grad_norm": 0.24521809816360474,
"learning_rate": 5.1344610731228334e-05,
"loss": 0.0198,
"step": 5130
},
{
"epoch": 13.597883597883598,
"grad_norm": 0.2860581874847412,
"learning_rate": 5.0962866985881495e-05,
"loss": 0.0205,
"step": 5140
},
{
"epoch": 13.624338624338625,
"grad_norm": 0.20572273433208466,
"learning_rate": 5.05820617837406e-05,
"loss": 0.0199,
"step": 5150
},
{
"epoch": 13.65079365079365,
"grad_norm": 0.27270907163619995,
"learning_rate": 5.0202202413203506e-05,
"loss": 0.0213,
"step": 5160
},
{
"epoch": 13.677248677248677,
"grad_norm": 0.16998854279518127,
"learning_rate": 4.9823296144565346e-05,
"loss": 0.0169,
"step": 5170
},
{
"epoch": 13.703703703703704,
"grad_norm": 0.25765907764434814,
"learning_rate": 4.944535022987954e-05,
"loss": 0.0186,
"step": 5180
},
{
"epoch": 13.73015873015873,
"grad_norm": 0.2524023652076721,
"learning_rate": 4.906837190281875e-05,
"loss": 0.0199,
"step": 5190
},
{
"epoch": 13.756613756613756,
"grad_norm": 0.20784050226211548,
"learning_rate": 4.8692368378536545e-05,
"loss": 0.0211,
"step": 5200
},
{
"epoch": 13.783068783068783,
"grad_norm": 0.2766719162464142,
"learning_rate": 4.8317346853529465e-05,
"loss": 0.019,
"step": 5210
},
{
"epoch": 13.80952380952381,
"grad_norm": 0.1722410023212433,
"learning_rate": 4.7943314505498974e-05,
"loss": 0.0199,
"step": 5220
},
{
"epoch": 13.835978835978835,
"grad_norm": 0.3369690179824829,
"learning_rate": 4.757027849321427e-05,
"loss": 0.0184,
"step": 5230
},
{
"epoch": 13.862433862433862,
"grad_norm": 0.20445048809051514,
"learning_rate": 4.7198245956375395e-05,
"loss": 0.0192,
"step": 5240
},
{
"epoch": 13.88888888888889,
"grad_norm": 0.2391098439693451,
"learning_rate": 4.682722401547617e-05,
"loss": 0.0175,
"step": 5250
},
{
"epoch": 13.915343915343914,
"grad_norm": 0.23185895383358002,
"learning_rate": 4.6457219771668426e-05,
"loss": 0.0204,
"step": 5260
},
{
"epoch": 13.941798941798941,
"grad_norm": 0.19505229592323303,
"learning_rate": 4.608824030662571e-05,
"loss": 0.0192,
"step": 5270
},
{
"epoch": 13.968253968253968,
"grad_norm": 0.3211570382118225,
"learning_rate": 4.5720292682407874e-05,
"loss": 0.0179,
"step": 5280
},
{
"epoch": 13.994708994708995,
"grad_norm": 0.24055220186710358,
"learning_rate": 4.535338394132602e-05,
"loss": 0.0205,
"step": 5290
},
{
"epoch": 14.02116402116402,
"grad_norm": 0.24234339594841003,
"learning_rate": 4.498752110580752e-05,
"loss": 0.0192,
"step": 5300
},
{
"epoch": 14.047619047619047,
"grad_norm": 0.317220002412796,
"learning_rate": 4.4622711178261654e-05,
"loss": 0.019,
"step": 5310
},
{
"epoch": 14.074074074074074,
"grad_norm": 0.24808919429779053,
"learning_rate": 4.425896114094581e-05,
"loss": 0.0181,
"step": 5320
},
{
"epoch": 14.100529100529101,
"grad_norm": 0.21401309967041016,
"learning_rate": 4.38962779558315e-05,
"loss": 0.0191,
"step": 5330
},
{
"epoch": 14.126984126984127,
"grad_norm": 0.1712522953748703,
"learning_rate": 4.353466856447135e-05,
"loss": 0.0176,
"step": 5340
},
{
"epoch": 14.153439153439153,
"grad_norm": 0.1504759043455124,
"learning_rate": 4.317413988786618e-05,
"loss": 0.0182,
"step": 5350
},
{
"epoch": 14.17989417989418,
"grad_norm": 0.2544901669025421,
"learning_rate": 4.281469882633254e-05,
"loss": 0.0201,
"step": 5360
},
{
"epoch": 14.206349206349206,
"grad_norm": 0.17256955802440643,
"learning_rate": 4.2456352259370566e-05,
"loss": 0.0176,
"step": 5370
},
{
"epoch": 14.232804232804233,
"grad_norm": 0.2197188436985016,
"learning_rate": 4.209910704553254e-05,
"loss": 0.0173,
"step": 5380
},
{
"epoch": 14.25925925925926,
"grad_norm": 0.18674366176128387,
"learning_rate": 4.174297002229132e-05,
"loss": 0.0191,
"step": 5390
},
{
"epoch": 14.285714285714286,
"grad_norm": 0.19098690152168274,
"learning_rate": 4.138794800590963e-05,
"loss": 0.0186,
"step": 5400
},
{
"epoch": 14.312169312169312,
"grad_norm": 0.3053850531578064,
"learning_rate": 4.103404779130968e-05,
"loss": 0.0195,
"step": 5410
},
{
"epoch": 14.338624338624339,
"grad_norm": 0.2690741717815399,
"learning_rate": 4.0681276151942946e-05,
"loss": 0.0196,
"step": 5420
},
{
"epoch": 14.365079365079366,
"grad_norm": 0.18734146654605865,
"learning_rate": 4.032963983966064e-05,
"loss": 0.0171,
"step": 5430
},
{
"epoch": 14.39153439153439,
"grad_norm": 0.19453099370002747,
"learning_rate": 3.997914558458441e-05,
"loss": 0.0182,
"step": 5440
},
{
"epoch": 14.417989417989418,
"grad_norm": 0.14661167562007904,
"learning_rate": 3.962980009497762e-05,
"loss": 0.0166,
"step": 5450
},
{
"epoch": 14.444444444444445,
"grad_norm": 0.23112380504608154,
"learning_rate": 3.928161005711685e-05,
"loss": 0.0186,
"step": 5460
},
{
"epoch": 14.470899470899472,
"grad_norm": 0.19269657135009766,
"learning_rate": 3.8934582135164065e-05,
"loss": 0.02,
"step": 5470
},
{
"epoch": 14.497354497354497,
"grad_norm": 0.2662561237812042,
"learning_rate": 3.858872297103893e-05,
"loss": 0.0177,
"step": 5480
},
{
"epoch": 14.523809523809524,
"grad_norm": 0.16409893333911896,
"learning_rate": 3.8244039184291705e-05,
"loss": 0.0193,
"step": 5490
},
{
"epoch": 14.55026455026455,
"grad_norm": 0.2753513753414154,
"learning_rate": 3.790053737197668e-05,
"loss": 0.0202,
"step": 5500
},
{
"epoch": 14.576719576719576,
"grad_norm": 0.2297661155462265,
"learning_rate": 3.7558224108525766e-05,
"loss": 0.0172,
"step": 5510
},
{
"epoch": 14.603174603174603,
"grad_norm": 0.17383983731269836,
"learning_rate": 3.72171059456227e-05,
"loss": 0.0195,
"step": 5520
},
{
"epoch": 14.62962962962963,
"grad_norm": 0.16098396480083466,
"learning_rate": 3.687718941207767e-05,
"loss": 0.0167,
"step": 5530
},
{
"epoch": 14.656084656084657,
"grad_norm": 0.26572316884994507,
"learning_rate": 3.653848101370239e-05,
"loss": 0.0189,
"step": 5540
},
{
"epoch": 14.682539682539682,
"grad_norm": 0.19508256018161774,
"learning_rate": 3.6200987233185466e-05,
"loss": 0.02,
"step": 5550
},
{
"epoch": 14.708994708994709,
"grad_norm": 0.2050306499004364,
"learning_rate": 3.586471452996853e-05,
"loss": 0.0184,
"step": 5560
},
{
"epoch": 14.735449735449736,
"grad_norm": 0.2767617702484131,
"learning_rate": 3.5529669340122375e-05,
"loss": 0.0197,
"step": 5570
},
{
"epoch": 14.761904761904763,
"grad_norm": 0.21244259178638458,
"learning_rate": 3.5195858076223856e-05,
"loss": 0.0222,
"step": 5580
},
{
"epoch": 14.788359788359788,
"grad_norm": 0.20567195117473602,
"learning_rate": 3.48632871272333e-05,
"loss": 0.0177,
"step": 5590
},
{
"epoch": 14.814814814814815,
"grad_norm": 0.1751723289489746,
"learning_rate": 3.4531962858372005e-05,
"loss": 0.0207,
"step": 5600
},
{
"epoch": 14.841269841269842,
"grad_norm": 0.23504501581192017,
"learning_rate": 3.4201891611000526e-05,
"loss": 0.0198,
"step": 5610
},
{
"epoch": 14.867724867724867,
"grad_norm": 0.1576506644487381,
"learning_rate": 3.38730797024973e-05,
"loss": 0.0186,
"step": 5620
},
{
"epoch": 14.894179894179894,
"grad_norm": 0.2593693435192108,
"learning_rate": 3.35455334261377e-05,
"loss": 0.0198,
"step": 5630
},
{
"epoch": 14.920634920634921,
"grad_norm": 0.17774498462677002,
"learning_rate": 3.3219259050973694e-05,
"loss": 0.0172,
"step": 5640
},
{
"epoch": 14.947089947089948,
"grad_norm": 0.186201810836792,
"learning_rate": 3.2894262821713686e-05,
"loss": 0.0173,
"step": 5650
},
{
"epoch": 14.973544973544973,
"grad_norm": 0.1857055425643921,
"learning_rate": 3.257055095860306e-05,
"loss": 0.0162,
"step": 5660
},
{
"epoch": 15.0,
"grad_norm": 0.1835918426513672,
"learning_rate": 3.224812965730531e-05,
"loss": 0.0178,
"step": 5670
},
{
"epoch": 15.026455026455027,
"grad_norm": 0.22426722943782806,
"learning_rate": 3.192700508878315e-05,
"loss": 0.0184,
"step": 5680
},
{
"epoch": 15.052910052910052,
"grad_norm": 0.19990061223506927,
"learning_rate": 3.16071833991806e-05,
"loss": 0.0187,
"step": 5690
},
{
"epoch": 15.079365079365079,
"grad_norm": 0.2128632515668869,
"learning_rate": 3.12886707097053e-05,
"loss": 0.0152,
"step": 5700
},
{
"epoch": 15.105820105820106,
"grad_norm": 0.18279944360256195,
"learning_rate": 3.0971473116511394e-05,
"loss": 0.0196,
"step": 5710
},
{
"epoch": 15.132275132275133,
"grad_norm": 0.21548713743686676,
"learning_rate": 3.0655596690582764e-05,
"loss": 0.018,
"step": 5720
},
{
"epoch": 15.158730158730158,
"grad_norm": 0.3082822263240814,
"learning_rate": 3.0341047477617012e-05,
"loss": 0.0209,
"step": 5730
},
{
"epoch": 15.185185185185185,
"grad_norm": 0.15852923691272736,
"learning_rate": 3.0027831497909486e-05,
"loss": 0.0183,
"step": 5740
},
{
"epoch": 15.211640211640212,
"grad_norm": 0.17777982354164124,
"learning_rate": 2.9715954746238238e-05,
"loss": 0.0171,
"step": 5750
},
{
"epoch": 15.238095238095237,
"grad_norm": 0.14747212827205658,
"learning_rate": 2.9405423191749338e-05,
"loss": 0.0185,
"step": 5760
},
{
"epoch": 15.264550264550264,
"grad_norm": 0.2689704895019531,
"learning_rate": 2.909624277784243e-05,
"loss": 0.018,
"step": 5770
},
{
"epoch": 15.291005291005291,
"grad_norm": 0.18944290280342102,
"learning_rate": 2.878841942205709e-05,
"loss": 0.0172,
"step": 5780
},
{
"epoch": 15.317460317460318,
"grad_norm": 0.14912503957748413,
"learning_rate": 2.848195901595969e-05,
"loss": 0.0165,
"step": 5790
},
{
"epoch": 15.343915343915343,
"grad_norm": 0.1962699443101883,
"learning_rate": 2.817686742503033e-05,
"loss": 0.018,
"step": 5800
},
{
"epoch": 15.37037037037037,
"grad_norm": 0.17675501108169556,
"learning_rate": 2.7873150488550826e-05,
"loss": 0.0173,
"step": 5810
},
{
"epoch": 15.396825396825397,
"grad_norm": 0.22969458997249603,
"learning_rate": 2.7570814019492964e-05,
"loss": 0.0176,
"step": 5820
},
{
"epoch": 15.423280423280424,
"grad_norm": 0.15065893530845642,
"learning_rate": 2.7269863804407058e-05,
"loss": 0.0162,
"step": 5830
},
{
"epoch": 15.44973544973545,
"grad_norm": 0.19528082013130188,
"learning_rate": 2.6970305603311297e-05,
"loss": 0.0166,
"step": 5840
},
{
"epoch": 15.476190476190476,
"grad_norm": 0.236960306763649,
"learning_rate": 2.6672145149581605e-05,
"loss": 0.018,
"step": 5850
},
{
"epoch": 15.502645502645503,
"grad_norm": 0.22975318133831024,
"learning_rate": 2.6375388149841696e-05,
"loss": 0.0192,
"step": 5860
},
{
"epoch": 15.529100529100528,
"grad_norm": 0.17225511372089386,
"learning_rate": 2.608004028385398e-05,
"loss": 0.0162,
"step": 5870
},
{
"epoch": 15.555555555555555,
"grad_norm": 0.1448196917772293,
"learning_rate": 2.5786107204410913e-05,
"loss": 0.0167,
"step": 5880
},
{
"epoch": 15.582010582010582,
"grad_norm": 0.13869361579418182,
"learning_rate": 2.5493594537226682e-05,
"loss": 0.016,
"step": 5890
},
{
"epoch": 15.60846560846561,
"grad_norm": 0.1881704032421112,
"learning_rate": 2.5202507880829484e-05,
"loss": 0.0188,
"step": 5900
},
{
"epoch": 15.634920634920634,
"grad_norm": 0.2088797241449356,
"learning_rate": 2.4912852806454667e-05,
"loss": 0.0182,
"step": 5910
},
{
"epoch": 15.661375661375661,
"grad_norm": 0.18913866579532623,
"learning_rate": 2.4624634857937755e-05,
"loss": 0.0173,
"step": 5920
},
{
"epoch": 15.687830687830688,
"grad_norm": 0.13622598350048065,
"learning_rate": 2.433785955160851e-05,
"loss": 0.0188,
"step": 5930
},
{
"epoch": 15.714285714285714,
"grad_norm": 0.16862928867340088,
"learning_rate": 2.405253237618541e-05,
"loss": 0.0179,
"step": 5940
},
{
"epoch": 15.74074074074074,
"grad_norm": 0.23106427490711212,
"learning_rate": 2.3768658792670407e-05,
"loss": 0.0195,
"step": 5950
},
{
"epoch": 15.767195767195767,
"grad_norm": 0.2322971224784851,
"learning_rate": 2.3486244234244637e-05,
"loss": 0.0177,
"step": 5960
},
{
"epoch": 15.793650793650794,
"grad_norm": 0.18966665863990784,
"learning_rate": 2.3205294106164223e-05,
"loss": 0.0176,
"step": 5970
},
{
"epoch": 15.82010582010582,
"grad_norm": 0.20752005279064178,
"learning_rate": 2.2925813785656945e-05,
"loss": 0.0173,
"step": 5980
},
{
"epoch": 15.846560846560847,
"grad_norm": 0.14546965062618256,
"learning_rate": 2.264780862181931e-05,
"loss": 0.0182,
"step": 5990
},
{
"epoch": 15.873015873015873,
"grad_norm": 0.27061018347740173,
"learning_rate": 2.2371283935514108e-05,
"loss": 0.017,
"step": 6000
},
{
"epoch": 15.899470899470899,
"grad_norm": 0.2078131139278412,
"learning_rate": 2.2096245019268634e-05,
"loss": 0.0171,
"step": 6010
},
{
"epoch": 15.925925925925926,
"grad_norm": 0.21537968516349792,
"learning_rate": 2.1822697137173444e-05,
"loss": 0.0179,
"step": 6020
},
{
"epoch": 15.952380952380953,
"grad_norm": 0.2436002641916275,
"learning_rate": 2.1550645524781464e-05,
"loss": 0.0174,
"step": 6030
},
{
"epoch": 15.97883597883598,
"grad_norm": 0.23792731761932373,
"learning_rate": 2.1280095389007836e-05,
"loss": 0.0184,
"step": 6040
},
{
"epoch": 16.005291005291006,
"grad_norm": 0.19010236859321594,
"learning_rate": 2.1011051908030387e-05,
"loss": 0.0167,
"step": 6050
},
{
"epoch": 16.03174603174603,
"grad_norm": 0.325145959854126,
"learning_rate": 2.074352023119034e-05,
"loss": 0.0168,
"step": 6060
},
{
"epoch": 16.058201058201057,
"grad_norm": 0.20572441816329956,
"learning_rate": 2.0477505478893823e-05,
"loss": 0.0162,
"step": 6070
},
{
"epoch": 16.084656084656086,
"grad_norm": 0.15513984858989716,
"learning_rate": 2.0213012742513927e-05,
"loss": 0.0211,
"step": 6080
},
{
"epoch": 16.11111111111111,
"grad_norm": 0.18062983453273773,
"learning_rate": 1.9950047084293187e-05,
"loss": 0.0171,
"step": 6090
},
{
"epoch": 16.137566137566136,
"grad_norm": 0.20299988985061646,
"learning_rate": 1.968861353724668e-05,
"loss": 0.0173,
"step": 6100
},
{
"epoch": 16.164021164021165,
"grad_norm": 0.23924869298934937,
"learning_rate": 1.9428717105065864e-05,
"loss": 0.0176,
"step": 6110
},
{
"epoch": 16.19047619047619,
"grad_norm": 0.24165087938308716,
"learning_rate": 1.917036276202253e-05,
"loss": 0.0195,
"step": 6120
},
{
"epoch": 16.21693121693122,
"grad_norm": 0.1869451254606247,
"learning_rate": 1.891355545287382e-05,
"loss": 0.0185,
"step": 6130
},
{
"epoch": 16.243386243386244,
"grad_norm": 0.19750215113162994,
"learning_rate": 1.8658300092767544e-05,
"loss": 0.0174,
"step": 6140
},
{
"epoch": 16.26984126984127,
"grad_norm": 0.2032746970653534,
"learning_rate": 1.8404601567148026e-05,
"loss": 0.0177,
"step": 6150
},
{
"epoch": 16.296296296296298,
"grad_norm": 0.18800148367881775,
"learning_rate": 1.8152464731662666e-05,
"loss": 0.0166,
"step": 6160
},
{
"epoch": 16.322751322751323,
"grad_norm": 0.16922627389431,
"learning_rate": 1.790189441206899e-05,
"loss": 0.0158,
"step": 6170
},
{
"epoch": 16.349206349206348,
"grad_norm": 0.21169210970401764,
"learning_rate": 1.76528954041423e-05,
"loss": 0.0167,
"step": 6180
},
{
"epoch": 16.375661375661377,
"grad_norm": 0.24106602370738983,
"learning_rate": 1.740547247358384e-05,
"loss": 0.0157,
"step": 6190
},
{
"epoch": 16.402116402116402,
"grad_norm": 0.20507925748825073,
"learning_rate": 1.715963035592969e-05,
"loss": 0.0179,
"step": 6200
},
{
"epoch": 16.428571428571427,
"grad_norm": 0.18841566145420074,
"learning_rate": 1.6915373756460006e-05,
"loss": 0.0179,
"step": 6210
},
{
"epoch": 16.455026455026456,
"grad_norm": 0.20179235935211182,
"learning_rate": 1.6672707350108973e-05,
"loss": 0.0168,
"step": 6220
},
{
"epoch": 16.48148148148148,
"grad_norm": 0.1482539176940918,
"learning_rate": 1.64316357813755e-05,
"loss": 0.0149,
"step": 6230
},
{
"epoch": 16.507936507936506,
"grad_norm": 0.23648443818092346,
"learning_rate": 1.6192163664234082e-05,
"loss": 0.0193,
"step": 6240
},
{
"epoch": 16.534391534391535,
"grad_norm": 0.1912665218114853,
"learning_rate": 1.5954295582046642e-05,
"loss": 0.0159,
"step": 6250
},
{
"epoch": 16.56084656084656,
"grad_norm": 0.20422996580600739,
"learning_rate": 1.5718036087474796e-05,
"loss": 0.0171,
"step": 6260
},
{
"epoch": 16.58730158730159,
"grad_norm": 0.13860765099525452,
"learning_rate": 1.5483389702392657e-05,
"loss": 0.0164,
"step": 6270
},
{
"epoch": 16.613756613756614,
"grad_norm": 0.20141619443893433,
"learning_rate": 1.5250360917800356e-05,
"loss": 0.0163,
"step": 6280
},
{
"epoch": 16.64021164021164,
"grad_norm": 0.2113092839717865,
"learning_rate": 1.5018954193738077e-05,
"loss": 0.0176,
"step": 6290
},
{
"epoch": 16.666666666666668,
"grad_norm": 0.23735524713993073,
"learning_rate": 1.478917395920062e-05,
"loss": 0.0157,
"step": 6300
},
{
"epoch": 16.693121693121693,
"grad_norm": 0.1875062733888626,
"learning_rate": 1.4561024612052754e-05,
"loss": 0.0161,
"step": 6310
},
{
"epoch": 16.719576719576718,
"grad_norm": 0.18014168739318848,
"learning_rate": 1.4334510518944932e-05,
"loss": 0.0162,
"step": 6320
},
{
"epoch": 16.746031746031747,
"grad_norm": 0.23076355457305908,
"learning_rate": 1.4109636015229766e-05,
"loss": 0.0177,
"step": 6330
},
{
"epoch": 16.772486772486772,
"grad_norm": 0.21106301248073578,
"learning_rate": 1.3886405404879055e-05,
"loss": 0.0165,
"step": 6340
},
{
"epoch": 16.798941798941797,
"grad_norm": 0.24004612863063812,
"learning_rate": 1.3664822960401413e-05,
"loss": 0.0174,
"step": 6350
},
{
"epoch": 16.825396825396826,
"grad_norm": 0.17413417994976044,
"learning_rate": 1.3444892922760444e-05,
"loss": 0.0143,
"step": 6360
},
{
"epoch": 16.85185185185185,
"grad_norm": 0.1467863917350769,
"learning_rate": 1.3226619501293692e-05,
"loss": 0.0166,
"step": 6370
},
{
"epoch": 16.87830687830688,
"grad_norm": 0.17535103857517242,
"learning_rate": 1.3010006873631919e-05,
"loss": 0.0162,
"step": 6380
},
{
"epoch": 16.904761904761905,
"grad_norm": 0.16379229724407196,
"learning_rate": 1.2795059185619229e-05,
"loss": 0.0167,
"step": 6390
},
{
"epoch": 16.93121693121693,
"grad_norm": 0.17413221299648285,
"learning_rate": 1.2581780551233801e-05,
"loss": 0.0179,
"step": 6400
},
{
"epoch": 16.95767195767196,
"grad_norm": 0.2470989227294922,
"learning_rate": 1.237017505250897e-05,
"loss": 0.0179,
"step": 6410
},
{
"epoch": 16.984126984126984,
"grad_norm": 0.16806478798389435,
"learning_rate": 1.2160246739455206e-05,
"loss": 0.0193,
"step": 6420
},
{
"epoch": 17.01058201058201,
"grad_norm": 0.19091880321502686,
"learning_rate": 1.195199962998268e-05,
"loss": 0.0158,
"step": 6430
},
{
"epoch": 17.037037037037038,
"grad_norm": 0.20383872091770172,
"learning_rate": 1.1745437709824114e-05,
"loss": 0.0161,
"step": 6440
},
{
"epoch": 17.063492063492063,
"grad_norm": 0.15551620721817017,
"learning_rate": 1.1540564932458753e-05,
"loss": 0.0155,
"step": 6450
},
{
"epoch": 17.08994708994709,
"grad_norm": 0.2658255100250244,
"learning_rate": 1.1337385219036623e-05,
"loss": 0.0176,
"step": 6460
},
{
"epoch": 17.116402116402117,
"grad_norm": 0.2648050785064697,
"learning_rate": 1.1135902458303393e-05,
"loss": 0.0152,
"step": 6470
},
{
"epoch": 17.142857142857142,
"grad_norm": 0.17353011667728424,
"learning_rate": 1.0936120506525994e-05,
"loss": 0.0151,
"step": 6480
},
{
"epoch": 17.16931216931217,
"grad_norm": 0.1721569001674652,
"learning_rate": 1.0738043187418922e-05,
"loss": 0.0193,
"step": 6490
},
{
"epoch": 17.195767195767196,
"grad_norm": 0.2267681360244751,
"learning_rate": 1.0541674292070868e-05,
"loss": 0.0173,
"step": 6500
},
{
"epoch": 17.22222222222222,
"grad_norm": 0.28807544708251953,
"learning_rate": 1.0347017578872276e-05,
"loss": 0.0181,
"step": 6510
},
{
"epoch": 17.24867724867725,
"grad_norm": 0.1608646959066391,
"learning_rate": 1.0154076773443432e-05,
"loss": 0.0166,
"step": 6520
},
{
"epoch": 17.275132275132275,
"grad_norm": 0.26136720180511475,
"learning_rate": 9.962855568563067e-06,
"loss": 0.0166,
"step": 6530
},
{
"epoch": 17.3015873015873,
"grad_norm": 0.17855539917945862,
"learning_rate": 9.773357624097678e-06,
"loss": 0.0152,
"step": 6540
},
{
"epoch": 17.32804232804233,
"grad_norm": 0.14467965066432953,
"learning_rate": 9.585586566931625e-06,
"loss": 0.0161,
"step": 6550
},
{
"epoch": 17.354497354497354,
"grad_norm": 0.2321748286485672,
"learning_rate": 9.39954599089754e-06,
"loss": 0.0161,
"step": 6560
},
{
"epoch": 17.38095238095238,
"grad_norm": 0.18389014899730682,
"learning_rate": 9.215239456707635e-06,
"loss": 0.0163,
"step": 6570
},
{
"epoch": 17.40740740740741,
"grad_norm": 0.13891808688640594,
"learning_rate": 9.032670491885575e-06,
"loss": 0.0138,
"step": 6580
},
{
"epoch": 17.433862433862434,
"grad_norm": 0.21685755252838135,
"learning_rate": 8.851842590698877e-06,
"loss": 0.0169,
"step": 6590
},
{
"epoch": 17.46031746031746,
"grad_norm": 0.1880199909210205,
"learning_rate": 8.672759214092042e-06,
"loss": 0.0146,
"step": 6600
},
{
"epoch": 17.486772486772487,
"grad_norm": 0.14832906424999237,
"learning_rate": 8.49542378962046e-06,
"loss": 0.0177,
"step": 6610
},
{
"epoch": 17.513227513227513,
"grad_norm": 0.1760573536157608,
"learning_rate": 8.319839711384603e-06,
"loss": 0.0181,
"step": 6620
},
{
"epoch": 17.53968253968254,
"grad_norm": 0.19962358474731445,
"learning_rate": 8.146010339965193e-06,
"loss": 0.0173,
"step": 6630
},
{
"epoch": 17.566137566137566,
"grad_norm": 0.16452929377555847,
"learning_rate": 7.973939002358833e-06,
"loss": 0.0198,
"step": 6640
},
{
"epoch": 17.59259259259259,
"grad_norm": 0.15749408304691315,
"learning_rate": 7.803628991914358e-06,
"loss": 0.0191,
"step": 6650
},
{
"epoch": 17.61904761904762,
"grad_norm": 0.25544941425323486,
"learning_rate": 7.635083568269818e-06,
"loss": 0.015,
"step": 6660
},
{
"epoch": 17.645502645502646,
"grad_norm": 0.19979047775268555,
"learning_rate": 7.468305957290012e-06,
"loss": 0.0173,
"step": 6670
},
{
"epoch": 17.67195767195767,
"grad_norm": 0.20462428033351898,
"learning_rate": 7.3032993510048e-06,
"loss": 0.0168,
"step": 6680
},
{
"epoch": 17.6984126984127,
"grad_norm": 0.16061115264892578,
"learning_rate": 7.14006690754806e-06,
"loss": 0.0153,
"step": 6690
},
{
"epoch": 17.724867724867725,
"grad_norm": 0.2491457462310791,
"learning_rate": 6.9786117510971415e-06,
"loss": 0.017,
"step": 6700
},
{
"epoch": 17.75132275132275,
"grad_norm": 0.21707351505756378,
"learning_rate": 6.8189369718131165e-06,
"loss": 0.0162,
"step": 6710
},
{
"epoch": 17.77777777777778,
"grad_norm": 0.1476307213306427,
"learning_rate": 6.661045625781659e-06,
"loss": 0.0177,
"step": 6720
},
{
"epoch": 17.804232804232804,
"grad_norm": 0.1483127921819687,
"learning_rate": 6.504940734954512e-06,
"loss": 0.0176,
"step": 6730
},
{
"epoch": 17.83068783068783,
"grad_norm": 0.2029716670513153,
"learning_rate": 6.350625287091683e-06,
"loss": 0.0163,
"step": 6740
},
{
"epoch": 17.857142857142858,
"grad_norm": 0.17894791066646576,
"learning_rate": 6.198102235704251e-06,
"loss": 0.0184,
"step": 6750
},
{
"epoch": 17.883597883597883,
"grad_norm": 0.1477387249469757,
"learning_rate": 6.047374499997827e-06,
"loss": 0.0159,
"step": 6760
},
{
"epoch": 17.91005291005291,
"grad_norm": 0.2098730206489563,
"learning_rate": 5.898444964816652e-06,
"loss": 0.0168,
"step": 6770
},
{
"epoch": 17.936507936507937,
"grad_norm": 0.15353702008724213,
"learning_rate": 5.751316480588475e-06,
"loss": 0.0164,
"step": 6780
},
{
"epoch": 17.962962962962962,
"grad_norm": 0.15158168971538544,
"learning_rate": 5.6059918632699215e-06,
"loss": 0.0176,
"step": 6790
},
{
"epoch": 17.98941798941799,
"grad_norm": 0.19479572772979736,
"learning_rate": 5.462473894292597e-06,
"loss": 0.017,
"step": 6800
},
{
"epoch": 18.015873015873016,
"grad_norm": 0.1755516678094864,
"learning_rate": 5.3207653205098905e-06,
"loss": 0.0165,
"step": 6810
},
{
"epoch": 18.04232804232804,
"grad_norm": 0.2217196822166443,
"learning_rate": 5.18086885414436e-06,
"loss": 0.0185,
"step": 6820
},
{
"epoch": 18.06878306878307,
"grad_norm": 0.1340661495923996,
"learning_rate": 5.042787172735863e-06,
"loss": 0.0157,
"step": 6830
},
{
"epoch": 18.095238095238095,
"grad_norm": 0.2034100890159607,
"learning_rate": 4.906522919090318e-06,
"loss": 0.0144,
"step": 6840
},
{
"epoch": 18.12169312169312,
"grad_norm": 0.21613432466983795,
"learning_rate": 4.772078701229044e-06,
"loss": 0.0174,
"step": 6850
},
{
"epoch": 18.14814814814815,
"grad_norm": 0.20390327274799347,
"learning_rate": 4.6394570923389055e-06,
"loss": 0.0142,
"step": 6860
},
{
"epoch": 18.174603174603174,
"grad_norm": 0.2700379192829132,
"learning_rate": 4.508660630723116e-06,
"loss": 0.0161,
"step": 6870
},
{
"epoch": 18.201058201058203,
"grad_norm": 0.21042710542678833,
"learning_rate": 4.379691819752507e-06,
"loss": 0.0171,
"step": 6880
},
{
"epoch": 18.227513227513228,
"grad_norm": 0.20383331179618835,
"learning_rate": 4.252553127817749e-06,
"loss": 0.0165,
"step": 6890
},
{
"epoch": 18.253968253968253,
"grad_norm": 0.21164458990097046,
"learning_rate": 4.1272469882820745e-06,
"loss": 0.0163,
"step": 6900
},
{
"epoch": 18.280423280423282,
"grad_norm": 0.1821017563343048,
"learning_rate": 4.00377579943465e-06,
"loss": 0.0187,
"step": 6910
},
{
"epoch": 18.306878306878307,
"grad_norm": 0.1857336163520813,
"learning_rate": 3.882141924444727e-06,
"loss": 0.0175,
"step": 6920
},
{
"epoch": 18.333333333333332,
"grad_norm": 0.20543745160102844,
"learning_rate": 3.762347691316437e-06,
"loss": 0.0157,
"step": 6930
},
{
"epoch": 18.35978835978836,
"grad_norm": 0.202567458152771,
"learning_rate": 3.6443953928441687e-06,
"loss": 0.0141,
"step": 6940
},
{
"epoch": 18.386243386243386,
"grad_norm": 0.19636744260787964,
"learning_rate": 3.52828728656871e-06,
"loss": 0.017,
"step": 6950
},
{
"epoch": 18.41269841269841,
"grad_norm": 0.22278830409049988,
"learning_rate": 3.4140255947340605e-06,
"loss": 0.0173,
"step": 6960
},
{
"epoch": 18.43915343915344,
"grad_norm": 0.19748513400554657,
"learning_rate": 3.3016125042448776e-06,
"loss": 0.0192,
"step": 6970
},
{
"epoch": 18.465608465608465,
"grad_norm": 0.18418549001216888,
"learning_rate": 3.191050166624632e-06,
"loss": 0.0161,
"step": 6980
},
{
"epoch": 18.49206349206349,
"grad_norm": 0.21692697703838348,
"learning_rate": 3.082340697974395e-06,
"loss": 0.0156,
"step": 6990
},
{
"epoch": 18.51851851851852,
"grad_norm": 0.14934659004211426,
"learning_rate": 2.9754861789324073e-06,
"loss": 0.0161,
"step": 7000
},
{
"epoch": 18.544973544973544,
"grad_norm": 0.20961934328079224,
"learning_rate": 2.8704886546341956e-06,
"loss": 0.0164,
"step": 7010
},
{
"epoch": 18.571428571428573,
"grad_norm": 0.16422855854034424,
"learning_rate": 2.767350134673441e-06,
"loss": 0.0153,
"step": 7020
},
{
"epoch": 18.597883597883598,
"grad_norm": 0.17347002029418945,
"learning_rate": 2.66607259306354e-06,
"loss": 0.0157,
"step": 7030
},
{
"epoch": 18.624338624338623,
"grad_norm": 0.10744515806436539,
"learning_rate": 2.5666579681998036e-06,
"loss": 0.0163,
"step": 7040
},
{
"epoch": 18.650793650793652,
"grad_norm": 0.16614548861980438,
"learning_rate": 2.4691081628223533e-06,
"loss": 0.0143,
"step": 7050
},
{
"epoch": 18.677248677248677,
"grad_norm": 0.23060919344425201,
"learning_rate": 2.3734250439797155e-06,
"loss": 0.0156,
"step": 7060
},
{
"epoch": 18.703703703703702,
"grad_norm": 0.21790482103824615,
"learning_rate": 2.2796104429930963e-06,
"loss": 0.017,
"step": 7070
},
{
"epoch": 18.73015873015873,
"grad_norm": 0.19607238471508026,
"learning_rate": 2.1876661554213197e-06,
"loss": 0.0147,
"step": 7080
},
{
"epoch": 18.756613756613756,
"grad_norm": 0.14693410694599152,
"learning_rate": 2.097593941026421e-06,
"loss": 0.0171,
"step": 7090
},
{
"epoch": 18.78306878306878,
"grad_norm": 0.15378044545650482,
"learning_rate": 2.009395523740054e-06,
"loss": 0.0153,
"step": 7100
},
{
"epoch": 18.80952380952381,
"grad_norm": 0.1617782860994339,
"learning_rate": 1.923072591630415e-06,
"loss": 0.0169,
"step": 7110
},
{
"epoch": 18.835978835978835,
"grad_norm": 0.16385476291179657,
"learning_rate": 1.838626796869991e-06,
"loss": 0.0164,
"step": 7120
},
{
"epoch": 18.862433862433864,
"grad_norm": 0.16427811980247498,
"learning_rate": 1.7560597557038982e-06,
"loss": 0.0172,
"step": 7130
},
{
"epoch": 18.88888888888889,
"grad_norm": 0.15692010521888733,
|
"learning_rate": 1.6753730484189822e-06, |
|
"loss": 0.0158, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 18.915343915343914, |
|
"grad_norm": 0.17159394919872284, |
|
"learning_rate": 1.59656821931351e-06, |
|
"loss": 0.0144, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 18.941798941798943, |
|
"grad_norm": 0.13447363674640656, |
|
"learning_rate": 1.5196467766677047e-06, |
|
"loss": 0.0162, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 18.96825396825397, |
|
"grad_norm": 0.16987329721450806, |
|
"learning_rate": 1.4446101927148014e-06, |
|
"loss": 0.0166, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 18.994708994708994, |
|
"grad_norm": 0.1536870151758194, |
|
"learning_rate": 1.371459903612915e-06, |
|
"loss": 0.0168, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 19.021164021164022, |
|
"grad_norm": 0.21800269186496735, |
|
"learning_rate": 1.3001973094175169e-06, |
|
"loss": 0.0172, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 19.047619047619047, |
|
"grad_norm": 0.17673099040985107, |
|
"learning_rate": 1.2308237740546791e-06, |
|
"loss": 0.0163, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 19.074074074074073, |
|
"grad_norm": 0.15784895420074463, |
|
"learning_rate": 1.1633406252949174e-06, |
|
"loss": 0.0155, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 19.1005291005291, |
|
"grad_norm": 0.1905474215745926, |
|
"learning_rate": 1.097749154727834e-06, |
|
"loss": 0.0139, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 19.126984126984127, |
|
"grad_norm": 0.1780664622783661, |
|
"learning_rate": 1.034050617737381e-06, |
|
"loss": 0.0172, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 19.15343915343915, |
|
"grad_norm": 0.16737215220928192, |
|
"learning_rate": 9.722462334777805e-07, |
|
"loss": 0.0149, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 19.17989417989418, |
|
"grad_norm": 0.17369748651981354, |
|
"learning_rate": 9.123371848502871e-07, |
|
"loss": 0.0169, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 19.206349206349206, |
|
"grad_norm": 0.17061293125152588, |
|
"learning_rate": 8.54324618480462e-07, |
|
"loss": 0.0148, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 19.232804232804234, |
|
"grad_norm": 0.2849694490432739, |
|
"learning_rate": 7.982096446962684e-07, |
|
"loss": 0.0172, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 19.25925925925926, |
|
"grad_norm": 0.16373994946479797, |
|
"learning_rate": 7.439933375068097e-07, |
|
"loss": 0.0155, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 19.285714285714285, |
|
"grad_norm": 0.1858544796705246, |
|
"learning_rate": 6.91676734581781e-07, |
|
"loss": 0.0178, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 19.312169312169313, |
|
"grad_norm": 0.22137916088104248, |
|
"learning_rate": 6.412608372315943e-07, |
|
"loss": 0.0158, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 19.33862433862434, |
|
"grad_norm": 0.21592257916927338, |
|
"learning_rate": 5.927466103882395e-07, |
|
"loss": 0.0147, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 19.365079365079364, |
|
"grad_norm": 0.16293910145759583, |
|
"learning_rate": 5.461349825867879e-07, |
|
"loss": 0.0164, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 19.391534391534393, |
|
"grad_norm": 0.19730526208877563, |
|
"learning_rate": 5.014268459476501e-07, |
|
"loss": 0.0149, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 19.417989417989418, |
|
"grad_norm": 0.32044050097465515, |
|
"learning_rate": 4.586230561594462e-07, |
|
"loss": 0.015, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 19.444444444444443, |
|
"grad_norm": 0.1815277338027954, |
|
"learning_rate": 4.177244324627183e-07, |
|
"loss": 0.0148, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 19.47089947089947, |
|
"grad_norm": 0.19404444098472595, |
|
"learning_rate": 3.7873175763415427e-07, |
|
"loss": 0.0198, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 19.497354497354497, |
|
"grad_norm": 0.24406063556671143, |
|
"learning_rate": 3.416457779716664e-07, |
|
"loss": 0.0179, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 19.523809523809526, |
|
"grad_norm": 0.19478613138198853, |
|
"learning_rate": 3.0646720328010303e-07, |
|
"loss": 0.0168, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 19.55026455026455, |
|
"grad_norm": 0.1256127655506134, |
|
"learning_rate": 2.731967068576369e-07, |
|
"loss": 0.0178, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 19.576719576719576, |
|
"grad_norm": 0.17166411876678467, |
|
"learning_rate": 2.4183492548288667e-07, |
|
"loss": 0.0165, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 19.603174603174605, |
|
"grad_norm": 0.1664625108242035, |
|
"learning_rate": 2.1238245940276014e-07, |
|
"loss": 0.0148, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 19.62962962962963, |
|
"grad_norm": 0.1637825071811676, |
|
"learning_rate": 1.8483987232094101e-07, |
|
"loss": 0.0165, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 19.656084656084655, |
|
"grad_norm": 0.17463766038417816, |
|
"learning_rate": 1.5920769138706438e-07, |
|
"loss": 0.0154, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 19.682539682539684, |
|
"grad_norm": 0.1705995351076126, |
|
"learning_rate": 1.3548640718669124e-07, |
|
"loss": 0.0151, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 19.70899470899471, |
|
"grad_norm": 0.14153502881526947, |
|
"learning_rate": 1.1367647373190516e-07, |
|
"loss": 0.016, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 19.735449735449734, |
|
"grad_norm": 0.2553439438343048, |
|
"learning_rate": 9.377830845258561e-08, |
|
"loss": 0.0162, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 19.761904761904763, |
|
"grad_norm": 0.1255766898393631, |
|
"learning_rate": 7.579229218843686e-08, |
|
"loss": 0.0158, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 19.788359788359788, |
|
"grad_norm": 0.12066392600536346, |
|
"learning_rate": 5.971876918172692e-08, |
|
"loss": 0.016, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 19.814814814814813, |
|
"grad_norm": 0.14524872601032257, |
|
"learning_rate": 4.555804707067069e-08, |
|
"loss": 0.0133, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 19.841269841269842, |
|
"grad_norm": 0.19802507758140564, |
|
"learning_rate": 3.331039688353465e-08, |
|
"loss": 0.0157, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 19.867724867724867, |
|
"grad_norm": 0.16935652494430542, |
|
"learning_rate": 2.297605303347439e-08, |
|
"loss": 0.0162, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 19.894179894179896, |
|
"grad_norm": 0.18010082840919495, |
|
"learning_rate": 1.4555213314026984e-08, |
|
"loss": 0.0155, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 19.92063492063492, |
|
"grad_norm": 0.14799325168132782, |
|
"learning_rate": 8.04803889533634e-09, |
|
"loss": 0.0161, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 19.947089947089946, |
|
"grad_norm": 0.1539669781923294, |
|
"learning_rate": 3.4546543210778325e-09, |
|
"loss": 0.0154, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 19.973544973544975, |
|
"grad_norm": 0.1273125857114792, |
|
"learning_rate": 7.751475060491231e-10, |
|
"loss": 0.0144, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 19.997354497354497, |
|
"step": 7559, |
|
"total_flos": 5.331242062105344e+17, |
|
"train_loss": 0.030525887037320242, |
|
"train_runtime": 4964.64, |
|
"train_samples_per_second": 97.444, |
|
"train_steps_per_second": 1.523 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 7559, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 20, |
|
"save_steps": 10000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 5.331242062105344e+17, |
|
"train_batch_size": 64, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|