{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 19.982758620689655,
  "eval_steps": 500,
  "global_step": 1159,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.1724137931034483,
      "grad_norm": 4.288447380065918,
      "learning_rate": 3.4482758620689657e-05,
      "loss": 1.24,
      "step": 10
    },
    {
      "epoch": 0.3448275862068966,
      "grad_norm": 3.9249799251556396,
      "learning_rate": 6.896551724137931e-05,
      "loss": 0.5159,
      "step": 20
    },
    {
      "epoch": 0.5172413793103449,
      "grad_norm": 1.7602132558822632,
      "learning_rate": 0.00010344827586206898,
      "loss": 0.3011,
      "step": 30
    },
    {
      "epoch": 0.6896551724137931,
      "grad_norm": 0.8162740468978882,
      "learning_rate": 0.00013793103448275863,
      "loss": 0.1873,
      "step": 40
    },
    {
      "epoch": 0.8620689655172413,
      "grad_norm": 0.7871344685554504,
      "learning_rate": 0.00017241379310344826,
      "loss": 0.1371,
      "step": 50
    },
    {
      "epoch": 1.0344827586206897,
      "grad_norm": 1.0209136009216309,
      "learning_rate": 0.00019999837162688113,
      "loss": 0.1282,
      "step": 60
    },
    {
      "epoch": 1.206896551724138,
      "grad_norm": 0.5767734050750732,
      "learning_rate": 0.00019994138413588491,
      "loss": 0.117,
      "step": 70
    },
    {
      "epoch": 1.3793103448275863,
      "grad_norm": 0.9807557463645935,
      "learning_rate": 0.00019980303101316228,
      "loss": 0.1053,
      "step": 80
    },
    {
      "epoch": 1.5517241379310345,
      "grad_norm": 0.8148991465568542,
      "learning_rate": 0.00019958342489662917,
      "loss": 0.095,
      "step": 90
    },
    {
      "epoch": 1.7241379310344827,
      "grad_norm": 0.7483856678009033,
      "learning_rate": 0.00019928274457498818,
      "loss": 0.0823,
      "step": 100
    },
    {
      "epoch": 1.896551724137931,
      "grad_norm": 0.38755494356155396,
      "learning_rate": 0.00019890123484217056,
      "loss": 0.0852,
      "step": 110
    },
    {
      "epoch": 2.0689655172413794,
      "grad_norm": 0.40888020396232605,
      "learning_rate": 0.0001984392062980413,
      "loss": 0.0776,
      "step": 120
    },
    {
      "epoch": 2.2413793103448274,
      "grad_norm": 0.4752354919910431,
      "learning_rate": 0.00019789703509552945,
      "loss": 0.065,
      "step": 130
    },
    {
      "epoch": 2.413793103448276,
      "grad_norm": 0.2943930923938751,
      "learning_rate": 0.00019727516263438918,
      "loss": 0.0703,
      "step": 140
    },
    {
      "epoch": 2.586206896551724,
      "grad_norm": 0.3636096119880676,
      "learning_rate": 0.0001965740952018417,
      "loss": 0.0734,
      "step": 150
    },
    {
      "epoch": 2.7586206896551726,
      "grad_norm": 0.49514421820640564,
      "learning_rate": 0.00019579440356038967,
      "loss": 0.0685,
      "step": 160
    },
    {
      "epoch": 2.9310344827586206,
      "grad_norm": 0.7530002593994141,
      "learning_rate": 0.00019493672248314044,
      "loss": 0.0653,
      "step": 170
    },
    {
      "epoch": 3.103448275862069,
      "grad_norm": 0.4943099021911621,
      "learning_rate": 0.00019400175023701586,
      "loss": 0.0569,
      "step": 180
    },
    {
      "epoch": 3.2758620689655173,
      "grad_norm": 0.5510227084159851,
      "learning_rate": 0.00019299024801426994,
      "loss": 0.0612,
      "step": 190
    },
    {
      "epoch": 3.4482758620689653,
      "grad_norm": 0.4278947114944458,
      "learning_rate": 0.0001919030393127765,
      "loss": 0.0554,
      "step": 200
    },
    {
      "epoch": 3.6206896551724137,
      "grad_norm": 0.3584755063056946,
      "learning_rate": 0.0001907410092655921,
      "loss": 0.0599,
      "step": 210
    },
    {
      "epoch": 3.793103448275862,
      "grad_norm": 0.5437096953392029,
      "learning_rate": 0.00018950510392033945,
      "loss": 0.0597,
      "step": 220
    },
    {
      "epoch": 3.9655172413793105,
      "grad_norm": 0.3559582531452179,
      "learning_rate": 0.0001881963294689984,
      "loss": 0.0569,
      "step": 230
    },
    {
      "epoch": 4.137931034482759,
      "grad_norm": 0.4884796440601349,
      "learning_rate": 0.00018681575142873126,
      "loss": 0.0578,
      "step": 240
    },
    {
      "epoch": 4.310344827586207,
      "grad_norm": 0.4382438659667969,
      "learning_rate": 0.0001853644937744095,
      "loss": 0.051,
      "step": 250
    },
    {
      "epoch": 4.482758620689655,
      "grad_norm": 0.34379124641418457,
      "learning_rate": 0.0001838437380235483,
      "loss": 0.0529,
      "step": 260
    },
    {
      "epoch": 4.655172413793103,
      "grad_norm": 0.5280494093894958,
      "learning_rate": 0.0001822547222743933,
      "loss": 0.0503,
      "step": 270
    },
    {
      "epoch": 4.827586206896552,
      "grad_norm": 0.3714671730995178,
      "learning_rate": 0.00018059874019794351,
      "loss": 0.0474,
      "step": 280
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.2870706617832184,
      "learning_rate": 0.00017887713998473022,
      "loss": 0.0473,
      "step": 290
    },
    {
      "epoch": 5.172413793103448,
      "grad_norm": 0.35148724913597107,
      "learning_rate": 0.00017709132324720997,
      "loss": 0.0492,
      "step": 300
    },
    {
      "epoch": 5.344827586206897,
      "grad_norm": 0.2515222728252411,
      "learning_rate": 0.00017524274387866484,
      "loss": 0.0462,
      "step": 310
    },
    {
      "epoch": 5.517241379310345,
      "grad_norm": 0.22452038526535034,
      "learning_rate": 0.00017333290686953915,
      "loss": 0.0414,
      "step": 320
    },
    {
      "epoch": 5.689655172413794,
      "grad_norm": 0.3675800561904907,
      "learning_rate": 0.00017136336708217632,
      "loss": 0.0483,
      "step": 330
    },
    {
      "epoch": 5.862068965517241,
      "grad_norm": 0.3029170334339142,
      "learning_rate": 0.00016933572798495328,
      "loss": 0.0553,
      "step": 340
    },
    {
      "epoch": 6.0344827586206895,
      "grad_norm": 0.2198949158191681,
      "learning_rate": 0.00016725164034684317,
      "loss": 0.0466,
      "step": 350
    },
    {
      "epoch": 6.206896551724138,
      "grad_norm": 0.1851377636194229,
      "learning_rate": 0.00016511280089346876,
      "loss": 0.0462,
      "step": 360
    },
    {
      "epoch": 6.379310344827586,
      "grad_norm": 0.3081173300743103,
      "learning_rate": 0.00016292095092574154,
      "loss": 0.0433,
      "step": 370
    },
    {
      "epoch": 6.551724137931035,
      "grad_norm": 0.2842998206615448,
      "learning_rate": 0.00016067787490221008,
      "loss": 0.0389,
      "step": 380
    },
    {
      "epoch": 6.724137931034483,
      "grad_norm": 0.31966182589530945,
      "learning_rate": 0.00015838539898627242,
      "loss": 0.04,
      "step": 390
    },
    {
      "epoch": 6.896551724137931,
      "grad_norm": 0.23094028234481812,
      "learning_rate": 0.0001560453895594354,
      "loss": 0.0395,
      "step": 400
    },
    {
      "epoch": 7.068965517241379,
      "grad_norm": 0.35125771164894104,
      "learning_rate": 0.00015365975170183085,
      "loss": 0.0445,
      "step": 410
    },
    {
      "epoch": 7.241379310344827,
      "grad_norm": 0.18835288286209106,
      "learning_rate": 0.00015123042764122584,
      "loss": 0.0391,
      "step": 420
    },
    {
      "epoch": 7.413793103448276,
      "grad_norm": 0.26209092140197754,
      "learning_rate": 0.00014875939517179016,
      "loss": 0.0433,
      "step": 430
    },
    {
      "epoch": 7.586206896551724,
      "grad_norm": 0.23597943782806396,
      "learning_rate": 0.00014624866604390758,
      "loss": 0.0383,
      "step": 440
    },
    {
      "epoch": 7.758620689655173,
      "grad_norm": 0.32963162660598755,
      "learning_rate": 0.00014370028432634252,
      "loss": 0.0438,
      "step": 450
    },
    {
      "epoch": 7.931034482758621,
      "grad_norm": 0.2865336537361145,
      "learning_rate": 0.00014111632474209505,
      "loss": 0.0367,
      "step": 460
    },
    {
      "epoch": 8.10344827586207,
      "grad_norm": 0.22742541134357452,
      "learning_rate": 0.0001384988909792995,
      "loss": 0.0366,
      "step": 470
    },
    {
      "epoch": 8.275862068965518,
      "grad_norm": 0.23098573088645935,
      "learning_rate": 0.00013585011397854117,
      "loss": 0.0418,
      "step": 480
    },
    {
      "epoch": 8.448275862068966,
      "grad_norm": 0.3277537524700165,
      "learning_rate": 0.00013317215019798638,
      "loss": 0.0391,
      "step": 490
    },
    {
      "epoch": 8.620689655172415,
      "grad_norm": 0.33878064155578613,
      "learning_rate": 0.00013046717985773745,
      "loss": 0.0401,
      "step": 500
    },
    {
      "epoch": 8.793103448275861,
      "grad_norm": 0.4036126434803009,
      "learning_rate": 0.00012773740516484235,
      "loss": 0.0369,
      "step": 510
    },
    {
      "epoch": 8.96551724137931,
      "grad_norm": 0.2406553477048874,
      "learning_rate": 0.00012498504852040434,
      "loss": 0.0357,
      "step": 520
    },
    {
      "epoch": 9.137931034482758,
      "grad_norm": 0.33557119965553284,
      "learning_rate": 0.00012221235071025046,
      "loss": 0.0366,
      "step": 530
    },
    {
      "epoch": 9.310344827586206,
      "grad_norm": 0.2251608818769455,
      "learning_rate": 0.00011942156908063284,
      "loss": 0.0387,
      "step": 540
    },
    {
      "epoch": 9.482758620689655,
      "grad_norm": 0.331117182970047,
      "learning_rate": 0.00011661497570044738,
      "loss": 0.0306,
      "step": 550
    },
    {
      "epoch": 9.655172413793103,
      "grad_norm": 0.1580682098865509,
      "learning_rate": 0.00011379485551146621,
      "loss": 0.0332,
      "step": 560
    },
    {
      "epoch": 9.827586206896552,
      "grad_norm": 0.21949926018714905,
      "learning_rate": 0.00011096350446808998,
      "loss": 0.0378,
      "step": 570
    },
    {
      "epoch": 10.0,
      "grad_norm": 0.300108402967453,
      "learning_rate": 0.00010812322766813461,
      "loss": 0.0354,
      "step": 580
    },
    {
      "epoch": 10.172413793103448,
      "grad_norm": 0.2582547664642334,
      "learning_rate": 0.00010527633747617362,
      "loss": 0.0348,
      "step": 590
    },
    {
      "epoch": 10.344827586206897,
      "grad_norm": 0.3702482283115387,
      "learning_rate": 0.00010242515164096471,
      "loss": 0.0334,
      "step": 600
    },
    {
      "epoch": 10.517241379310345,
      "grad_norm": 0.2247171700000763,
      "learning_rate": 9.957199140849278e-05,
      "loss": 0.0313,
      "step": 610
    },
    {
      "epoch": 10.689655172413794,
      "grad_norm": 0.26956915855407715,
      "learning_rate": 9.671917963216574e-05,
      "loss": 0.0336,
      "step": 620
    },
    {
      "epoch": 10.862068965517242,
      "grad_norm": 0.19159068167209625,
      "learning_rate": 9.386903888170172e-05,
      "loss": 0.0311,
      "step": 630
    },
    {
      "epoch": 11.03448275862069,
      "grad_norm": 0.244057759642601,
      "learning_rate": 9.102388955224703e-05,
      "loss": 0.033,
      "step": 640
    },
    {
      "epoch": 11.206896551724139,
      "grad_norm": 0.24532181024551392,
      "learning_rate": 8.818604797526493e-05,
      "loss": 0.0365,
      "step": 650
    },
    {
      "epoch": 11.379310344827585,
      "grad_norm": 0.3451012670993805,
      "learning_rate": 8.535782453273217e-05,
      "loss": 0.0297,
      "step": 660
    },
    {
      "epoch": 11.551724137931034,
      "grad_norm": 0.28239578008651733,
      "learning_rate": 8.254152177618e-05,
      "loss": 0.0285,
      "step": 670
    },
    {
      "epoch": 11.724137931034482,
      "grad_norm": 0.2965984046459198,
      "learning_rate": 7.97394325521093e-05,
      "loss": 0.0303,
      "step": 680
    },
    {
      "epoch": 11.89655172413793,
      "grad_norm": 0.25484567880630493,
      "learning_rate": 7.695383813530776e-05,
      "loss": 0.0283,
      "step": 690
    },
    {
      "epoch": 12.068965517241379,
      "grad_norm": 0.15973417460918427,
      "learning_rate": 7.418700637158742e-05,
      "loss": 0.0324,
      "step": 700
    },
    {
      "epoch": 12.241379310344827,
      "grad_norm": 0.16871625185012817,
      "learning_rate": 7.144118983145521e-05,
      "loss": 0.029,
      "step": 710
    },
    {
      "epoch": 12.413793103448276,
      "grad_norm": 0.19246120750904083,
      "learning_rate": 6.871862397622005e-05,
      "loss": 0.0309,
      "step": 720
    },
    {
      "epoch": 12.586206896551724,
      "grad_norm": 0.28560781478881836,
      "learning_rate": 6.60215253380287e-05,
      "loss": 0.03,
      "step": 730
    },
    {
      "epoch": 12.758620689655173,
      "grad_norm": 0.2544420659542084,
      "learning_rate": 6.335208971531265e-05,
      "loss": 0.0338,
      "step": 740
    },
    {
      "epoch": 12.931034482758621,
      "grad_norm": 0.16777703166007996,
      "learning_rate": 6.071249038511497e-05,
      "loss": 0.03,
      "step": 750
    },
    {
      "epoch": 13.10344827586207,
      "grad_norm": 0.13849933445453644,
      "learning_rate": 5.810487633375261e-05,
      "loss": 0.03,
      "step": 760
    },
    {
      "epoch": 13.275862068965518,
      "grad_norm": 0.2304876148700714,
      "learning_rate": 5.553137050725459e-05,
      "loss": 0.032,
      "step": 770
    },
    {
      "epoch": 13.448275862068966,
      "grad_norm": 0.17199701070785522,
      "learning_rate": 5.2994068083000495e-05,
      "loss": 0.0324,
      "step": 780
    },
    {
      "epoch": 13.620689655172415,
      "grad_norm": 0.15231479704380035,
      "learning_rate": 5.049503476396627e-05,
      "loss": 0.0269,
      "step": 790
    },
    {
      "epoch": 13.793103448275861,
      "grad_norm": 0.20847296714782715,
      "learning_rate": 4.803630509696627e-05,
      "loss": 0.0276,
      "step": 800
    },
    {
      "epoch": 13.96551724137931,
      "grad_norm": 0.2147344946861267,
      "learning_rate": 4.561988081626023e-05,
      "loss": 0.0252,
      "step": 810
    },
    {
      "epoch": 14.137931034482758,
      "grad_norm": 0.1602960079908371,
      "learning_rate": 4.32477292138746e-05,
      "loss": 0.025,
      "step": 820
    },
    {
      "epoch": 14.310344827586206,
      "grad_norm": 0.19987209141254425,
      "learning_rate": 4.092178153796401e-05,
      "loss": 0.0268,
      "step": 830
    },
    {
      "epoch": 14.482758620689655,
      "grad_norm": 0.18080690503120422,
      "learning_rate": 3.8643931420517255e-05,
      "loss": 0.0279,
      "step": 840
    },
    {
      "epoch": 14.655172413793103,
      "grad_norm": 0.1728052943944931,
      "learning_rate": 3.641603333568831e-05,
      "loss": 0.0226,
      "step": 850
    },
    {
      "epoch": 14.827586206896552,
      "grad_norm": 0.19159851968288422,
      "learning_rate": 3.423990109000641e-05,
      "loss": 0.0281,
      "step": 860
    },
    {
      "epoch": 15.0,
      "grad_norm": 0.17135493457317352,
      "learning_rate": 3.211730634569563e-05,
      "loss": 0.0291,
      "step": 870
    },
    {
      "epoch": 15.172413793103448,
      "grad_norm": 0.24832935631275177,
      "learning_rate": 3.0049977178305076e-05,
      "loss": 0.0277,
      "step": 880
    },
    {
      "epoch": 15.344827586206897,
      "grad_norm": 0.22584789991378784,
      "learning_rate": 2.803959666982474e-05,
      "loss": 0.025,
      "step": 890
    },
    {
      "epoch": 15.517241379310345,
      "grad_norm": 0.17386794090270996,
      "learning_rate": 2.60878015384319e-05,
      "loss": 0.0289,
      "step": 900
    },
    {
      "epoch": 15.689655172413794,
      "grad_norm": 0.16346997022628784,
      "learning_rate": 2.419618080598417e-05,
      "loss": 0.0207,
      "step": 910
    },
    {
      "epoch": 15.862068965517242,
      "grad_norm": 0.11085375398397446,
      "learning_rate": 2.23662745043433e-05,
      "loss": 0.0227,
      "step": 920
    },
    {
      "epoch": 16.03448275862069,
      "grad_norm": 0.1747630387544632,
      "learning_rate": 2.05995724215838e-05,
      "loss": 0.0257,
      "step": 930
    },
    {
      "epoch": 16.20689655172414,
      "grad_norm": 0.15860582888126373,
      "learning_rate": 1.889751288910645e-05,
      "loss": 0.0254,
      "step": 940
    },
    {
      "epoch": 16.379310344827587,
      "grad_norm": 0.15299658477306366,
      "learning_rate": 1.726148161064437e-05,
      "loss": 0.0251,
      "step": 950
    },
    {
      "epoch": 16.551724137931036,
      "grad_norm": 0.1972336769104004,
      "learning_rate": 1.569281053411532e-05,
      "loss": 0.0256,
      "step": 960
    },
    {
      "epoch": 16.724137931034484,
      "grad_norm": 0.13377782702445984,
      "learning_rate": 1.4192776767238158e-05,
      "loss": 0.0249,
      "step": 970
    },
    {
      "epoch": 16.896551724137932,
      "grad_norm": 0.17352934181690216,
      "learning_rate": 1.276260153779667e-05,
      "loss": 0.0231,
      "step": 980
    },
    {
      "epoch": 17.06896551724138,
      "grad_norm": 0.21800649166107178,
      "learning_rate": 1.1403449199396966e-05,
      "loss": 0.0279,
      "step": 990
    },
    {
      "epoch": 17.24137931034483,
      "grad_norm": 0.18025921285152435,
      "learning_rate": 1.0116426283528302e-05,
      "loss": 0.0227,
      "step": 1000
    },
    {
      "epoch": 17.413793103448278,
      "grad_norm": 0.17031840980052948,
      "learning_rate": 8.902580598698663e-06,
      "loss": 0.0208,
      "step": 1010
    },
    {
      "epoch": 17.586206896551722,
      "grad_norm": 0.12430210411548615,
      "learning_rate": 7.7629003773787e-06,
      "loss": 0.0269,
      "step": 1020
    },
    {
      "epoch": 17.75862068965517,
      "grad_norm": 0.17070575058460236,
      "learning_rate": 6.698313471448547e-06,
      "loss": 0.0209,
      "step": 1030
    },
    {
      "epoch": 17.93103448275862,
      "grad_norm": 0.17201951146125793,
      "learning_rate": 5.709686596802521e-06,
      "loss": 0.0218,
      "step": 1040
    },
    {
      "epoch": 18.103448275862068,
      "grad_norm": 0.10487387329339981,
      "learning_rate": 4.797824627726577e-06,
      "loss": 0.0201,
      "step": 1050
    },
    {
      "epoch": 18.275862068965516,
      "grad_norm": 0.14748798310756683,
      "learning_rate": 3.963469941623288e-06,
      "loss": 0.0202,
      "step": 1060
    },
    {
      "epoch": 18.448275862068964,
      "grad_norm": 0.1550891101360321,
      "learning_rate": 3.2073018146173805e-06,
      "loss": 0.0202,
      "step": 1070
    },
    {
      "epoch": 18.620689655172413,
      "grad_norm": 0.14703543484210968,
      "learning_rate": 2.529935868534372e-06,
      "loss": 0.0243,
      "step": 1080
    },
    {
      "epoch": 18.79310344827586,
      "grad_norm": 0.1516178995370865,
      "learning_rate": 1.9319235697021763e-06,
      "loss": 0.0207,
      "step": 1090
    },
    {
      "epoch": 18.96551724137931,
      "grad_norm": 0.12201597541570663,
      "learning_rate": 1.4137517799837852e-06,
      "loss": 0.0225,
      "step": 1100
    },
    {
      "epoch": 19.137931034482758,
      "grad_norm": 0.15553522109985352,
      "learning_rate": 9.758423604068312e-07,
      "loss": 0.0188,
      "step": 1110
    },
    {
      "epoch": 19.310344827586206,
      "grad_norm": 0.13390740752220154,
      "learning_rate": 6.185518277123214e-07,
      "loss": 0.0214,
      "step": 1120
    },
    {
      "epoch": 19.482758620689655,
      "grad_norm": 0.17719490826129913,
      "learning_rate": 3.421710641023923e-07,
      "loss": 0.0245,
      "step": 1130
    },
    {
      "epoch": 19.655172413793103,
      "grad_norm": 0.206907719373703,
      "learning_rate": 1.4692508042337682e-07,
      "loss": 0.0216,
      "step": 1140
    },
    {
      "epoch": 19.82758620689655,
      "grad_norm": 0.08690398931503296,
      "learning_rate": 3.2972832976918554e-08,
      "loss": 0.0197,
      "step": 1150
    },
    {
      "epoch": 19.982758620689655,
      "step": 1159,
      "total_flos": 8.16714982017792e+16,
      "train_loss": 0.05862578803655708,
      "train_runtime": 806.3047,
      "train_samples_per_second": 91.995,
      "train_steps_per_second": 1.437
    }
  ],
  "logging_steps": 10,
  "max_steps": 1159,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 20,
  "save_steps": 10000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 8.16714982017792e+16,
  "train_batch_size": 64,
  "trial_name": null,
  "trial_params": null
}