{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 19.962616822429908,
  "eval_steps": 500,
  "global_step": 2136,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.09345794392523364,
      "grad_norm": 4.17994499206543,
      "learning_rate": 1.869158878504673e-05,
      "loss": 0.9424,
      "step": 10
    },
    {
      "epoch": 0.18691588785046728,
      "grad_norm": 2.603377342224121,
      "learning_rate": 3.738317757009346e-05,
      "loss": 0.4804,
      "step": 20
    },
    {
      "epoch": 0.2803738317757009,
      "grad_norm": 1.72787606716156,
      "learning_rate": 5.607476635514019e-05,
      "loss": 0.2668,
      "step": 30
    },
    {
      "epoch": 0.37383177570093457,
      "grad_norm": 0.5973343849182129,
      "learning_rate": 7.476635514018692e-05,
      "loss": 0.1802,
      "step": 40
    },
    {
      "epoch": 0.4672897196261682,
      "grad_norm": 1.0159034729003906,
      "learning_rate": 9.345794392523365e-05,
      "loss": 0.1275,
      "step": 50
    },
    {
      "epoch": 0.5607476635514018,
      "grad_norm": 0.8789228200912476,
      "learning_rate": 0.00011214953271028037,
      "loss": 0.1168,
      "step": 60
    },
    {
      "epoch": 0.6542056074766355,
      "grad_norm": 1.1317589282989502,
      "learning_rate": 0.0001308411214953271,
      "loss": 0.1075,
      "step": 70
    },
    {
      "epoch": 0.7476635514018691,
      "grad_norm": 1.391225814819336,
      "learning_rate": 0.00014953271028037384,
      "loss": 0.1022,
      "step": 80
    },
    {
      "epoch": 0.8411214953271028,
      "grad_norm": 1.0744414329528809,
      "learning_rate": 0.00016822429906542056,
      "loss": 0.0905,
      "step": 90
    },
    {
      "epoch": 0.9345794392523364,
      "grad_norm": 0.6361038684844971,
      "learning_rate": 0.0001869158878504673,
      "loss": 0.0794,
      "step": 100
    },
    {
      "epoch": 1.02803738317757,
      "grad_norm": 0.6731115579605103,
      "learning_rate": 0.00019999892118398673,
      "loss": 0.0823,
      "step": 110
    },
    {
      "epoch": 1.1214953271028036,
      "grad_norm": 0.6741369962692261,
      "learning_rate": 0.0001999797428801708,
      "loss": 0.0762,
      "step": 120
    },
    {
      "epoch": 1.2149532710280373,
      "grad_norm": 0.7907876372337341,
      "learning_rate": 0.00019993659617930244,
      "loss": 0.0669,
      "step": 130
    },
    {
      "epoch": 1.308411214953271,
      "grad_norm": 0.7666932940483093,
      "learning_rate": 0.00019986949142504959,
      "loss": 0.0739,
      "step": 140
    },
    {
      "epoch": 1.4018691588785046,
      "grad_norm": 0.7501648664474487,
      "learning_rate": 0.00019977844470460494,
      "loss": 0.0688,
      "step": 150
    },
    {
      "epoch": 1.4953271028037383,
      "grad_norm": 0.45659929513931274,
      "learning_rate": 0.00019966347784482945,
      "loss": 0.0654,
      "step": 160
    },
    {
      "epoch": 1.588785046728972,
      "grad_norm": 0.6907665729522705,
      "learning_rate": 0.0001995246184070197,
      "loss": 0.0613,
      "step": 170
    },
    {
      "epoch": 1.6822429906542056,
      "grad_norm": 0.47750258445739746,
      "learning_rate": 0.0001993618996803005,
      "loss": 0.0536,
      "step": 180
    },
    {
      "epoch": 1.7757009345794392,
      "grad_norm": 0.7647998332977295,
      "learning_rate": 0.00019917536067364444,
      "loss": 0.0579,
      "step": 190
    },
    {
      "epoch": 1.8691588785046729,
      "grad_norm": 0.5567947030067444,
      "learning_rate": 0.00019896504610652016,
      "loss": 0.0548,
      "step": 200
    },
    {
      "epoch": 1.9626168224299065,
      "grad_norm": 0.46288764476776123,
      "learning_rate": 0.00019873100639817166,
      "loss": 0.0527,
      "step": 210
    },
    {
      "epoch": 2.05607476635514,
      "grad_norm": 0.4530458450317383,
      "learning_rate": 0.00019847329765553115,
      "loss": 0.0465,
      "step": 220
    },
    {
      "epoch": 2.149532710280374,
      "grad_norm": 0.3930870592594147,
      "learning_rate": 0.00019819198165976838,
      "loss": 0.0519,
      "step": 230
    },
    {
      "epoch": 2.2429906542056073,
      "grad_norm": 0.41593921184539795,
      "learning_rate": 0.00019788712585147976,
      "loss": 0.0507,
      "step": 240
    },
    {
      "epoch": 2.336448598130841,
      "grad_norm": 0.47823095321655273,
      "learning_rate": 0.00019755880331452045,
      "loss": 0.043,
      "step": 250
    },
    {
      "epoch": 2.4299065420560746,
      "grad_norm": 0.316738486289978,
      "learning_rate": 0.00019720709275848408,
      "loss": 0.0421,
      "step": 260
    },
    {
      "epoch": 2.5233644859813085,
      "grad_norm": 0.33323222398757935,
      "learning_rate": 0.00019683207849983326,
      "loss": 0.0451,
      "step": 270
    },
    {
      "epoch": 2.616822429906542,
      "grad_norm": 0.43355607986450195,
      "learning_rate": 0.00019643385044168632,
      "loss": 0.0402,
      "step": 280
    },
    {
      "epoch": 2.710280373831776,
      "grad_norm": 0.40903353691101074,
      "learning_rate": 0.0001960125040522645,
      "loss": 0.0433,
      "step": 290
    },
    {
      "epoch": 2.803738317757009,
      "grad_norm": 0.3322107493877411,
      "learning_rate": 0.00019556814034200518,
      "loss": 0.0415,
      "step": 300
    },
    {
      "epoch": 2.897196261682243,
      "grad_norm": 0.3895340859889984,
      "learning_rate": 0.00019510086583934643,
      "loss": 0.0419,
      "step": 310
    },
    {
      "epoch": 2.9906542056074765,
      "grad_norm": 0.39366844296455383,
      "learning_rate": 0.0001946107925651885,
      "loss": 0.0358,
      "step": 320
    },
    {
      "epoch": 3.0841121495327104,
      "grad_norm": 0.3918449282646179,
      "learning_rate": 0.00019409803800603904,
      "loss": 0.041,
      "step": 330
    },
    {
      "epoch": 3.177570093457944,
      "grad_norm": 0.273848295211792,
      "learning_rate": 0.00019356272508584753,
      "loss": 0.0447,
      "step": 340
    },
    {
      "epoch": 3.2710280373831777,
      "grad_norm": 0.4000929296016693,
      "learning_rate": 0.00019300498213653647,
      "loss": 0.0434,
      "step": 350
    },
    {
      "epoch": 3.364485981308411,
      "grad_norm": 0.2600472867488861,
      "learning_rate": 0.00019242494286723612,
      "loss": 0.0429,
      "step": 360
    },
    {
      "epoch": 3.457943925233645,
      "grad_norm": 0.29327982664108276,
      "learning_rate": 0.00019182274633223,
      "loss": 0.0362,
      "step": 370
    },
    {
      "epoch": 3.5514018691588785,
      "grad_norm": 0.35170868039131165,
      "learning_rate": 0.00019119853689761912,
      "loss": 0.0342,
      "step": 380
    },
    {
      "epoch": 3.6448598130841123,
      "grad_norm": 0.46894872188568115,
      "learning_rate": 0.00019055246420671257,
      "loss": 0.0364,
      "step": 390
    },
    {
      "epoch": 3.7383177570093458,
      "grad_norm": 0.4111644923686981,
      "learning_rate": 0.0001898846831441533,
      "loss": 0.0336,
      "step": 400
    },
    {
      "epoch": 3.831775700934579,
      "grad_norm": 0.40484148263931274,
      "learning_rate": 0.0001891953537987873,
      "loss": 0.0406,
      "step": 410
    },
    {
      "epoch": 3.925233644859813,
      "grad_norm": 0.29574188590049744,
      "learning_rate": 0.00018848464142528481,
      "loss": 0.0344,
      "step": 420
    },
    {
      "epoch": 4.018691588785047,
      "grad_norm": 0.30757296085357666,
      "learning_rate": 0.00018775271640452377,
      "loss": 0.0365,
      "step": 430
    },
    {
      "epoch": 4.11214953271028,
      "grad_norm": 0.26074057817459106,
      "learning_rate": 0.00018699975420274358,
      "loss": 0.0356,
      "step": 440
    },
    {
      "epoch": 4.205607476635514,
      "grad_norm": 0.3508523404598236,
      "learning_rate": 0.00018622593532948055,
      "loss": 0.0325,
      "step": 450
    },
    {
      "epoch": 4.299065420560748,
      "grad_norm": 0.2975287437438965,
      "learning_rate": 0.0001854314452942935,
      "loss": 0.0321,
      "step": 460
    },
    {
      "epoch": 4.392523364485982,
      "grad_norm": 0.30913522839546204,
      "learning_rate": 0.0001846164745622914,
      "loss": 0.0329,
      "step": 470
    },
    {
      "epoch": 4.485981308411215,
      "grad_norm": 0.30218392610549927,
      "learning_rate": 0.00018378121850847249,
      "loss": 0.0344,
      "step": 480
    },
    {
      "epoch": 4.579439252336448,
      "grad_norm": 0.4823051691055298,
      "learning_rate": 0.00018292587737088665,
      "loss": 0.0331,
      "step": 490
    },
    {
      "epoch": 4.672897196261682,
      "grad_norm": 0.28159743547439575,
      "learning_rate": 0.00018205065620263168,
      "loss": 0.0374,
      "step": 500
    },
    {
      "epoch": 4.766355140186916,
      "grad_norm": 0.36674001812934875,
      "learning_rate": 0.0001811557648226956,
      "loss": 0.036,
      "step": 510
    },
    {
      "epoch": 4.859813084112149,
      "grad_norm": 0.31007957458496094,
      "learning_rate": 0.00018024141776565606,
      "loss": 0.0321,
      "step": 520
    },
    {
      "epoch": 4.953271028037383,
      "grad_norm": 0.2760983109474182,
      "learning_rate": 0.0001793078342302495,
      "loss": 0.0326,
      "step": 530
    },
    {
      "epoch": 5.046728971962617,
      "grad_norm": 0.23232242465019226,
      "learning_rate": 0.00017835523802682194,
      "loss": 0.032,
      "step": 540
    },
    {
      "epoch": 5.140186915887851,
      "grad_norm": 0.3053811192512512,
      "learning_rate": 0.0001773838575236747,
      "loss": 0.0302,
      "step": 550
    },
    {
      "epoch": 5.233644859813084,
      "grad_norm": 0.2696075737476349,
      "learning_rate": 0.0001763939255923166,
      "loss": 0.0325,
      "step": 560
    },
    {
      "epoch": 5.327102803738318,
      "grad_norm": 0.28349894285202026,
      "learning_rate": 0.00017538567955163755,
      "loss": 0.0358,
      "step": 570
    },
    {
      "epoch": 5.420560747663552,
      "grad_norm": 0.20442534983158112,
      "learning_rate": 0.00017435936111101518,
      "loss": 0.0293,
      "step": 580
    },
    {
      "epoch": 5.5140186915887845,
      "grad_norm": 0.271004855632782,
      "learning_rate": 0.0001733152163123694,
      "loss": 0.0275,
      "step": 590
    },
    {
      "epoch": 5.607476635514018,
      "grad_norm": 0.14870816469192505,
      "learning_rate": 0.00017225349547117798,
      "loss": 0.0315,
      "step": 600
    },
    {
      "epoch": 5.700934579439252,
      "grad_norm": 0.3340059518814087,
      "learning_rate": 0.00017117445311646802,
      "loss": 0.029,
      "step": 610
    },
    {
      "epoch": 5.794392523364486,
      "grad_norm": 0.32217147946357727,
      "learning_rate": 0.00017007834792979695,
      "loss": 0.0294,
      "step": 620
    },
    {
      "epoch": 5.88785046728972,
      "grad_norm": 0.32910802960395813,
      "learning_rate": 0.00016896544268323792,
      "loss": 0.033,
      "step": 630
    },
    {
      "epoch": 5.981308411214953,
      "grad_norm": 0.5195891261100769,
      "learning_rate": 0.00016783600417638535,
      "loss": 0.0334,
      "step": 640
    },
    {
      "epoch": 6.074766355140187,
      "grad_norm": 0.31266453862190247,
      "learning_rate": 0.0001666903031723937,
      "loss": 0.0311,
      "step": 650
    },
    {
      "epoch": 6.168224299065421,
      "grad_norm": 0.21037207543849945,
      "learning_rate": 0.00016552861433306726,
      "loss": 0.0297,
      "step": 660
    },
    {
      "epoch": 6.261682242990654,
      "grad_norm": 0.20664794743061066,
      "learning_rate": 0.00016435121615301457,
      "loss": 0.0286,
      "step": 670
    },
    {
      "epoch": 6.355140186915888,
      "grad_norm": 0.2566857635974884,
      "learning_rate": 0.00016315839089288411,
      "loss": 0.0229,
      "step": 680
    },
    {
      "epoch": 6.4485981308411215,
      "grad_norm": 0.2402873933315277,
      "learning_rate": 0.00016195042451169753,
      "loss": 0.0264,
      "step": 690
    },
    {
      "epoch": 6.542056074766355,
      "grad_norm": 0.36038416624069214,
      "learning_rate": 0.00016072760659829562,
      "loss": 0.0252,
      "step": 700
    },
    {
      "epoch": 6.635514018691588,
      "grad_norm": 0.4320838749408722,
      "learning_rate": 0.00015949023030191475,
      "loss": 0.0293,
      "step": 710
    },
    {
      "epoch": 6.728971962616822,
      "grad_norm": 0.26101937890052795,
      "learning_rate": 0.0001582385922619092,
      "loss": 0.0286,
      "step": 720
    },
    {
      "epoch": 6.822429906542056,
      "grad_norm": 0.32656329870224,
      "learning_rate": 0.00015697299253663723,
      "loss": 0.0294,
      "step": 730
    },
    {
      "epoch": 6.91588785046729,
      "grad_norm": 0.2876569926738739,
      "learning_rate": 0.00015569373453152728,
      "loss": 0.0309,
      "step": 740
    },
    {
      "epoch": 7.009345794392523,
      "grad_norm": 0.2590012848377228,
      "learning_rate": 0.00015440112492634192,
      "loss": 0.0261,
      "step": 750
    },
    {
      "epoch": 7.102803738317757,
      "grad_norm": 0.37834206223487854,
      "learning_rate": 0.00015309547360165647,
      "loss": 0.03,
      "step": 760
    },
    {
      "epoch": 7.196261682242991,
      "grad_norm": 0.27962639927864075,
      "learning_rate": 0.00015177709356457077,
      "loss": 0.0297,
      "step": 770
    },
    {
      "epoch": 7.289719626168225,
      "grad_norm": 0.3405240476131439,
      "learning_rate": 0.00015044630087367118,
      "loss": 0.0314,
      "step": 780
    },
    {
      "epoch": 7.383177570093458,
      "grad_norm": 0.31650567054748535,
      "learning_rate": 0.0001491034145632609,
      "loss": 0.0249,
      "step": 790
    },
    {
      "epoch": 7.4766355140186915,
      "grad_norm": 0.3112686276435852,
      "learning_rate": 0.00014774875656687727,
      "loss": 0.0257,
      "step": 800
    },
    {
      "epoch": 7.570093457943925,
      "grad_norm": 0.23344942927360535,
      "learning_rate": 0.0001463826516401137,
      "loss": 0.0245,
      "step": 810
    },
    {
      "epoch": 7.663551401869158,
      "grad_norm": 0.1889248788356781,
      "learning_rate": 0.00014500542728276518,
      "loss": 0.0229,
      "step": 820
    },
    {
      "epoch": 7.757009345794392,
      "grad_norm": 0.24938005208969116,
      "learning_rate": 0.00014361741366031603,
      "loss": 0.0246,
      "step": 830
    },
    {
      "epoch": 7.850467289719626,
      "grad_norm": 0.22689329087734222,
      "learning_rate": 0.0001422189435247884,
      "loss": 0.0251,
      "step": 840
    },
    {
      "epoch": 7.94392523364486,
      "grad_norm": 0.28100717067718506,
      "learning_rate": 0.00014081035213497082,
      "loss": 0.0277,
      "step": 850
    },
    {
      "epoch": 8.037383177570094,
      "grad_norm": 0.2773335874080658,
      "learning_rate": 0.0001393919771760457,
      "loss": 0.027,
      "step": 860
    },
    {
      "epoch": 8.130841121495328,
      "grad_norm": 0.2691657841205597,
      "learning_rate": 0.00013796415867863528,
      "loss": 0.0239,
      "step": 870
    },
    {
      "epoch": 8.22429906542056,
      "grad_norm": 0.4514601230621338,
      "learning_rate": 0.00013652723893728532,
      "loss": 0.029,
      "step": 880
    },
    {
      "epoch": 8.317757009345794,
      "grad_norm": 0.3693794012069702,
      "learning_rate": 0.00013508156242840566,
      "loss": 0.0251,
      "step": 890
    },
    {
      "epoch": 8.411214953271028,
      "grad_norm": 0.2953411042690277,
      "learning_rate": 0.00013362747572768832,
      "loss": 0.0284,
      "step": 900
    },
    {
      "epoch": 8.504672897196262,
      "grad_norm": 0.25673815608024597,
      "learning_rate": 0.0001321653274270219,
      "loss": 0.0251,
      "step": 910
    },
    {
      "epoch": 8.598130841121495,
      "grad_norm": 0.2374838888645172,
      "learning_rate": 0.00013069546805092276,
      "loss": 0.0211,
      "step": 920
    },
    {
      "epoch": 8.69158878504673,
      "grad_norm": 0.2258038967847824,
      "learning_rate": 0.00012921824997250294,
      "loss": 0.0235,
      "step": 930
    },
    {
      "epoch": 8.785046728971963,
      "grad_norm": 0.35665759444236755,
      "learning_rate": 0.00012773402732899481,
      "loss": 0.0262,
      "step": 940
    },
    {
      "epoch": 8.878504672897197,
      "grad_norm": 0.2746776342391968,
      "learning_rate": 0.00012624315593685282,
      "loss": 0.0228,
      "step": 950
    },
    {
      "epoch": 8.97196261682243,
      "grad_norm": 0.1950758695602417,
      "learning_rate": 0.0001247459932064529,
      "loss": 0.028,
      "step": 960
    },
    {
      "epoch": 9.065420560747663,
      "grad_norm": 0.25761884450912476,
      "learning_rate": 0.00012324289805640934,
      "loss": 0.0256,
      "step": 970
    },
    {
      "epoch": 9.158878504672897,
      "grad_norm": 0.24331067502498627,
      "learning_rate": 0.00012173423082753041,
      "loss": 0.0226,
      "step": 980
    },
    {
      "epoch": 9.25233644859813,
      "grad_norm": 0.2336030751466751,
      "learning_rate": 0.00012022035319643305,
      "loss": 0.024,
      "step": 990
    },
    {
      "epoch": 9.345794392523365,
      "grad_norm": 0.26760831475257874,
      "learning_rate": 0.00011870162808883703,
      "loss": 0.0231,
      "step": 1000
    },
    {
      "epoch": 9.439252336448599,
      "grad_norm": 0.22399088740348816,
      "learning_rate": 0.00011717841959255999,
      "loss": 0.0212,
      "step": 1010
    },
    {
      "epoch": 9.532710280373832,
      "grad_norm": 0.13075126707553864,
      "learning_rate": 0.0001156510928702337,
      "loss": 0.027,
      "step": 1020
    },
    {
      "epoch": 9.626168224299064,
      "grad_norm": 0.19196636974811554,
      "learning_rate": 0.00011412001407176265,
      "loss": 0.0213,
      "step": 1030
    },
    {
      "epoch": 9.719626168224298,
      "grad_norm": 0.24825584888458252,
      "learning_rate": 0.00011258555024654612,
      "loss": 0.02,
      "step": 1040
    },
    {
      "epoch": 9.813084112149532,
      "grad_norm": 0.20732861757278442,
      "learning_rate": 0.00011104806925548435,
      "loss": 0.0209,
      "step": 1050
    },
    {
      "epoch": 9.906542056074766,
      "grad_norm": 0.15755069255828857,
      "learning_rate": 0.00010950793968279045,
      "loss": 0.0198,
      "step": 1060
    },
    {
      "epoch": 10.0,
      "grad_norm": 0.261300265789032,
      "learning_rate": 0.00010796553074762867,
      "loss": 0.0228,
      "step": 1070
    },
    {
      "epoch": 10.093457943925234,
      "grad_norm": 0.20064909756183624,
      "learning_rate": 0.00010642121221560066,
      "loss": 0.0237,
      "step": 1080
    },
    {
      "epoch": 10.186915887850468,
      "grad_norm": 0.2862524092197418,
      "learning_rate": 0.00010487535431010049,
      "loss": 0.0189,
      "step": 1090
    },
    {
      "epoch": 10.280373831775702,
      "grad_norm": 0.2752748727798462,
      "learning_rate": 0.0001033283276235602,
      "loss": 0.0231,
      "step": 1100
    },
    {
      "epoch": 10.373831775700934,
      "grad_norm": 0.1559881865978241,
      "learning_rate": 0.00010178050302860662,
      "loss": 0.017,
      "step": 1110
    },
    {
      "epoch": 10.467289719626168,
      "grad_norm": 0.16235685348510742,
      "learning_rate": 0.00010023225158915119,
      "loss": 0.0214,
      "step": 1120
    },
    {
      "epoch": 10.560747663551401,
      "grad_norm": 0.26877355575561523,
      "learning_rate": 9.868394447143394e-05,
      "loss": 0.0221,
      "step": 1130
    },
    {
      "epoch": 10.654205607476635,
      "grad_norm": 0.26213476061820984,
      "learning_rate": 9.713595285504266e-05,
      "loss": 0.0218,
      "step": 1140
    },
    {
      "epoch": 10.74766355140187,
      "grad_norm": 0.2717665433883667,
      "learning_rate": 9.558864784392932e-05,
      "loss": 0.0203,
      "step": 1150
    },
    {
      "epoch": 10.841121495327103,
      "grad_norm": 0.29337963461875916,
      "learning_rate": 9.404240037744418e-05,
      "loss": 0.0208,
      "step": 1160
    },
    {
      "epoch": 10.934579439252337,
      "grad_norm": 0.18261757493019104,
      "learning_rate": 9.249758114140977e-05,
      "loss": 0.0237,
      "step": 1170
    },
    {
      "epoch": 11.02803738317757,
      "grad_norm": 0.18363697826862335,
      "learning_rate": 9.095456047925517e-05,
      "loss": 0.0202,
      "step": 1180
    },
    {
      "epoch": 11.121495327102803,
      "grad_norm": 0.147624671459198,
      "learning_rate": 8.941370830323286e-05,
      "loss": 0.0177,
      "step": 1190
    },
    {
      "epoch": 11.214953271028037,
      "grad_norm": 0.19984780251979828,
      "learning_rate": 8.787539400573867e-05,
      "loss": 0.0208,
      "step": 1200
    },
    {
      "epoch": 11.30841121495327,
      "grad_norm": 0.2558623254299164,
      "learning_rate": 8.633998637075634e-05,
      "loss": 0.0187,
      "step": 1210
    },
    {
      "epoch": 11.401869158878505,
      "grad_norm": 0.15088629722595215,
      "learning_rate": 8.480785348544802e-05,
      "loss": 0.0191,
      "step": 1220
    },
    {
      "epoch": 11.495327102803738,
      "grad_norm": 0.15472617745399475,
      "learning_rate": 8.32793626519119e-05,
      "loss": 0.0174,
      "step": 1230
    },
    {
      "epoch": 11.588785046728972,
      "grad_norm": 0.1284412145614624,
      "learning_rate": 8.175488029912783e-05,
      "loss": 0.0183,
      "step": 1240
    },
    {
      "epoch": 11.682242990654206,
      "grad_norm": 0.2205333113670349,
      "learning_rate": 8.02347718951124e-05,
      "loss": 0.0179,
      "step": 1250
    },
    {
      "epoch": 11.77570093457944,
      "grad_norm": 0.2073797583580017,
      "learning_rate": 7.871940185930438e-05,
      "loss": 0.023,
      "step": 1260
    },
    {
      "epoch": 11.869158878504672,
      "grad_norm": 0.22515133023262024,
      "learning_rate": 7.720913347520154e-05,
      "loss": 0.0178,
      "step": 1270
    },
    {
      "epoch": 11.962616822429906,
      "grad_norm": 0.18330000340938568,
      "learning_rate": 7.57043288032698e-05,
      "loss": 0.0206,
      "step": 1280
    },
    {
      "epoch": 12.05607476635514,
      "grad_norm": 0.19882416725158691,
      "learning_rate": 7.420534859414542e-05,
      "loss": 0.0186,
      "step": 1290
    },
    {
      "epoch": 12.149532710280374,
      "grad_norm": 0.4009910821914673,
      "learning_rate": 7.271255220215159e-05,
      "loss": 0.0203,
      "step": 1300
    },
    {
      "epoch": 12.242990654205608,
      "grad_norm": 0.21823973953723907,
      "learning_rate": 7.122629749914917e-05,
      "loss": 0.0183,
      "step": 1310
    },
    {
      "epoch": 12.336448598130842,
      "grad_norm": 0.17465481162071228,
      "learning_rate": 6.974694078874344e-05,
      "loss": 0.0164,
      "step": 1320
    },
    {
      "epoch": 12.429906542056075,
      "grad_norm": 0.23976965248584747,
      "learning_rate": 6.827483672086649e-05,
      "loss": 0.0228,
      "step": 1330
    },
    {
      "epoch": 12.523364485981308,
      "grad_norm": 0.17434823513031006,
      "learning_rate": 6.681033820675596e-05,
      "loss": 0.0185,
      "step": 1340
    },
    {
      "epoch": 12.616822429906541,
      "grad_norm": 0.21936717629432678,
      "learning_rate": 6.535379633435082e-05,
      "loss": 0.019,
      "step": 1350
    },
    {
      "epoch": 12.710280373831775,
      "grad_norm": 0.20729029178619385,
      "learning_rate": 6.390556028412421e-05,
      "loss": 0.0187,
      "step": 1360
    },
    {
      "epoch": 12.80373831775701,
      "grad_norm": 0.20753750205039978,
      "learning_rate": 6.246597724537342e-05,
      "loss": 0.0182,
      "step": 1370
    },
    {
      "epoch": 12.897196261682243,
      "grad_norm": 0.17846998572349548,
      "learning_rate": 6.10353923329875e-05,
      "loss": 0.0158,
      "step": 1380
    },
    {
      "epoch": 12.990654205607477,
      "grad_norm": 0.21448595821857452,
      "learning_rate": 5.961414850471172e-05,
      "loss": 0.0159,
      "step": 1390
    },
    {
      "epoch": 13.08411214953271,
      "grad_norm": 0.21095584332942963,
      "learning_rate": 5.8202586478929646e-05,
      "loss": 0.0169,
      "step": 1400
    },
    {
      "epoch": 13.177570093457945,
      "grad_norm": 0.2717122435569763,
      "learning_rate": 5.680104465298186e-05,
      "loss": 0.0163,
      "step": 1410
    },
    {
      "epoch": 13.271028037383177,
      "grad_norm": 0.15730348229408264,
      "learning_rate": 5.540985902204099e-05,
      "loss": 0.0154,
      "step": 1420
    },
    {
      "epoch": 13.36448598130841,
      "grad_norm": 0.2034991979598999,
      "learning_rate": 5.4029363098562794e-05,
      "loss": 0.0181,
      "step": 1430
    },
    {
      "epoch": 13.457943925233645,
      "grad_norm": 0.1877489984035492,
      "learning_rate": 5.2659887832332424e-05,
      "loss": 0.0149,
      "step": 1440
    },
    {
      "epoch": 13.551401869158878,
      "grad_norm": 0.25242817401885986,
      "learning_rate": 5.130176153112485e-05,
      "loss": 0.0162,
      "step": 1450
    },
    {
      "epoch": 13.644859813084112,
      "grad_norm": 0.3759627342224121,
      "learning_rate": 4.9955309781999096e-05,
      "loss": 0.0205,
      "step": 1460
    },
    {
      "epoch": 13.738317757009346,
      "grad_norm": 0.1744040697813034,
      "learning_rate": 4.8620855373244056e-05,
      "loss": 0.0158,
      "step": 1470
    },
    {
      "epoch": 13.83177570093458,
      "grad_norm": 0.2127053588628769,
      "learning_rate": 4.729871821699587e-05,
      "loss": 0.0181,
      "step": 1480
    },
    {
      "epoch": 13.925233644859812,
      "grad_norm": 0.16114765405654907,
      "learning_rate": 4.5989215272544605e-05,
      "loss": 0.0166,
      "step": 1490
    },
    {
      "epoch": 14.018691588785046,
      "grad_norm": 0.16290584206581116,
      "learning_rate": 4.469266047034861e-05,
      "loss": 0.0157,
      "step": 1500
    },
    {
      "epoch": 14.11214953271028,
      "grad_norm": 0.29187917709350586,
      "learning_rate": 4.3409364636775415e-05,
      "loss": 0.0172,
      "step": 1510
    },
    {
      "epoch": 14.205607476635514,
      "grad_norm": 0.2521297335624695,
      "learning_rate": 4.213963541958631e-05,
      "loss": 0.0147,
      "step": 1520
    },
    {
      "epoch": 14.299065420560748,
      "grad_norm": 0.2301267385482788,
      "learning_rate": 4.0883777214183306e-05,
      "loss": 0.0172,
      "step": 1530
    },
    {
      "epoch": 14.392523364485982,
      "grad_norm": 0.20023106038570404,
      "learning_rate": 3.96420910906356e-05,
      "loss": 0.0178,
      "step": 1540
    },
    {
      "epoch": 14.485981308411215,
      "grad_norm": 0.20669516921043396,
      "learning_rate": 3.8414874721503294e-05,
      "loss": 0.016,
      "step": 1550
    },
    {
      "epoch": 14.57943925233645,
      "grad_norm": 0.22793950140476227,
      "learning_rate": 3.720242231047568e-05,
      "loss": 0.0168,
      "step": 1560
    },
    {
      "epoch": 14.672897196261681,
      "grad_norm": 0.21381746232509613,
      "learning_rate": 3.6005024521840916e-05,
      "loss": 0.0156,
      "step": 1570
    },
    {
      "epoch": 14.766355140186915,
      "grad_norm": 0.20497943460941315,
      "learning_rate": 3.482296841080433e-05,
      "loss": 0.0152,
      "step": 1580
    },
    {
      "epoch": 14.85981308411215,
      "grad_norm": 0.2997715473175049,
      "learning_rate": 3.365653735467217e-05,
      "loss": 0.0159,
      "step": 1590
    },
    {
      "epoch": 14.953271028037383,
      "grad_norm": 0.1698296070098877,
      "learning_rate": 3.2506010984916505e-05,
      "loss": 0.0153,
      "step": 1600
    },
    {
      "epoch": 15.046728971962617,
      "grad_norm": 0.13616669178009033,
      "learning_rate": 3.137166512013882e-05,
      "loss": 0.0155,
      "step": 1610
    },
    {
      "epoch": 15.14018691588785,
      "grad_norm": 0.2119993269443512,
      "learning_rate": 3.025377169994702e-05,
      "loss": 0.013,
      "step": 1620
    },
    {
      "epoch": 15.233644859813085,
      "grad_norm": 0.18577918410301208,
      "learning_rate": 2.9152598719762846e-05,
      "loss": 0.0135,
      "step": 1630
    },
    {
      "epoch": 15.327102803738319,
      "grad_norm": 0.16220198571681976,
      "learning_rate": 2.8068410166574566e-05,
      "loss": 0.015,
      "step": 1640
    },
    {
      "epoch": 15.42056074766355,
      "grad_norm": 0.3275705575942993,
      "learning_rate": 2.7001465955650774e-05,
      "loss": 0.0136,
      "step": 1650
    },
    {
      "epoch": 15.514018691588785,
      "grad_norm": 0.14544951915740967,
      "learning_rate": 2.5952021868230135e-05,
      "loss": 0.013,
      "step": 1660
    },
    {
      "epoch": 15.607476635514018,
      "grad_norm": 0.1696663796901703,
      "learning_rate": 2.492032949020249e-05,
      "loss": 0.0135,
      "step": 1670
    },
    {
      "epoch": 15.700934579439252,
      "grad_norm": 0.22859309613704681,
      "learning_rate": 2.390663615179528e-05,
      "loss": 0.0145,
      "step": 1680
    },
    {
      "epoch": 15.794392523364486,
      "grad_norm": 0.16665244102478027,
      "learning_rate": 2.291118486828071e-05,
      "loss": 0.0188,
      "step": 1690
    },
    {
      "epoch": 15.88785046728972,
      "grad_norm": 0.2959059774875641,
      "learning_rate": 2.193421428171685e-05,
      "loss": 0.0176,
      "step": 1700
    },
    {
      "epoch": 15.981308411214954,
      "grad_norm": 0.2710016071796417,
      "learning_rate": 2.0975958603737477e-05,
      "loss": 0.0159,
      "step": 1710
    },
    {
      "epoch": 16.074766355140188,
      "grad_norm": 0.18647173047065735,
      "learning_rate": 2.0036647559403997e-05,
      "loss": 0.0137,
      "step": 1720
    },
    {
      "epoch": 16.16822429906542,
      "grad_norm": 0.17502279579639435,
      "learning_rate": 1.9116506332132734e-05,
      "loss": 0.0149,
      "step": 1730
    },
    {
      "epoch": 16.261682242990656,
      "grad_norm": 0.24166816473007202,
      "learning_rate": 1.8215755509711196e-05,
      "loss": 0.0152,
      "step": 1740
    },
    {
      "epoch": 16.35514018691589,
      "grad_norm": 0.1839790791273117,
      "learning_rate": 1.7334611031416047e-05,
      "loss": 0.0144,
      "step": 1750
    },
    {
      "epoch": 16.44859813084112,
      "grad_norm": 0.20211663842201233,
      "learning_rate": 1.647328413624537e-05,
      "loss": 0.0149,
      "step": 1760
    },
    {
      "epoch": 16.542056074766354,
      "grad_norm": 0.14310133457183838,
      "learning_rate": 1.5631981312277886e-05,
      "loss": 0.0138,
      "step": 1770
    },
    {
      "epoch": 16.635514018691588,
      "grad_norm": 0.1992831528186798,
      "learning_rate": 1.4810904247170953e-05,
      "loss": 0.0145,
      "step": 1780
    },
    {
      "epoch": 16.72897196261682,
      "grad_norm": 0.20767998695373535,
      "learning_rate": 1.4010249779809503e-05,
      "loss": 0.0142,
      "step": 1790
    },
    {
      "epoch": 16.822429906542055,
      "grad_norm": 0.11333203315734863,
      "learning_rate": 1.3230209853117448e-05,
      "loss": 0.0144,
      "step": 1800
    },
    {
      "epoch": 16.91588785046729,
      "grad_norm": 0.10632842034101486,
      "learning_rate": 1.2470971468042558e-05,
      "loss": 0.0139,
      "step": 1810
    },
    {
      "epoch": 17.009345794392523,
      "grad_norm": 0.18203343451023102,
      "learning_rate": 1.1732716638726382e-05,
      "loss": 0.014,
      "step": 1820
    },
    {
      "epoch": 17.102803738317757,
      "grad_norm": 0.21806679666042328,
      "learning_rate": 1.1015622348869491e-05,
      "loss": 0.0129,
      "step": 1830
    },
    {
      "epoch": 17.19626168224299,
      "grad_norm": 0.19995306432247162,
      "learning_rate": 1.0319860509302637e-05,
      "loss": 0.0131,
      "step": 1840
    },
    {
      "epoch": 17.289719626168225,
      "grad_norm": 0.0872287005186081,
      "learning_rate": 9.64559791677434e-06,
      "loss": 0.0153,
      "step": 1850
    },
    {
      "epoch": 17.38317757009346,
      "grad_norm": 0.13513228297233582,
      "learning_rate": 8.992996213964067e-06,
      "loss": 0.0158,
      "step": 1860
    },
    {
      "epoch": 17.476635514018692,
      "grad_norm": 0.1807233691215515,
      "learning_rate": 8.362211850731382e-06,
      "loss": 0.0113,
      "step": 1870
    },
    {
      "epoch": 17.570093457943926,
      "grad_norm": 0.19154734909534454,
      "learning_rate": 7.75339604660973e-06,
      "loss": 0.0161,
      "step": 1880
    },
    {
      "epoch": 17.66355140186916,
      "grad_norm": 0.2096325159072876,
      "learning_rate": 7.166694754554193e-06,
      "loss": 0.016,
      "step": 1890
    },
    {
      "epoch": 17.757009345794394,
      "grad_norm": 0.16648273169994354,
      "learning_rate": 6.602248625951934e-06,
      "loss": 0.013,
      "step": 1900
    },
    {
      "epoch": 17.850467289719624,
      "grad_norm": 0.19170570373535156,
      "learning_rate": 6.0601929769033115e-06,
      "loss": 0.0116,
      "step": 1910
    },
    {
      "epoch": 17.94392523364486,
      "grad_norm": 0.14835980534553528,
      "learning_rate": 5.540657755782341e-06,
      "loss": 0.0136,
      "step": 1920
    },
    {
      "epoch": 18.037383177570092,
      "grad_norm": 0.10613177716732025,
      "learning_rate": 5.043767512083797e-06,
      "loss": 0.012,
      "step": 1930
    },
    {
      "epoch": 18.130841121495326,
      "grad_norm": 0.18721149861812592,
      "learning_rate": 4.569641366564614e-06,
      "loss": 0.0153,
      "step": 1940
    },
    {
      "epoch": 18.22429906542056,
      "grad_norm": 0.17616799473762512,
      "learning_rate": 4.1183929826868476e-06,
      "loss": 0.0115,
      "step": 1950
    },
    {
      "epoch": 18.317757009345794,
      "grad_norm": 0.2676970064640045,
      "learning_rate": 3.6901305393687634e-06,
      "loss": 0.0113,
      "step": 1960
    },
    {
      "epoch": 18.411214953271028,
      "grad_norm": 0.10376067459583282,
      "learning_rate": 3.284956705050868e-06,
      "loss": 0.0122,
      "step": 1970
    },
    {
      "epoch": 18.50467289719626,
      "grad_norm": 0.12165194004774094,
      "learning_rate": 2.902968613082957e-06,
      "loss": 0.0134,
      "step": 1980
    },
    {
      "epoch": 18.598130841121495,
      "grad_norm": 0.15639284253120422,
      "learning_rate": 2.5442578384380245e-06,
      "loss": 0.0115,
      "step": 1990
    },
    {
      "epoch": 18.69158878504673,
      "grad_norm": 0.14054954051971436,
      "learning_rate": 2.208910375758888e-06,
      "loss": 0.0111,
      "step": 2000
    },
    {
      "epoch": 18.785046728971963,
      "grad_norm": 0.13985030353069305,
      "learning_rate": 1.8970066187423364e-06,
      "loss": 0.0115,
      "step": 2010
    },
    {
      "epoch": 18.878504672897197,
      "grad_norm": 0.12971541285514832,
      "learning_rate": 1.6086213408662232e-06,
      "loss": 0.0117,
      "step": 2020
    },
    {
      "epoch": 18.97196261682243,
      "grad_norm": 0.17401276528835297,
      "learning_rate": 1.343823677463818e-06,
      "loss": 0.0139,
      "step": 2030
    },
    {
      "epoch": 19.065420560747665,
      "grad_norm": 0.24669478833675385,
      "learning_rate": 1.1026771091498079e-06,
      "loss": 0.0143,
      "step": 2040
    },
    {
      "epoch": 19.1588785046729,
      "grad_norm": 0.1944689005613327,
      "learning_rate": 8.852394466019487e-07,
      "loss": 0.0116,
      "step": 2050
    },
    {
      "epoch": 19.252336448598133,
      "grad_norm": 0.15337271988391876,
      "learning_rate": 6.915628167019628e-07,
      "loss": 0.0126,
      "step": 2060
    },
    {
      "epoch": 19.345794392523363,
      "grad_norm": 0.22086739540100098,
      "learning_rate": 5.216936500389679e-07,
      "loss": 0.0123,
      "step": 2070
    },
    {
      "epoch": 19.439252336448597,
      "grad_norm": 0.10731634497642517,
      "learning_rate": 3.7567266977868165e-07,
      "loss": 0.0117,
      "step": 2080
    },
    {
      "epoch": 19.53271028037383,
      "grad_norm": 0.16525092720985413,
      "learning_rate": 2.535348819006078e-07,
      "loss": 0.0142,
      "step": 2090
    },
    {
      "epoch": 19.626168224299064,
      "grad_norm": 0.22534893453121185,
      "learning_rate": 1.5530956680606114e-07,
      "loss": 0.0136,
      "step": 2100
    },
    {
      "epoch": 19.7196261682243,
      "grad_norm": 0.14636439085006714,
      "learning_rate": 8.102027229868192e-08,
      "loss": 0.0098,
      "step": 2110
    },
    {
      "epoch": 19.813084112149532,
      "grad_norm": 0.1717785745859146,
      "learning_rate": 3.0684807939262985e-08,
      "loss": 0.0105,
      "step": 2120
    },
    {
      "epoch": 19.906542056074766,
      "grad_norm": 0.15287938714027405,
      "learning_rate": 4.315240776175955e-09,
      "loss": 0.0135,
      "step": 2130
    },
    {
      "epoch": 19.962616822429908,
      "step": 2136,
      "total_flos": 3.092595209923816e+17,
      "train_loss": 0.03558622600950217,
      "train_runtime": 2455.8794,
      "train_samples_per_second": 55.664,
      "train_steps_per_second": 0.87
    }
  ],
  "logging_steps": 10,
  "max_steps": 2136,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 20,
  "save_steps": 10000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.092595209923816e+17,
  "train_batch_size": 64,
  "trial_name": null,
  "trial_params": null
}