{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 19.87323943661972,
  "eval_steps": 500,
  "global_step": 2822,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.07042253521126761,
      "grad_norm": 5.301488399505615,
      "learning_rate": 1.4084507042253523e-05,
      "loss": 0.9444,
      "step": 10
    },
    {
      "epoch": 0.14084507042253522,
      "grad_norm": 2.110762119293213,
      "learning_rate": 2.8169014084507046e-05,
      "loss": 0.4977,
      "step": 20
    },
    {
      "epoch": 0.2112676056338028,
      "grad_norm": 1.7739307880401611,
      "learning_rate": 4.225352112676056e-05,
      "loss": 0.296,
      "step": 30
    },
    {
      "epoch": 0.28169014084507044,
      "grad_norm": 1.6081597805023193,
      "learning_rate": 5.633802816901409e-05,
      "loss": 0.2143,
      "step": 40
    },
    {
      "epoch": 0.352112676056338,
      "grad_norm": 1.9928644895553589,
      "learning_rate": 7.042253521126761e-05,
      "loss": 0.1679,
      "step": 50
    },
    {
      "epoch": 0.4225352112676056,
      "grad_norm": 0.8647320866584778,
      "learning_rate": 8.450704225352113e-05,
      "loss": 0.146,
      "step": 60
    },
    {
      "epoch": 0.49295774647887325,
      "grad_norm": 0.8146809339523315,
      "learning_rate": 9.859154929577466e-05,
      "loss": 0.1407,
      "step": 70
    },
    {
      "epoch": 0.5633802816901409,
      "grad_norm": 0.9443349838256836,
      "learning_rate": 0.00011267605633802819,
      "loss": 0.1268,
      "step": 80
    },
    {
      "epoch": 0.6338028169014085,
      "grad_norm": 1.322043776512146,
      "learning_rate": 0.0001267605633802817,
      "loss": 0.1215,
      "step": 90
    },
    {
      "epoch": 0.704225352112676,
      "grad_norm": 0.9595595598220825,
      "learning_rate": 0.00014084507042253522,
      "loss": 0.1024,
      "step": 100
    },
    {
      "epoch": 0.7746478873239436,
      "grad_norm": 0.743903636932373,
      "learning_rate": 0.00015492957746478874,
      "loss": 0.1012,
      "step": 110
    },
    {
      "epoch": 0.8450704225352113,
      "grad_norm": 0.8585343360900879,
      "learning_rate": 0.00016901408450704225,
      "loss": 0.0885,
      "step": 120
    },
    {
      "epoch": 0.9154929577464789,
      "grad_norm": 0.8720231056213379,
      "learning_rate": 0.0001830985915492958,
      "loss": 0.082,
      "step": 130
    },
    {
      "epoch": 0.9859154929577465,
      "grad_norm": 0.7080922722816467,
      "learning_rate": 0.0001971830985915493,
      "loss": 0.0861,
      "step": 140
    },
    {
      "epoch": 1.056338028169014,
      "grad_norm": 0.9267153143882751,
      "learning_rate": 0.00019999560279257314,
      "loss": 0.089,
      "step": 150
    },
    {
      "epoch": 1.1267605633802817,
      "grad_norm": 0.5979682803153992,
      "learning_rate": 0.0001999777398001667,
      "loss": 0.0797,
      "step": 160
    },
    {
      "epoch": 1.1971830985915493,
      "grad_norm": 0.4289480447769165,
      "learning_rate": 0.00019994613865002077,
      "loss": 0.0684,
      "step": 170
    },
    {
      "epoch": 1.267605633802817,
      "grad_norm": 0.41163209080696106,
      "learning_rate": 0.00019990080368451776,
      "loss": 0.0644,
      "step": 180
    },
    {
      "epoch": 1.3380281690140845,
      "grad_norm": 0.5105245113372803,
      "learning_rate": 0.00019984174113323353,
      "loss": 0.0668,
      "step": 190
    },
    {
      "epoch": 1.408450704225352,
      "grad_norm": 0.48342588543891907,
      "learning_rate": 0.00019976895911208109,
      "loss": 0.0607,
      "step": 200
    },
    {
      "epoch": 1.4788732394366197,
      "grad_norm": 0.7470546960830688,
      "learning_rate": 0.00019968246762219565,
      "loss": 0.0624,
      "step": 210
    },
    {
      "epoch": 1.5492957746478875,
      "grad_norm": 0.6888002157211304,
      "learning_rate": 0.0001995822785485601,
      "loss": 0.0624,
      "step": 220
    },
    {
      "epoch": 1.619718309859155,
      "grad_norm": 0.5235802531242371,
      "learning_rate": 0.00019946840565837203,
      "loss": 0.0597,
      "step": 230
    },
    {
      "epoch": 1.6901408450704225,
      "grad_norm": 0.5001630783081055,
      "learning_rate": 0.0001993408645991519,
      "loss": 0.0601,
      "step": 240
    },
    {
      "epoch": 1.76056338028169,
      "grad_norm": 2.6329758167266846,
      "learning_rate": 0.00019919967289659286,
      "loss": 0.0538,
      "step": 250
    },
    {
      "epoch": 1.8309859154929577,
      "grad_norm": 0.5016240477561951,
      "learning_rate": 0.00019904484995215247,
      "loss": 0.0554,
      "step": 260
    },
    {
      "epoch": 1.9014084507042255,
      "grad_norm": 0.4060329496860504,
      "learning_rate": 0.00019887641704038688,
      "loss": 0.0573,
      "step": 270
    },
    {
      "epoch": 1.971830985915493,
      "grad_norm": 0.28704750537872314,
      "learning_rate": 0.00019869439730602726,
      "loss": 0.0544,
      "step": 280
    },
    {
      "epoch": 2.0422535211267605,
      "grad_norm": 0.47512659430503845,
      "learning_rate": 0.00019849881576079946,
      "loss": 0.0539,
      "step": 290
    },
    {
      "epoch": 2.112676056338028,
      "grad_norm": 0.3640986382961273,
      "learning_rate": 0.00019828969927998714,
      "loss": 0.0519,
      "step": 300
    },
    {
      "epoch": 2.183098591549296,
      "grad_norm": 0.3687652349472046,
      "learning_rate": 0.00019806707659873887,
      "loss": 0.0539,
      "step": 310
    },
    {
      "epoch": 2.2535211267605635,
      "grad_norm": 0.260812908411026,
      "learning_rate": 0.00019783097830811935,
      "loss": 0.0467,
      "step": 320
    },
    {
      "epoch": 2.323943661971831,
      "grad_norm": 0.34940680861473083,
      "learning_rate": 0.00019758143685090599,
      "loss": 0.0467,
      "step": 330
    },
    {
      "epoch": 2.3943661971830985,
      "grad_norm": 0.42112305760383606,
      "learning_rate": 0.00019731848651713085,
      "loss": 0.0542,
      "step": 340
    },
    {
      "epoch": 2.464788732394366,
      "grad_norm": 0.347625195980072,
      "learning_rate": 0.00019704216343936873,
      "loss": 0.0459,
      "step": 350
    },
    {
      "epoch": 2.535211267605634,
      "grad_norm": 0.4011363685131073,
      "learning_rate": 0.00019675250558777219,
      "loss": 0.0522,
      "step": 360
    },
    {
      "epoch": 2.6056338028169015,
      "grad_norm": 0.9218616485595703,
      "learning_rate": 0.00019644955276485387,
      "loss": 0.0423,
      "step": 370
    },
    {
      "epoch": 2.676056338028169,
      "grad_norm": 0.5084503889083862,
      "learning_rate": 0.00019613334660001724,
      "loss": 0.0439,
      "step": 380
    },
    {
      "epoch": 2.7464788732394365,
      "grad_norm": 0.3616188168525696,
      "learning_rate": 0.00019580393054383622,
      "loss": 0.0489,
      "step": 390
    },
    {
      "epoch": 2.816901408450704,
      "grad_norm": 0.43031033873558044,
      "learning_rate": 0.0001954613498620845,
      "loss": 0.0441,
      "step": 400
    },
    {
      "epoch": 2.887323943661972,
      "grad_norm": 0.4344836175441742,
      "learning_rate": 0.00019510565162951537,
      "loss": 0.0477,
      "step": 410
    },
    {
      "epoch": 2.9577464788732395,
      "grad_norm": 0.34272074699401855,
      "learning_rate": 0.00019473688472339336,
      "loss": 0.0443,
      "step": 420
    },
    {
      "epoch": 3.028169014084507,
      "grad_norm": 0.3591594099998474,
      "learning_rate": 0.00019435509981677762,
      "loss": 0.0454,
      "step": 430
    },
    {
      "epoch": 3.0985915492957745,
      "grad_norm": 0.2543060779571533,
      "learning_rate": 0.00019396034937155897,
      "loss": 0.0458,
      "step": 440
    },
    {
      "epoch": 3.169014084507042,
      "grad_norm": 0.32577958703041077,
      "learning_rate": 0.00019355268763125095,
      "loss": 0.044,
      "step": 450
    },
    {
      "epoch": 3.23943661971831,
      "grad_norm": 0.42566004395484924,
      "learning_rate": 0.00019313217061353615,
      "loss": 0.047,
      "step": 460
    },
    {
      "epoch": 3.3098591549295775,
      "grad_norm": 0.4882064163684845,
      "learning_rate": 0.00019269885610256865,
      "loss": 0.0429,
      "step": 470
    },
    {
      "epoch": 3.380281690140845,
      "grad_norm": 0.4627586901187897,
      "learning_rate": 0.00019225280364103383,
      "loss": 0.0447,
      "step": 480
    },
    {
      "epoch": 3.4507042253521125,
      "grad_norm": 0.4817451536655426,
      "learning_rate": 0.00019179407452196636,
      "loss": 0.0404,
      "step": 490
    },
    {
      "epoch": 3.52112676056338,
      "grad_norm": 0.3949111998081207,
      "learning_rate": 0.00019132273178032794,
      "loss": 0.0428,
      "step": 500
    },
    {
      "epoch": 3.591549295774648,
      "grad_norm": 0.22799795866012573,
      "learning_rate": 0.00019083884018434547,
      "loss": 0.036,
      "step": 510
    },
    {
      "epoch": 3.6619718309859155,
      "grad_norm": 0.3933974802494049,
      "learning_rate": 0.000190342466226611,
      "loss": 0.0386,
      "step": 520
    },
    {
      "epoch": 3.732394366197183,
      "grad_norm": 0.8060548305511475,
      "learning_rate": 0.0001898336781149451,
      "loss": 0.0374,
      "step": 530
    },
    {
      "epoch": 3.802816901408451,
      "grad_norm": 0.4396229684352875,
      "learning_rate": 0.000189312545763024,
      "loss": 0.0444,
      "step": 540
    },
    {
      "epoch": 3.873239436619718,
      "grad_norm": 0.28071898221969604,
      "learning_rate": 0.0001887791407807728,
      "loss": 0.0351,
      "step": 550
    },
    {
      "epoch": 3.943661971830986,
      "grad_norm": 1.3314933776855469,
      "learning_rate": 0.00018823353646452517,
      "loss": 0.0378,
      "step": 560
    },
    {
      "epoch": 4.014084507042254,
      "grad_norm": 0.4084491431713104,
      "learning_rate": 0.00018767580778695185,
      "loss": 0.0373,
      "step": 570
    },
    {
      "epoch": 4.084507042253521,
      "grad_norm": 0.3445165753364563,
      "learning_rate": 0.00018710603138675827,
      "loss": 0.043,
      "step": 580
    },
    {
      "epoch": 4.154929577464789,
      "grad_norm": 0.3539944887161255,
      "learning_rate": 0.0001865242855581534,
      "loss": 0.0395,
      "step": 590
    },
    {
      "epoch": 4.225352112676056,
      "grad_norm": 0.3427730202674866,
      "learning_rate": 0.00018593065024009146,
      "loss": 0.0396,
      "step": 600
    },
    {
      "epoch": 4.295774647887324,
      "grad_norm": 0.5173912644386292,
      "learning_rate": 0.00018532520700528712,
      "loss": 0.0388,
      "step": 610
    },
    {
      "epoch": 4.366197183098592,
      "grad_norm": 0.302499383687973,
      "learning_rate": 0.00018470803904900633,
      "loss": 0.0391,
      "step": 620
    },
    {
      "epoch": 4.436619718309859,
      "grad_norm": 0.37678262591362,
      "learning_rate": 0.00018407923117763462,
      "loss": 0.0402,
      "step": 630
    },
    {
      "epoch": 4.507042253521127,
      "grad_norm": 0.4250379800796509,
      "learning_rate": 0.00018343886979702327,
      "loss": 0.0403,
      "step": 640
    },
    {
      "epoch": 4.577464788732394,
      "grad_norm": 0.30737724900245667,
      "learning_rate": 0.00018278704290061645,
      "loss": 0.042,
      "step": 650
    },
    {
      "epoch": 4.647887323943662,
      "grad_norm": 0.3090226650238037,
      "learning_rate": 0.0001821238400573595,
      "loss": 0.0355,
      "step": 660
    },
    {
      "epoch": 4.71830985915493,
      "grad_norm": 0.3264664113521576,
      "learning_rate": 0.00018144935239939144,
      "loss": 0.0349,
      "step": 670
    },
    {
      "epoch": 4.788732394366197,
      "grad_norm": 0.5748451352119446,
      "learning_rate": 0.00018076367260952202,
      "loss": 0.033,
      "step": 680
    },
    {
      "epoch": 4.859154929577465,
      "grad_norm": 0.28208673000335693,
      "learning_rate": 0.0001800668949084961,
      "loss": 0.0341,
      "step": 690
    },
    {
      "epoch": 4.929577464788732,
      "grad_norm": 0.41251471638679504,
      "learning_rate": 0.00017935911504204645,
      "loss": 0.0355,
      "step": 700
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.48121050000190735,
      "learning_rate": 0.0001786404302677374,
      "loss": 0.0316,
      "step": 710
    },
    {
      "epoch": 5.070422535211268,
      "grad_norm": 0.45240846276283264,
      "learning_rate": 0.0001779109393416001,
      "loss": 0.0391,
      "step": 720
    },
    {
      "epoch": 5.140845070422535,
      "grad_norm": 0.6922265291213989,
      "learning_rate": 0.0001771707425045626,
      "loss": 0.0375,
      "step": 730
    },
    {
      "epoch": 5.211267605633803,
      "grad_norm": 0.3153751492500305,
      "learning_rate": 0.00017641994146867516,
      "loss": 0.0372,
      "step": 740
    },
    {
      "epoch": 5.28169014084507,
      "grad_norm": 0.5754887461662292,
      "learning_rate": 0.00017565863940313415,
      "loss": 0.0389,
      "step": 750
    },
    {
      "epoch": 5.352112676056338,
      "grad_norm": 0.5472623109817505,
      "learning_rate": 0.00017488694092010506,
      "loss": 0.0431,
      "step": 760
    },
    {
      "epoch": 5.422535211267606,
      "grad_norm": 0.370579332113266,
      "learning_rate": 0.0001741049520603476,
      "loss": 0.0367,
      "step": 770
    },
    {
      "epoch": 5.492957746478873,
      "grad_norm": 0.27683746814727783,
      "learning_rate": 0.00017331278027864455,
      "loss": 0.0364,
      "step": 780
    },
    {
      "epoch": 5.563380281690141,
      "grad_norm": 0.2768910527229309,
      "learning_rate": 0.00017251053442903595,
      "loss": 0.032,
      "step": 790
    },
    {
      "epoch": 5.633802816901408,
      "grad_norm": 0.30943918228149414,
      "learning_rate": 0.00017169832474986136,
      "loss": 0.0321,
      "step": 800
    },
    {
      "epoch": 5.704225352112676,
      "grad_norm": 0.36983174085617065,
      "learning_rate": 0.0001708762628486117,
      "loss": 0.0358,
      "step": 810
    },
    {
      "epoch": 5.774647887323944,
      "grad_norm": 0.35293862223625183,
      "learning_rate": 0.0001700444616865931,
      "loss": 0.0312,
      "step": 820
    },
    {
      "epoch": 5.845070422535211,
      "grad_norm": 0.3581668436527252,
      "learning_rate": 0.0001692030355634046,
      "loss": 0.0359,
      "step": 830
    },
    {
      "epoch": 5.915492957746479,
      "grad_norm": 0.22088691592216492,
      "learning_rate": 0.00016835210010123205,
      "loss": 0.031,
      "step": 840
    },
    {
      "epoch": 5.985915492957746,
      "grad_norm": 0.26273787021636963,
      "learning_rate": 0.00016749177222896003,
      "loss": 0.0321,
      "step": 850
    },
    {
      "epoch": 6.056338028169014,
      "grad_norm": 0.2742927670478821,
      "learning_rate": 0.00016662217016610465,
      "loss": 0.0287,
      "step": 860
    },
    {
      "epoch": 6.126760563380282,
      "grad_norm": 0.25638335943222046,
      "learning_rate": 0.0001657434134065686,
      "loss": 0.0334,
      "step": 870
    },
    {
      "epoch": 6.197183098591549,
      "grad_norm": 0.39665618538856506,
      "learning_rate": 0.00016485562270222124,
      "loss": 0.0339,
      "step": 880
    },
    {
      "epoch": 6.267605633802817,
      "grad_norm": 0.2293689101934433,
      "learning_rate": 0.00016395892004630587,
      "loss": 0.0318,
      "step": 890
    },
    {
      "epoch": 6.338028169014084,
      "grad_norm": 0.2861061692237854,
      "learning_rate": 0.00016305342865667632,
      "loss": 0.0331,
      "step": 900
    },
    {
      "epoch": 6.408450704225352,
      "grad_norm": 0.43932482600212097,
      "learning_rate": 0.00016213927295886547,
      "loss": 0.0342,
      "step": 910
    },
    {
      "epoch": 6.47887323943662,
      "grad_norm": 0.5310227274894714,
      "learning_rate": 0.0001612165785689874,
      "loss": 0.0321,
      "step": 920
    },
    {
      "epoch": 6.549295774647887,
      "grad_norm": 0.2671055793762207,
      "learning_rate": 0.00016028547227647647,
      "loss": 0.0316,
      "step": 930
    },
    {
      "epoch": 6.619718309859155,
      "grad_norm": 0.36509400606155396,
      "learning_rate": 0.0001593460820266647,
      "loss": 0.0357,
      "step": 940
    },
    {
      "epoch": 6.690140845070422,
      "grad_norm": 0.2598735988140106,
      "learning_rate": 0.00015839853690320074,
      "loss": 0.0335,
      "step": 950
    },
    {
      "epoch": 6.76056338028169,
      "grad_norm": 0.28324615955352783,
      "learning_rate": 0.00015744296711031201,
      "loss": 0.0351,
      "step": 960
    },
    {
      "epoch": 6.830985915492958,
      "grad_norm": 0.2627483010292053,
      "learning_rate": 0.0001564795039549131,
      "loss": 0.0332,
      "step": 970
    },
    {
      "epoch": 6.901408450704225,
      "grad_norm": 0.3434940278530121,
      "learning_rate": 0.00015550827982856275,
      "loss": 0.0296,
      "step": 980
    },
    {
      "epoch": 6.971830985915493,
      "grad_norm": 0.2355423867702484,
      "learning_rate": 0.00015452942818927143,
      "loss": 0.0327,
      "step": 990
    },
    {
      "epoch": 7.042253521126761,
      "grad_norm": 0.7167981266975403,
      "learning_rate": 0.00015354308354316272,
      "loss": 0.0365,
      "step": 1000
    },
    {
      "epoch": 7.112676056338028,
      "grad_norm": 0.33783429861068726,
      "learning_rate": 0.00015254938142599046,
      "loss": 0.0287,
      "step": 1010
    },
    {
      "epoch": 7.183098591549296,
      "grad_norm": 0.23960083723068237,
      "learning_rate": 0.0001515484583845146,
      "loss": 0.0333,
      "step": 1020
    },
    {
      "epoch": 7.253521126760563,
      "grad_norm": 0.15994220972061157,
      "learning_rate": 0.0001505404519577379,
      "loss": 0.0316,
      "step": 1030
    },
    {
      "epoch": 7.323943661971831,
      "grad_norm": 0.5600018501281738,
      "learning_rate": 0.00014952550065800633,
      "loss": 0.0262,
      "step": 1040
    },
    {
      "epoch": 7.394366197183099,
      "grad_norm": 0.2552376687526703,
      "learning_rate": 0.00014850374395197603,
      "loss": 0.0268,
      "step": 1050
    },
    {
      "epoch": 7.464788732394366,
      "grad_norm": 0.299656480550766,
      "learning_rate": 0.00014747532224144874,
      "loss": 0.0249,
      "step": 1060
    },
    {
      "epoch": 7.535211267605634,
      "grad_norm": 0.23284365236759186,
      "learning_rate": 0.00014644037684407882,
      "loss": 0.0243,
      "step": 1070
    },
    {
      "epoch": 7.605633802816901,
      "grad_norm": 0.24462944269180298,
      "learning_rate": 0.00014539904997395468,
      "loss": 0.0301,
      "step": 1080
    },
    {
      "epoch": 7.676056338028169,
      "grad_norm": 0.3585796654224396,
      "learning_rate": 0.00014435148472205663,
      "loss": 0.0253,
      "step": 1090
    },
    {
      "epoch": 7.746478873239437,
      "grad_norm": 0.2767147421836853,
      "learning_rate": 0.0001432978250365945,
      "loss": 0.0274,
      "step": 1100
    },
    {
      "epoch": 7.816901408450704,
      "grad_norm": 0.3437223434448242,
      "learning_rate": 0.00014223821570322762,
      "loss": 0.0264,
      "step": 1110
    },
    {
      "epoch": 7.887323943661972,
      "grad_norm": 0.18395860493183136,
      "learning_rate": 0.00014117280232516907,
      "loss": 0.0288,
      "step": 1120
    },
    {
      "epoch": 7.957746478873239,
      "grad_norm": 0.2312847226858139,
      "learning_rate": 0.00014010173130317853,
      "loss": 0.0277,
      "step": 1130
    },
    {
      "epoch": 8.028169014084508,
      "grad_norm": 0.2692025601863861,
      "learning_rate": 0.00013902514981544453,
      "loss": 0.0272,
      "step": 1140
    },
    {
      "epoch": 8.098591549295774,
      "grad_norm": 0.18669405579566956,
      "learning_rate": 0.00013794320579736083,
      "loss": 0.0267,
      "step": 1150
    },
    {
      "epoch": 8.169014084507042,
      "grad_norm": 0.23402738571166992,
      "learning_rate": 0.00013685604792119805,
      "loss": 0.0274,
      "step": 1160
    },
    {
      "epoch": 8.23943661971831,
      "grad_norm": 0.18943500518798828,
      "learning_rate": 0.00013576382557567432,
      "loss": 0.0268,
      "step": 1170
    },
    {
      "epoch": 8.309859154929578,
      "grad_norm": 0.2929067015647888,
      "learning_rate": 0.00013466668884542742,
      "loss": 0.0257,
      "step": 1180
    },
    {
      "epoch": 8.380281690140846,
      "grad_norm": 0.2052321434020996,
      "learning_rate": 0.0001335647884903915,
      "loss": 0.0243,
      "step": 1190
    },
    {
      "epoch": 8.450704225352112,
      "grad_norm": 0.17795851826667786,
      "learning_rate": 0.0001324582759250806,
      "loss": 0.0217,
      "step": 1200
    },
    {
      "epoch": 8.52112676056338,
      "grad_norm": 0.2322290539741516,
      "learning_rate": 0.00013134730319778258,
      "loss": 0.0259,
      "step": 1210
    },
    {
      "epoch": 8.591549295774648,
      "grad_norm": 0.35604098439216614,
      "learning_rate": 0.00013023202296966592,
      "loss": 0.024,
      "step": 1220
    },
    {
      "epoch": 8.661971830985916,
      "grad_norm": 0.22043980658054352,
      "learning_rate": 0.000129112588493802,
      "loss": 0.0266,
      "step": 1230
    },
    {
      "epoch": 8.732394366197184,
      "grad_norm": 0.1932317167520523,
      "learning_rate": 0.00012798915359410636,
      "loss": 0.0255,
      "step": 1240
    },
    {
      "epoch": 8.80281690140845,
      "grad_norm": 0.2350931167602539,
      "learning_rate": 0.0001268618726442016,
      "loss": 0.0234,
      "step": 1250
    },
    {
      "epoch": 8.873239436619718,
      "grad_norm": 0.2676509916782379,
      "learning_rate": 0.00012573090054620418,
      "loss": 0.0245,
      "step": 1260
    },
    {
      "epoch": 8.943661971830986,
      "grad_norm": 0.3143276572227478,
      "learning_rate": 0.00012459639270943944,
      "loss": 0.0234,
      "step": 1270
    },
    {
      "epoch": 9.014084507042254,
      "grad_norm": 0.426460325717926,
      "learning_rate": 0.00012345850502908608,
      "loss": 0.0237,
      "step": 1280
    },
    {
      "epoch": 9.084507042253522,
      "grad_norm": 0.257888525724411,
      "learning_rate": 0.00012231739386475457,
      "loss": 0.0303,
      "step": 1290
    },
    {
      "epoch": 9.154929577464788,
      "grad_norm": 0.3120458126068115,
      "learning_rate": 0.00012117321601900114,
      "loss": 0.0262,
      "step": 1300
    },
    {
      "epoch": 9.225352112676056,
      "grad_norm": 0.20543567836284637,
      "learning_rate": 0.00012002612871578143,
      "loss": 0.0254,
      "step": 1310
    },
    {
      "epoch": 9.295774647887324,
      "grad_norm": 0.3000582754611969,
      "learning_rate": 0.00011887628957884596,
      "loss": 0.0215,
      "step": 1320
    },
    {
      "epoch": 9.366197183098592,
      "grad_norm": 0.2910151779651642,
      "learning_rate": 0.00011772385661008061,
      "loss": 0.0237,
      "step": 1330
    },
    {
      "epoch": 9.43661971830986,
      "grad_norm": 0.1984262466430664,
      "learning_rate": 0.00011656898816779538,
      "loss": 0.0325,
      "step": 1340
    },
    {
      "epoch": 9.507042253521126,
      "grad_norm": 0.19687724113464355,
      "learning_rate": 0.00011541184294496392,
      "loss": 0.0239,
      "step": 1350
    },
    {
      "epoch": 9.577464788732394,
      "grad_norm": 0.22585156559944153,
      "learning_rate": 0.00011425257994741723,
      "loss": 0.026,
      "step": 1360
    },
    {
      "epoch": 9.647887323943662,
      "grad_norm": 0.22957830131053925,
      "learning_rate": 0.00011309135847199431,
      "loss": 0.0231,
      "step": 1370
    },
    {
      "epoch": 9.71830985915493,
      "grad_norm": 0.31176555156707764,
      "learning_rate": 0.00011192833808465289,
      "loss": 0.0239,
      "step": 1380
    },
    {
      "epoch": 9.788732394366198,
      "grad_norm": 0.27523675560951233,
      "learning_rate": 0.00011076367859854304,
      "loss": 0.0208,
      "step": 1390
    },
    {
      "epoch": 9.859154929577464,
      "grad_norm": 0.23288507759571075,
      "learning_rate": 0.00010959754005204695,
      "loss": 0.0234,
      "step": 1400
    },
    {
      "epoch": 9.929577464788732,
      "grad_norm": 0.24819090962409973,
      "learning_rate": 0.00010843008268678766,
      "loss": 0.0226,
      "step": 1410
    },
    {
      "epoch": 10.0,
      "grad_norm": 1.4893931150436401,
      "learning_rate": 0.0001072614669256101,
      "loss": 0.0235,
      "step": 1420
    },
    {
      "epoch": 10.070422535211268,
      "grad_norm": 0.184897780418396,
      "learning_rate": 0.00010609185335053669,
      "loss": 0.0219,
      "step": 1430
    },
    {
      "epoch": 10.140845070422536,
      "grad_norm": 0.1725136637687683,
      "learning_rate": 0.00010492140268070177,
      "loss": 0.0225,
      "step": 1440
    },
    {
      "epoch": 10.211267605633802,
      "grad_norm": 0.19529558718204498,
      "learning_rate": 0.00010375027575026663,
      "loss": 0.0189,
      "step": 1450
    },
    {
      "epoch": 10.28169014084507,
      "grad_norm": 0.25408563017845154,
      "learning_rate": 0.00010257863348631895,
      "loss": 0.0226,
      "step": 1460
    },
    {
      "epoch": 10.352112676056338,
      "grad_norm": 0.1468944549560547,
      "learning_rate": 0.0001014066368867596,
      "loss": 0.0245,
      "step": 1470
    },
    {
      "epoch": 10.422535211267606,
      "grad_norm": 0.2108079046010971,
      "learning_rate": 0.00010023444699817939,
      "loss": 0.0208,
      "step": 1480
    },
    {
      "epoch": 10.492957746478874,
      "grad_norm": 0.16347095370292664,
      "learning_rate": 9.906222489372946e-05,
      "loss": 0.0208,
      "step": 1490
    },
    {
      "epoch": 10.56338028169014,
      "grad_norm": 0.7277833819389343,
      "learning_rate": 9.789013165098775e-05,
      "loss": 0.021,
      "step": 1500
    },
    {
      "epoch": 10.633802816901408,
      "grad_norm": 0.3126179873943329,
      "learning_rate": 9.671832832982502e-05,
      "loss": 0.0232,
      "step": 1510
    },
    {
      "epoch": 10.704225352112676,
      "grad_norm": 0.09515654295682907,
      "learning_rate": 9.554697595027334e-05,
      "loss": 0.0216,
      "step": 1520
    },
    {
      "epoch": 10.774647887323944,
      "grad_norm": 0.20428481698036194,
      "learning_rate": 9.437623547039991e-05,
      "loss": 0.0205,
      "step": 1530
    },
    {
      "epoch": 10.845070422535212,
      "grad_norm": 0.1914387196302414,
      "learning_rate": 9.320626776418938e-05,
      "loss": 0.0216,
      "step": 1540
    },
    {
      "epoch": 10.915492957746478,
      "grad_norm": 0.28580597043037415,
      "learning_rate": 9.203723359943802e-05,
      "loss": 0.0206,
      "step": 1550
    },
    {
      "epoch": 10.985915492957746,
      "grad_norm": 0.1552996188402176,
      "learning_rate": 9.086929361566206e-05,
      "loss": 0.0234,
      "step": 1560
    },
    {
      "epoch": 11.056338028169014,
      "grad_norm": 0.15982602536678314,
      "learning_rate": 8.970260830202399e-05,
      "loss": 0.0225,
      "step": 1570
    },
    {
      "epoch": 11.126760563380282,
      "grad_norm": 0.27742645144462585,
      "learning_rate": 8.85373379752794e-05,
      "loss": 0.023,
      "step": 1580
    },
    {
      "epoch": 11.19718309859155,
      "grad_norm": 0.24796386063098907,
      "learning_rate": 8.73736427577473e-05,
      "loss": 0.0219,
      "step": 1590
    },
    {
      "epoch": 11.267605633802816,
      "grad_norm": 0.15329182147979736,
      "learning_rate": 8.621168255530778e-05,
      "loss": 0.0205,
      "step": 1600
    },
    {
      "epoch": 11.338028169014084,
      "grad_norm": 0.21230141818523407,
      "learning_rate": 8.505161703542851e-05,
      "loss": 0.0212,
      "step": 1610
    },
    {
      "epoch": 11.408450704225352,
      "grad_norm": 0.1345348358154297,
      "learning_rate": 8.389360560522495e-05,
      "loss": 0.0169,
      "step": 1620
    },
    {
      "epoch": 11.47887323943662,
      "grad_norm": 0.3376161456108093,
      "learning_rate": 8.273780738955544e-05,
      "loss": 0.0178,
      "step": 1630
    },
    {
      "epoch": 11.549295774647888,
      "grad_norm": 0.17062005400657654,
      "learning_rate": 8.158438120915582e-05,
      "loss": 0.0216,
      "step": 1640
    },
    {
      "epoch": 11.619718309859154,
      "grad_norm": 0.29981690645217896,
      "learning_rate": 8.043348555881548e-05,
      "loss": 0.0184,
      "step": 1650
    },
    {
      "epoch": 11.690140845070422,
      "grad_norm": 0.2005750983953476,
      "learning_rate": 7.928527858559807e-05,
      "loss": 0.0176,
      "step": 1660
    },
    {
      "epoch": 11.76056338028169,
      "grad_norm": 0.36460545659065247,
      "learning_rate": 7.813991806711039e-05,
      "loss": 0.0181,
      "step": 1670
    },
    {
      "epoch": 11.830985915492958,
      "grad_norm": 0.19622518122196198,
      "learning_rate": 7.699756138982171e-05,
      "loss": 0.0191,
      "step": 1680
    },
    {
      "epoch": 11.901408450704226,
      "grad_norm": 0.20806631445884705,
      "learning_rate": 7.585836552743691e-05,
      "loss": 0.0204,
      "step": 1690
    },
    {
      "epoch": 11.971830985915492,
      "grad_norm": 0.1987942010164261,
      "learning_rate": 7.472248701932658e-05,
      "loss": 0.0229,
      "step": 1700
    },
    {
      "epoch": 12.04225352112676,
      "grad_norm": 0.3817981481552124,
      "learning_rate": 7.359008194901632e-05,
      "loss": 0.0219,
      "step": 1710
    },
    {
      "epoch": 12.112676056338028,
      "grad_norm": 0.5507696270942688,
      "learning_rate": 7.246130592273926e-05,
      "loss": 0.0187,
      "step": 1720
    },
    {
      "epoch": 12.183098591549296,
      "grad_norm": 0.14364100992679596,
      "learning_rate": 7.133631404805366e-05,
      "loss": 0.0165,
      "step": 1730
    },
    {
      "epoch": 12.253521126760564,
      "grad_norm": 0.21604971587657928,
      "learning_rate": 7.021526091252926e-05,
      "loss": 0.0211,
      "step": 1740
    },
    {
      "epoch": 12.323943661971832,
      "grad_norm": 0.1259509027004242,
      "learning_rate": 6.909830056250527e-05,
      "loss": 0.0173,
      "step": 1750
    },
    {
      "epoch": 12.394366197183098,
      "grad_norm": 0.27340298891067505,
      "learning_rate": 6.798558648192224e-05,
      "loss": 0.0225,
      "step": 1760
    },
    {
      "epoch": 12.464788732394366,
      "grad_norm": 0.15757574141025543,
      "learning_rate": 6.687727157123172e-05,
      "loss": 0.0187,
      "step": 1770
    },
    {
      "epoch": 12.535211267605634,
      "grad_norm": 0.18283964693546295,
      "learning_rate": 6.57735081263858e-05,
      "loss": 0.0175,
      "step": 1780
    },
    {
      "epoch": 12.605633802816902,
      "grad_norm": 0.12672746181488037,
      "learning_rate": 6.467444781790966e-05,
      "loss": 0.0196,
      "step": 1790
    },
    {
      "epoch": 12.676056338028168,
      "grad_norm": 0.16949881613254547,
      "learning_rate": 6.358024167006058e-05,
      "loss": 0.0206,
      "step": 1800
    },
    {
      "epoch": 12.746478873239436,
      "grad_norm": 0.19527575373649597,
      "learning_rate": 6.249104004007485e-05,
      "loss": 0.0177,
      "step": 1810
    },
    {
      "epoch": 12.816901408450704,
      "grad_norm": 0.1705390363931656,
      "learning_rate": 6.140699259750734e-05,
      "loss": 0.0158,
      "step": 1820
    },
    {
      "epoch": 12.887323943661972,
      "grad_norm": 0.22441525757312775,
      "learning_rate": 6.032824830366466e-05,
      "loss": 0.0195,
      "step": 1830
    },
    {
      "epoch": 12.95774647887324,
      "grad_norm": 0.13072457909584045,
      "learning_rate": 5.9254955391136435e-05,
      "loss": 0.0167,
      "step": 1840
    },
    {
      "epoch": 13.028169014084508,
      "grad_norm": 0.44169002771377563,
      "learning_rate": 5.818726134342611e-05,
      "loss": 0.018,
      "step": 1850
    },
    {
      "epoch": 13.098591549295774,
      "grad_norm": 0.19800615310668945,
      "learning_rate": 5.712531287468501e-05,
      "loss": 0.0195,
      "step": 1860
    },
    {
      "epoch": 13.169014084507042,
      "grad_norm": 0.24248644709587097,
      "learning_rate": 5.606925590955199e-05,
      "loss": 0.0234,
      "step": 1870
    },
    {
      "epoch": 13.23943661971831,
      "grad_norm": 0.1700563281774521,
      "learning_rate": 5.501923556310165e-05,
      "loss": 0.017,
      "step": 1880
    },
    {
      "epoch": 13.309859154929578,
      "grad_norm": 0.22675108909606934,
      "learning_rate": 5.3975396120903674e-05,
      "loss": 0.02,
      "step": 1890
    },
    {
      "epoch": 13.380281690140846,
      "grad_norm": 0.2548399567604065,
      "learning_rate": 5.293788101919652e-05,
      "loss": 0.0209,
      "step": 1900
    },
    {
      "epoch": 13.450704225352112,
      "grad_norm": 0.139588862657547,
      "learning_rate": 5.190683282517701e-05,
      "loss": 0.0156,
      "step": 1910
    },
    {
      "epoch": 13.52112676056338,
      "grad_norm": 0.17267008125782013,
      "learning_rate": 5.088239321741044e-05,
      "loss": 0.0172,
      "step": 1920
    },
    {
      "epoch": 13.591549295774648,
      "grad_norm": 0.17120109498500824,
      "learning_rate": 4.986470296636184e-05,
      "loss": 0.0169,
      "step": 1930
    },
    {
      "epoch": 13.661971830985916,
      "grad_norm": 0.1749114841222763,
      "learning_rate": 4.885390191505249e-05,
      "loss": 0.0176,
      "step": 1940
    },
    {
      "epoch": 13.732394366197184,
      "grad_norm": 0.19507087767124176,
      "learning_rate": 4.785012895984397e-05,
      "loss": 0.0199,
      "step": 1950
    },
    {
      "epoch": 13.80281690140845,
      "grad_norm": 0.13473351299762726,
      "learning_rate": 4.685352203135168e-05,
      "loss": 0.0145,
      "step": 1960
    },
    {
      "epoch": 13.873239436619718,
      "grad_norm": 0.25520896911621094,
      "learning_rate": 4.5864218075492086e-05,
      "loss": 0.0154,
      "step": 1970
    },
    {
      "epoch": 13.943661971830986,
      "grad_norm": 0.1440926343202591,
      "learning_rate": 4.488235303466414e-05,
      "loss": 0.0165,
      "step": 1980
    },
    {
      "epoch": 14.014084507042254,
      "grad_norm": 0.19918571412563324,
      "learning_rate": 4.390806182906946e-05,
      "loss": 0.0186,
      "step": 1990
    },
    {
      "epoch": 14.084507042253522,
      "grad_norm": 0.18236444890499115,
      "learning_rate": 4.2941478338172546e-05,
      "loss": 0.015,
      "step": 2000
    },
    {
      "epoch": 14.154929577464788,
      "grad_norm": 0.3441983163356781,
      "learning_rate": 4.198273538230411e-05,
      "loss": 0.0155,
      "step": 2010
    },
    {
      "epoch": 14.225352112676056,
      "grad_norm": 0.16762350499629974,
      "learning_rate": 4.103196470441004e-05,
      "loss": 0.0161,
      "step": 2020
    },
    {
      "epoch": 14.295774647887324,
      "grad_norm": 0.11081443727016449,
      "learning_rate": 4.008929695194819e-05,
      "loss": 0.0146,
      "step": 2030
    },
    {
      "epoch": 14.366197183098592,
      "grad_norm": 0.14331316947937012,
      "learning_rate": 3.915486165893584e-05,
      "loss": 0.0152,
      "step": 2040
    },
    {
      "epoch": 14.43661971830986,
      "grad_norm": 0.2052135467529297,
      "learning_rate": 3.8228787228150256e-05,
      "loss": 0.0156,
      "step": 2050
    },
    {
      "epoch": 14.507042253521126,
      "grad_norm": 0.2391861230134964,
      "learning_rate": 3.731120091348447e-05,
      "loss": 0.0168,
      "step": 2060
    },
    {
      "epoch": 14.577464788732394,
      "grad_norm": 0.15410864353179932,
      "learning_rate": 3.640222880246117e-05,
      "loss": 0.0148,
      "step": 2070
    },
    {
      "epoch": 14.647887323943662,
      "grad_norm": 0.39798498153686523,
      "learning_rate": 3.550199579890658e-05,
      "loss": 0.016,
      "step": 2080
    },
    {
      "epoch": 14.71830985915493,
      "grad_norm": 0.21030154824256897,
      "learning_rate": 3.461062560578732e-05,
      "loss": 0.0152,
      "step": 2090
    },
    {
      "epoch": 14.788732394366198,
      "grad_norm": 0.23185010254383087,
      "learning_rate": 3.372824070821194e-05,
      "loss": 0.0167,
      "step": 2100
    },
    {
      "epoch": 14.859154929577464,
      "grad_norm": 0.16405650973320007,
      "learning_rate": 3.2854962356600124e-05,
      "loss": 0.0175,
      "step": 2110
    },
    {
      "epoch": 14.929577464788732,
      "grad_norm": 0.28342190384864807,
      "learning_rate": 3.199091055002143e-05,
      "loss": 0.0159,
      "step": 2120
    },
    {
      "epoch": 15.0,
      "grad_norm": 0.6678891181945801,
      "learning_rate": 3.1136204019705726e-05,
      "loss": 0.0122,
      "step": 2130
    },
    {
      "epoch": 15.070422535211268,
      "grad_norm": 0.1747487485408783,
      "learning_rate": 3.029096021272836e-05,
      "loss": 0.0163,
      "step": 2140
    },
    {
      "epoch": 15.140845070422536,
      "grad_norm": 0.11703847348690033,
      "learning_rate": 2.9455295275871298e-05,
      "loss": 0.014,
      "step": 2150
    },
    {
      "epoch": 15.211267605633802,
      "grad_norm": 0.19800235331058502,
      "learning_rate": 2.862932403966322e-05,
      "loss": 0.0128,
      "step": 2160
    },
    {
      "epoch": 15.28169014084507,
      "grad_norm": 0.390919953584671,
      "learning_rate": 2.7813160002600524e-05,
      "loss": 0.0146,
      "step": 2170
    },
    {
      "epoch": 15.352112676056338,
      "grad_norm": 0.2693049907684326,
      "learning_rate": 2.7006915315550918e-05,
      "loss": 0.0134,
      "step": 2180
    },
    {
      "epoch": 15.422535211267606,
      "grad_norm": 0.11595863848924637,
      "learning_rate": 2.6210700766342965e-05,
      "loss": 0.0129,
      "step": 2190
    },
    {
      "epoch": 15.492957746478874,
      "grad_norm": 0.39005428552627563,
      "learning_rate": 2.5424625764542132e-05,
      "loss": 0.0134,
      "step": 2200
    },
    {
      "epoch": 15.56338028169014,
      "grad_norm": 0.24683450162410736,
      "learning_rate": 2.4648798326416745e-05,
      "loss": 0.0162,
      "step": 2210
    },
    {
      "epoch": 15.633802816901408,
      "grad_norm": 0.15063706040382385,
      "learning_rate": 2.388332506009524e-05,
      "loss": 0.0145,
      "step": 2220
    },
    {
      "epoch": 15.704225352112676,
      "grad_norm": 0.16000519692897797,
      "learning_rate": 2.3128311150916826e-05,
      "loss": 0.0179,
      "step": 2230
    },
    {
      "epoch": 15.774647887323944,
      "grad_norm": 0.3279981017112732,
      "learning_rate": 2.2383860346977904e-05,
      "loss": 0.0169,
      "step": 2240
    },
    {
      "epoch": 15.845070422535212,
      "grad_norm": 0.1328040212392807,
      "learning_rate": 2.1650074944875587e-05,
      "loss": 0.0137,
      "step": 2250
    },
    {
      "epoch": 15.915492957746478,
      "grad_norm": 0.15805411338806152,
      "learning_rate": 2.0927055775651104e-05,
      "loss": 0.0145,
      "step": 2260
    },
    {
      "epoch": 15.985915492957746,
      "grad_norm": 0.13186223804950714,
      "learning_rate": 2.021490219093426e-05,
      "loss": 0.0186,
      "step": 2270
    },
    {
      "epoch": 16.056338028169016,
      "grad_norm": 0.5759530067443848,
      "learning_rate": 1.9513712049291367e-05,
      "loss": 0.0137,
      "step": 2280
    },
    {
      "epoch": 16.12676056338028,
      "grad_norm": 0.15024252235889435,
      "learning_rate": 1.882358170277835e-05,
      "loss": 0.0124,
      "step": 2290
    },
    {
      "epoch": 16.197183098591548,
      "grad_norm": 0.3911398649215698,
      "learning_rate": 1.814460598370066e-05,
      "loss": 0.014,
      "step": 2300
    },
    {
      "epoch": 16.267605633802816,
      "grad_norm": 0.4579143226146698,
      "learning_rate": 1.7476878191582246e-05,
      "loss": 0.0128,
      "step": 2310
    },
    {
      "epoch": 16.338028169014084,
      "grad_norm": 0.24127140641212463,
      "learning_rate": 1.6820490080345075e-05,
      "loss": 0.0142,
      "step": 2320
    },
    {
      "epoch": 16.408450704225352,
      "grad_norm": 0.14859215915203094,
      "learning_rate": 1.6175531845700964e-05,
      "loss": 0.0151,
      "step": 2330
    },
    {
      "epoch": 16.47887323943662,
      "grad_norm": 0.167475625872612,
      "learning_rate": 1.5542092112757734e-05,
      "loss": 0.0134,
      "step": 2340
    },
    {
      "epoch": 16.549295774647888,
      "grad_norm": 0.16596587002277374,
      "learning_rate": 1.4920257923840864e-05,
      "loss": 0.0171,
      "step": 2350
    },
    {
      "epoch": 16.619718309859156,
      "grad_norm": 0.14397069811820984,
      "learning_rate": 1.4310114726532831e-05,
      "loss": 0.0138,
      "step": 2360
    },
    {
      "epoch": 16.690140845070424,
      "grad_norm": 0.1877891719341278,
      "learning_rate": 1.371174636193181e-05,
      "loss": 0.0155,
      "step": 2370
    },
    {
      "epoch": 16.760563380281692,
      "grad_norm": 0.13636761903762817,
      "learning_rate": 1.3125235053130547e-05,
      "loss": 0.0127,
      "step": 2380
    },
    {
      "epoch": 16.830985915492956,
      "grad_norm": 0.297732412815094,
      "learning_rate": 1.2550661393918217e-05,
      "loss": 0.013,
      "step": 2390
    },
    {
      "epoch": 16.901408450704224,
      "grad_norm": 0.1112903356552124,
      "learning_rate": 1.1988104337705574e-05,
      "loss": 0.0117,
      "step": 2400
    },
    {
      "epoch": 16.971830985915492,
      "grad_norm": 0.13235154747962952,
      "learning_rate": 1.1437641186676006e-05,
      "loss": 0.0129,
      "step": 2410
    },
    {
      "epoch": 17.04225352112676,
      "grad_norm": 0.19815203547477722,
      "learning_rate": 1.0899347581163221e-05,
      "loss": 0.0148,
      "step": 2420
    },
    {
      "epoch": 17.112676056338028,
      "grad_norm": 0.4887934625148773,
      "learning_rate": 1.0373297489257272e-05,
      "loss": 0.0125,
      "step": 2430
    },
    {
      "epoch": 17.183098591549296,
      "grad_norm": 0.12783914804458618,
      "learning_rate": 9.85956319664051e-06,
      "loss": 0.018,
      "step": 2440
    },
    {
      "epoch": 17.253521126760564,
      "grad_norm": 0.2081880420446396,
      "learning_rate": 9.358215296654637e-06,
      "loss": 0.0126,
      "step": 2450
    },
    {
      "epoch": 17.323943661971832,
      "grad_norm": 0.22537609934806824,
      "learning_rate": 8.869322680600289e-06,
      "loss": 0.0129,
      "step": 2460
    },
    {
      "epoch": 17.3943661971831,
      "grad_norm": 0.5902778506278992,
      "learning_rate": 8.392952528270659e-06,
      "loss": 0.0145,
      "step": 2470
    },
    {
      "epoch": 17.464788732394368,
      "grad_norm": 0.12127131968736649,
      "learning_rate": 7.929170298719969e-06,
      "loss": 0.0115,
      "step": 2480
    },
    {
      "epoch": 17.535211267605632,
      "grad_norm": 0.16380423307418823,
      "learning_rate": 7.478039721268759e-06,
      "loss": 0.0131,
      "step": 2490
    },
    {
      "epoch": 17.6056338028169,
      "grad_norm": 0.17874068021774292,
      "learning_rate": 7.039622786746658e-06,
      "loss": 0.0142,
      "step": 2500
    },
    {
      "epoch": 17.676056338028168,
      "grad_norm": 0.18840940296649933,
      "learning_rate": 6.613979738974074e-06,
      "loss": 0.0131,
      "step": 2510
    },
    {
      "epoch": 17.746478873239436,
      "grad_norm": 0.25764134526252747,
      "learning_rate": 6.2011690664840025e-06,
      "loss": 0.014,
      "step": 2520
    },
    {
      "epoch": 17.816901408450704,
      "grad_norm": 0.15851202607154846,
      "learning_rate": 5.801247494484929e-06,
      "loss": 0.0144,
      "step": 2530
    },
    {
      "epoch": 17.887323943661972,
      "grad_norm": 0.12299610674381256,
      "learning_rate": 5.414269977066133e-06,
      "loss": 0.0126,
      "step": 2540
    },
    {
      "epoch": 17.95774647887324,
      "grad_norm": 0.09734787046909332,
      "learning_rate": 5.040289689646338e-06,
      "loss": 0.0116,
      "step": 2550
    },
    {
      "epoch": 18.028169014084508,
      "grad_norm": 0.11694216728210449,
      "learning_rate": 4.679358021666725e-06,
      "loss": 0.0119,
      "step": 2560
    },
    {
      "epoch": 18.098591549295776,
      "grad_norm": 0.2159486711025238,
      "learning_rate": 4.3315245695294545e-06,
      "loss": 0.0125,
      "step": 2570
    },
    {
      "epoch": 18.169014084507044,
      "grad_norm": 0.13054227828979492,
      "learning_rate": 3.996837129782416e-06,
      "loss": 0.0131,
      "step": 2580
    },
    {
      "epoch": 18.239436619718308,
      "grad_norm": 0.4224683940410614,
      "learning_rate": 3.6753416925515593e-06,
      "loss": 0.0091,
      "step": 2590
    },
    {
      "epoch": 18.309859154929576,
      "grad_norm": 0.7623445987701416,
      "learning_rate": 3.367082435221125e-06,
      "loss": 0.0113,
      "step": 2600
    },
    {
      "epoch": 18.380281690140844,
      "grad_norm": 0.10136809200048447,
      "learning_rate": 3.0721017163632294e-06,
      "loss": 0.0136,
      "step": 2610
    },
    {
      "epoch": 18.450704225352112,
      "grad_norm": 0.11303048580884933,
      "learning_rate": 2.7904400699172994e-06,
      "loss": 0.0137,
      "step": 2620
    },
    {
      "epoch": 18.52112676056338,
      "grad_norm": 0.181919127702713,
      "learning_rate": 2.5221361996200955e-06,
      "loss": 0.0128,
      "step": 2630
    },
    {
      "epoch": 18.591549295774648,
      "grad_norm": 0.4242732524871826,
      "learning_rate": 2.2672269736875087e-06,
      "loss": 0.0113,
      "step": 2640
    },
    {
      "epoch": 18.661971830985916,
      "grad_norm": 0.07075098901987076,
      "learning_rate": 2.025747419748314e-06,
      "loss": 0.0136,
      "step": 2650
    },
    {
      "epoch": 18.732394366197184,
      "grad_norm": 0.1898496001958847,
      "learning_rate": 1.7977307200309546e-06,
      "loss": 0.0128,
      "step": 2660
    },
    {
      "epoch": 18.802816901408452,
      "grad_norm": 0.1816607266664505,
      "learning_rate": 1.5832082068039544e-06,
      "loss": 0.0122,
      "step": 2670
    },
    {
      "epoch": 18.87323943661972,
      "grad_norm": 0.07458122074604034,
      "learning_rate": 1.3822093580703966e-06,
      "loss": 0.0116,
      "step": 2680
    },
    {
      "epoch": 18.943661971830984,
      "grad_norm": 0.11320627480745316,
      "learning_rate": 1.1947617935174183e-06,
      "loss": 0.0124,
      "step": 2690
    },
    {
      "epoch": 19.014084507042252,
      "grad_norm": 0.13373975455760956,
      "learning_rate": 1.0208912707207697e-06,
      "loss": 0.0123,
      "step": 2700
    },
    {
      "epoch": 19.08450704225352,
      "grad_norm": 0.11728695034980774,
      "learning_rate": 8.606216816055334e-07,
      "loss": 0.0126,
      "step": 2710
    },
    {
      "epoch": 19.154929577464788,
      "grad_norm": 0.16200479865074158,
      "learning_rate": 7.139750491630404e-07,
      "loss": 0.0127,
      "step": 2720
    },
    {
      "epoch": 19.225352112676056,
      "grad_norm": 0.15322642028331757,
      "learning_rate": 5.809715244246228e-07,
      "loss": 0.0108,
      "step": 2730
    },
    {
      "epoch": 19.295774647887324,
      "grad_norm": 0.1876576840877533,
      "learning_rate": 4.616293836926633e-07,
      "loss": 0.0145,
      "step": 2740
    },
    {
      "epoch": 19.366197183098592,
      "grad_norm": 0.2538888156414032,
      "learning_rate": 3.5596502602917027e-07,
      "loss": 0.0122,
      "step": 2750
    },
    {
      "epoch": 19.43661971830986,
      "grad_norm": 0.09630775451660156,
      "learning_rate": 2.639929710023359e-07,
      "loss": 0.0114,
      "step": 2760
    },
    {
      "epoch": 19.507042253521128,
      "grad_norm": 0.15465404093265533,
      "learning_rate": 1.8572585669144328e-07,
      "loss": 0.0113,
      "step": 2770
    },
    {
      "epoch": 19.577464788732396,
      "grad_norm": 0.10793367773294449,
      "learning_rate": 1.211744379501556e-07,
      "loss": 0.0108,
      "step": 2780
    },
    {
      "epoch": 19.647887323943664,
      "grad_norm": 0.15497437119483948,
      "learning_rate": 7.034758492872052e-08,
      "loss": 0.0129,
      "step": 2790
    },
    {
      "epoch": 19.718309859154928,
      "grad_norm": 0.16003310680389404,
      "learning_rate": 3.3252281855067345e-08,
      "loss": 0.0112,
      "step": 2800
    },
    {
      "epoch": 19.788732394366196,
      "grad_norm": 0.2295142561197281,
      "learning_rate": 9.89362607515254e-09,
      "loss": 0.0122,
      "step": 2810
    },
    {
      "epoch": 19.859154929577464,
      "grad_norm": 0.4385908246040344,
      "learning_rate": 2.748273524311173e-10,
      "loss": 0.0137,
      "step": 2820
    },
    {
      "epoch": 19.87323943661972,
      "step": 2822,
      "total_flos": 3.9569565333896294e+17,
      "train_loss": 0.03605978149646123,
      "train_runtime": 3188.1566,
      "train_samples_per_second": 56.65,
      "train_steps_per_second": 0.885
    }
  ],
  "logging_steps": 10,
  "max_steps": 2822,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 20,
  "save_steps": 10000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.9569565333896294e+17,
  "train_batch_size": 64,
  "trial_name": null,
  "trial_params": null
}