{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 4.996815286624204, |
|
"eval_steps": 500, |
|
"global_step": 3138, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01592356687898089, |
|
"grad_norm": 5.4758992195129395, |
|
"learning_rate": 1.2738853503184714e-05, |
|
"loss": 0.9116, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.03184713375796178, |
|
"grad_norm": 4.248476028442383, |
|
"learning_rate": 2.5477707006369428e-05, |
|
"loss": 0.5347, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.04777070063694268, |
|
"grad_norm": 2.722592830657959, |
|
"learning_rate": 3.821656050955414e-05, |
|
"loss": 0.3604, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.06369426751592357, |
|
"grad_norm": 1.718458890914917, |
|
"learning_rate": 5.0955414012738855e-05, |
|
"loss": 0.2924, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.07961783439490445, |
|
"grad_norm": 2.0852134227752686, |
|
"learning_rate": 6.369426751592356e-05, |
|
"loss": 0.2293, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.09554140127388536, |
|
"grad_norm": 2.2306277751922607, |
|
"learning_rate": 7.643312101910829e-05, |
|
"loss": 0.2296, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.11146496815286625, |
|
"grad_norm": 1.173863172531128, |
|
"learning_rate": 8.9171974522293e-05, |
|
"loss": 0.1754, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.12738853503184713, |
|
"grad_norm": 1.4193692207336426, |
|
"learning_rate": 0.00010191082802547771, |
|
"loss": 0.1821, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.14331210191082802, |
|
"grad_norm": 1.5220022201538086, |
|
"learning_rate": 0.00011464968152866242, |
|
"loss": 0.1634, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.1592356687898089, |
|
"grad_norm": 1.3423898220062256, |
|
"learning_rate": 0.00012738853503184712, |
|
"loss": 0.1462, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.1751592356687898, |
|
"grad_norm": 1.2148971557617188, |
|
"learning_rate": 0.00014012738853503185, |
|
"loss": 0.1386, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.1910828025477707, |
|
"grad_norm": 0.9744131565093994, |
|
"learning_rate": 0.00015286624203821657, |
|
"loss": 0.1376, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.2070063694267516, |
|
"grad_norm": 1.014123558998108, |
|
"learning_rate": 0.0001656050955414013, |
|
"loss": 0.1082, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.2229299363057325, |
|
"grad_norm": 1.3054927587509155, |
|
"learning_rate": 0.000178343949044586, |
|
"loss": 0.1054, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.23885350318471338, |
|
"grad_norm": 1.789557695388794, |
|
"learning_rate": 0.00019108280254777072, |
|
"loss": 0.1308, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.25477707006369427, |
|
"grad_norm": 0.6913818717002869, |
|
"learning_rate": 0.00019999950020955923, |
|
"loss": 0.1197, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.27070063694267515, |
|
"grad_norm": 0.9011440277099609, |
|
"learning_rate": 0.00019999061518514538, |
|
"loss": 0.0938, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.28662420382165604, |
|
"grad_norm": 1.6200815439224243, |
|
"learning_rate": 0.00019997062484234648, |
|
"loss": 0.108, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.30254777070063693, |
|
"grad_norm": 0.8650676608085632, |
|
"learning_rate": 0.0001999395314013622, |
|
"loss": 0.105, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.3184713375796178, |
|
"grad_norm": 1.1472347974777222, |
|
"learning_rate": 0.00019989733831554218, |
|
"loss": 0.1219, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.3343949044585987, |
|
"grad_norm": 1.0066096782684326, |
|
"learning_rate": 0.00019984405027100277, |
|
"loss": 0.0927, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.3503184713375796, |
|
"grad_norm": 1.0204339027404785, |
|
"learning_rate": 0.00019977967318610639, |
|
"loss": 0.1119, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.3662420382165605, |
|
"grad_norm": 1.074305534362793, |
|
"learning_rate": 0.00019970421421080427, |
|
"loss": 0.0903, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.3821656050955414, |
|
"grad_norm": 0.9550490379333496, |
|
"learning_rate": 0.0001996176817258424, |
|
"loss": 0.091, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.3980891719745223, |
|
"grad_norm": 1.0449860095977783, |
|
"learning_rate": 0.0001995200853418307, |
|
"loss": 0.0861, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.4140127388535032, |
|
"grad_norm": 0.9337477684020996, |
|
"learning_rate": 0.00019941143589817558, |
|
"loss": 0.0907, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.4299363057324841, |
|
"grad_norm": 0.7787328958511353, |
|
"learning_rate": 0.00019929174546187616, |
|
"loss": 0.0816, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.445859872611465, |
|
"grad_norm": 0.5837294459342957, |
|
"learning_rate": 0.000199161027326184, |
|
"loss": 0.085, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.46178343949044587, |
|
"grad_norm": 0.6995041370391846, |
|
"learning_rate": 0.00019901929600912678, |
|
"loss": 0.0767, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.47770700636942676, |
|
"grad_norm": 0.9348034262657166, |
|
"learning_rate": 0.00019886656725189575, |
|
"loss": 0.08, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.49363057324840764, |
|
"grad_norm": 0.458731085062027, |
|
"learning_rate": 0.0001987028580170976, |
|
"loss": 0.0819, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.5095541401273885, |
|
"grad_norm": 0.5243947505950928, |
|
"learning_rate": 0.00019852818648687044, |
|
"loss": 0.078, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.5254777070063694, |
|
"grad_norm": 0.6848501563072205, |
|
"learning_rate": 0.00019834257206086442, |
|
"loss": 0.0891, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.5414012738853503, |
|
"grad_norm": 0.9554067254066467, |
|
"learning_rate": 0.00019814603535408724, |
|
"loss": 0.0759, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.5573248407643312, |
|
"grad_norm": 0.49273306131362915, |
|
"learning_rate": 0.0001979385981946144, |
|
"loss": 0.0744, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.5732484076433121, |
|
"grad_norm": 0.7968289852142334, |
|
"learning_rate": 0.00019772028362116513, |
|
"loss": 0.0674, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.589171974522293, |
|
"grad_norm": 0.5523058176040649, |
|
"learning_rate": 0.00019749111588054333, |
|
"loss": 0.078, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.6050955414012739, |
|
"grad_norm": 0.7879081964492798, |
|
"learning_rate": 0.00019725112042494493, |
|
"loss": 0.0727, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.6210191082802548, |
|
"grad_norm": 0.897438645362854, |
|
"learning_rate": 0.00019700032390913087, |
|
"loss": 0.0882, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.6369426751592356, |
|
"grad_norm": 0.40805375576019287, |
|
"learning_rate": 0.00019673875418746678, |
|
"loss": 0.0687, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.6528662420382165, |
|
"grad_norm": 0.8794530034065247, |
|
"learning_rate": 0.00019646644031082948, |
|
"loss": 0.0745, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.6687898089171974, |
|
"grad_norm": 0.671391487121582, |
|
"learning_rate": 0.00019618341252338026, |
|
"loss": 0.0773, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.6847133757961783, |
|
"grad_norm": 0.7118439078330994, |
|
"learning_rate": 0.00019588970225920612, |
|
"loss": 0.0646, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.7006369426751592, |
|
"grad_norm": 0.8737181425094604, |
|
"learning_rate": 0.00019558534213882838, |
|
"loss": 0.0667, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.7165605095541401, |
|
"grad_norm": 0.9923394322395325, |
|
"learning_rate": 0.00019527036596557983, |
|
"loss": 0.0699, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.732484076433121, |
|
"grad_norm": 0.8922154307365417, |
|
"learning_rate": 0.0001949448087218504, |
|
"loss": 0.062, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.7484076433121019, |
|
"grad_norm": 0.6825265884399414, |
|
"learning_rate": 0.0001946087065652018, |
|
"loss": 0.0672, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.7643312101910829, |
|
"grad_norm": 0.6685843467712402, |
|
"learning_rate": 0.0001942620968243519, |
|
"loss": 0.057, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.7802547770700637, |
|
"grad_norm": 0.5671578049659729, |
|
"learning_rate": 0.00019390501799502872, |
|
"loss": 0.0543, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.7961783439490446, |
|
"grad_norm": 0.5407360196113586, |
|
"learning_rate": 0.00019353750973569498, |
|
"loss": 0.0526, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.8121019108280255, |
|
"grad_norm": 0.45758092403411865, |
|
"learning_rate": 0.0001931596128631436, |
|
"loss": 0.0523, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.8280254777070064, |
|
"grad_norm": 0.5699948668479919, |
|
"learning_rate": 0.0001927713693479643, |
|
"loss": 0.0508, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.8439490445859873, |
|
"grad_norm": 0.6552209258079529, |
|
"learning_rate": 0.00019237282230988222, |
|
"loss": 0.0736, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.8598726114649682, |
|
"grad_norm": 0.6645591855049133, |
|
"learning_rate": 0.00019196401601296898, |
|
"loss": 0.067, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.8757961783439491, |
|
"grad_norm": 0.5558132529258728, |
|
"learning_rate": 0.00019154499586072646, |
|
"loss": 0.0562, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.89171974522293, |
|
"grad_norm": 0.42415371537208557, |
|
"learning_rate": 0.00019111580839104418, |
|
"loss": 0.064, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.9076433121019108, |
|
"grad_norm": 0.6379917860031128, |
|
"learning_rate": 0.00019067650127103055, |
|
"loss": 0.0628, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.9235668789808917, |
|
"grad_norm": 0.603799045085907, |
|
"learning_rate": 0.0001902271232917189, |
|
"loss": 0.0668, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.9394904458598726, |
|
"grad_norm": 0.569402813911438, |
|
"learning_rate": 0.0001897677243626485, |
|
"loss": 0.0613, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.9554140127388535, |
|
"grad_norm": 0.7091385722160339, |
|
"learning_rate": 0.00018929835550632145, |
|
"loss": 0.0665, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.9713375796178344, |
|
"grad_norm": 0.5904159545898438, |
|
"learning_rate": 0.00018881906885253593, |
|
"loss": 0.0766, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.9872611464968153, |
|
"grad_norm": 0.6689817905426025, |
|
"learning_rate": 0.00018832991763259647, |
|
"loss": 0.065, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.0031847133757963, |
|
"grad_norm": 0.7255253791809082, |
|
"learning_rate": 0.00018783095617340193, |
|
"loss": 0.053, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.019108280254777, |
|
"grad_norm": 0.41102227568626404, |
|
"learning_rate": 0.0001873222398914117, |
|
"loss": 0.0584, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.035031847133758, |
|
"grad_norm": 0.5301253199577332, |
|
"learning_rate": 0.00018680382528649093, |
|
"loss": 0.0619, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.0509554140127388, |
|
"grad_norm": 0.5525597929954529, |
|
"learning_rate": 0.00018627576993563563, |
|
"loss": 0.0556, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.0668789808917198, |
|
"grad_norm": 0.5564507246017456, |
|
"learning_rate": 0.0001857381324865777, |
|
"loss": 0.0428, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.0828025477707006, |
|
"grad_norm": 0.5977344512939453, |
|
"learning_rate": 0.0001851909726512714, |
|
"loss": 0.0576, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.0987261146496816, |
|
"grad_norm": 0.49446621537208557, |
|
"learning_rate": 0.00018463435119926176, |
|
"loss": 0.0527, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.1146496815286624, |
|
"grad_norm": 0.4952321946620941, |
|
"learning_rate": 0.00018406832995093486, |
|
"loss": 0.0472, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.1305732484076434, |
|
"grad_norm": 0.40190061926841736, |
|
"learning_rate": 0.0001834929717706522, |
|
"loss": 0.0557, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.1464968152866242, |
|
"grad_norm": 0.44065698981285095, |
|
"learning_rate": 0.00018290834055976855, |
|
"loss": 0.0473, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.1624203821656052, |
|
"grad_norm": 0.5594244003295898, |
|
"learning_rate": 0.00018231450124953495, |
|
"loss": 0.0588, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.178343949044586, |
|
"grad_norm": 0.49133527278900146, |
|
"learning_rate": 0.00018171151979388714, |
|
"loss": 0.0497, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.194267515923567, |
|
"grad_norm": 0.5907294750213623, |
|
"learning_rate": 0.00018109946316212052, |
|
"loss": 0.0525, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.2101910828025477, |
|
"grad_norm": 0.5373413562774658, |
|
"learning_rate": 0.00018047839933145232, |
|
"loss": 0.0553, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.2261146496815287, |
|
"grad_norm": 0.5720402598381042, |
|
"learning_rate": 0.00017984839727947164, |
|
"loss": 0.0505, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.2420382165605095, |
|
"grad_norm": 0.3708595037460327, |
|
"learning_rate": 0.00017920952697647886, |
|
"loss": 0.0544, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.2579617834394905, |
|
"grad_norm": 0.48515430092811584, |
|
"learning_rate": 0.00017856185937771416, |
|
"loss": 0.041, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.2738853503184713, |
|
"grad_norm": 0.6001298427581787, |
|
"learning_rate": 0.00017790546641547722, |
|
"loss": 0.0478, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.2898089171974523, |
|
"grad_norm": 0.4815957844257355, |
|
"learning_rate": 0.00017724042099113792, |
|
"loss": 0.0439, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.305732484076433, |
|
"grad_norm": 0.6788195371627808, |
|
"learning_rate": 0.00017656679696703996, |
|
"loss": 0.0573, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.321656050955414, |
|
"grad_norm": 0.3975256383419037, |
|
"learning_rate": 0.0001758846691582972, |
|
"loss": 0.0472, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.3375796178343948, |
|
"grad_norm": 0.4403384327888489, |
|
"learning_rate": 0.00017519411332448444, |
|
"loss": 0.0445, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.3535031847133758, |
|
"grad_norm": 0.39884060621261597, |
|
"learning_rate": 0.00017449520616122344, |
|
"loss": 0.0473, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.3694267515923566, |
|
"grad_norm": 0.41912099719047546, |
|
"learning_rate": 0.00017378802529166462, |
|
"loss": 0.0529, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.3853503184713376, |
|
"grad_norm": 0.43497881293296814, |
|
"learning_rate": 0.00017307264925786622, |
|
"loss": 0.0462, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.4012738853503186, |
|
"grad_norm": 0.6406415700912476, |
|
"learning_rate": 0.0001723491575120708, |
|
"loss": 0.0515, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.4171974522292994, |
|
"grad_norm": 0.5098315477371216, |
|
"learning_rate": 0.0001716176304078813, |
|
"loss": 0.0537, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.4331210191082802, |
|
"grad_norm": 0.39417779445648193, |
|
"learning_rate": 0.0001708781491913365, |
|
"loss": 0.0462, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.4490445859872612, |
|
"grad_norm": 0.31453198194503784, |
|
"learning_rate": 0.00017013079599188758, |
|
"loss": 0.0496, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.4649681528662422, |
|
"grad_norm": 0.467495322227478, |
|
"learning_rate": 0.00016937565381327661, |
|
"loss": 0.0518, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.480891719745223, |
|
"grad_norm": 0.5229964256286621, |
|
"learning_rate": 0.00016861280652431758, |
|
"loss": 0.0588, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.4968152866242037, |
|
"grad_norm": 0.5483565926551819, |
|
"learning_rate": 0.00016784233884958212, |
|
"loss": 0.0454, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.5127388535031847, |
|
"grad_norm": 0.6096242070198059, |
|
"learning_rate": 0.00016706433635998912, |
|
"loss": 0.0458, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.5286624203821657, |
|
"grad_norm": 0.34970104694366455, |
|
"learning_rate": 0.00016627888546330138, |
|
"loss": 0.0498, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.5445859872611465, |
|
"grad_norm": 0.691208004951477, |
|
"learning_rate": 0.00016548607339452853, |
|
"loss": 0.0446, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.5605095541401273, |
|
"grad_norm": 0.32399702072143555, |
|
"learning_rate": 0.00016468598820623855, |
|
"loss": 0.0452, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.5764331210191083, |
|
"grad_norm": 0.36962831020355225, |
|
"learning_rate": 0.0001638787187587784, |
|
"loss": 0.0415, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.5923566878980893, |
|
"grad_norm": 0.45716020464897156, |
|
"learning_rate": 0.0001630643547104047, |
|
"loss": 0.0439, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.60828025477707, |
|
"grad_norm": 0.3454309403896332, |
|
"learning_rate": 0.00016224298650732597, |
|
"loss": 0.0532, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.6242038216560508, |
|
"grad_norm": 0.6210212707519531, |
|
"learning_rate": 0.00016141470537365754, |
|
"loss": 0.0414, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.6401273885350318, |
|
"grad_norm": 0.3841806650161743, |
|
"learning_rate": 0.00016057960330128973, |
|
"loss": 0.0484, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.6560509554140128, |
|
"grad_norm": 0.3426171839237213, |
|
"learning_rate": 0.0001597377730396708, |
|
"loss": 0.0423, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.6719745222929936, |
|
"grad_norm": 0.33869680762290955, |
|
"learning_rate": 0.0001588893080855061, |
|
"loss": 0.0436, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.6878980891719744, |
|
"grad_norm": 0.40435710549354553, |
|
"learning_rate": 0.00015803430267237383, |
|
"loss": 0.0517, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.7038216560509554, |
|
"grad_norm": 0.25334039330482483, |
|
"learning_rate": 0.00015717285176025913, |
|
"loss": 0.0339, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.7197452229299364, |
|
"grad_norm": 0.40064361691474915, |
|
"learning_rate": 0.00015630505102500754, |
|
"loss": 0.0472, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.7356687898089171, |
|
"grad_norm": 0.5399286150932312, |
|
"learning_rate": 0.0001554309968476988, |
|
"loss": 0.0448, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.7515923566878981, |
|
"grad_norm": 0.36550888419151306, |
|
"learning_rate": 0.0001545507863039426, |
|
"loss": 0.0348, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.767515923566879, |
|
"grad_norm": 0.5114671587944031, |
|
"learning_rate": 0.00015366451715309674, |
|
"loss": 0.0409, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.78343949044586, |
|
"grad_norm": 0.4588598608970642, |
|
"learning_rate": 0.00015277228782740988, |
|
"loss": 0.0394, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.799363057324841, |
|
"grad_norm": 0.35981065034866333, |
|
"learning_rate": 0.0001518741974210892, |
|
"loss": 0.0412, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.8152866242038217, |
|
"grad_norm": 0.39596328139305115, |
|
"learning_rate": 0.00015097034567929457, |
|
"loss": 0.0369, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.8312101910828025, |
|
"grad_norm": 0.45480477809906006, |
|
"learning_rate": 0.0001500608329870606, |
|
"loss": 0.0439, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.8471337579617835, |
|
"grad_norm": 0.28407368063926697, |
|
"learning_rate": 0.00014914576035814748, |
|
"loss": 0.0483, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.8630573248407645, |
|
"grad_norm": 0.4121032953262329, |
|
"learning_rate": 0.000148225229423822, |
|
"loss": 0.0488, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.8789808917197452, |
|
"grad_norm": 0.44136524200439453, |
|
"learning_rate": 0.00014729934242157004, |
|
"loss": 0.0434, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.894904458598726, |
|
"grad_norm": 0.43993085622787476, |
|
"learning_rate": 0.00014636820218374177, |
|
"loss": 0.0457, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.910828025477707, |
|
"grad_norm": 0.3353908360004425, |
|
"learning_rate": 0.00014543191212613047, |
|
"loss": 0.0486, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.926751592356688, |
|
"grad_norm": 0.4279186427593231, |
|
"learning_rate": 0.0001444905762364872, |
|
"loss": 0.0526, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.9426751592356688, |
|
"grad_norm": 0.5773082375526428, |
|
"learning_rate": 0.00014354429906297125, |
|
"loss": 0.0462, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.9585987261146496, |
|
"grad_norm": 0.39543482661247253, |
|
"learning_rate": 0.00014259318570253863, |
|
"loss": 0.0417, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.9745222929936306, |
|
"grad_norm": 0.39346298575401306, |
|
"learning_rate": 0.0001416373417892698, |
|
"loss": 0.0361, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 1.9904458598726116, |
|
"grad_norm": 0.32164451479911804, |
|
"learning_rate": 0.0001406768734826375, |
|
"loss": 0.0394, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.0063694267515926, |
|
"grad_norm": 0.551193356513977, |
|
"learning_rate": 0.00013971188745571616, |
|
"loss": 0.042, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.022292993630573, |
|
"grad_norm": 0.3745879530906677, |
|
"learning_rate": 0.00013874249088333468, |
|
"loss": 0.0432, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 2.038216560509554, |
|
"grad_norm": 0.36253151297569275, |
|
"learning_rate": 0.00013776879143017294, |
|
"loss": 0.0395, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 2.054140127388535, |
|
"grad_norm": 0.5354855060577393, |
|
"learning_rate": 0.00013679089723880427, |
|
"loss": 0.044, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.070063694267516, |
|
"grad_norm": 0.5604785680770874, |
|
"learning_rate": 0.00013580891691768483, |
|
"loss": 0.0377, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.0859872611464967, |
|
"grad_norm": 0.42235568165779114, |
|
"learning_rate": 0.00013482295952909102, |
|
"loss": 0.0381, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.1019108280254777, |
|
"grad_norm": 0.3224591612815857, |
|
"learning_rate": 0.0001338331345770068, |
|
"loss": 0.0396, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 2.1178343949044587, |
|
"grad_norm": 0.36872801184654236, |
|
"learning_rate": 0.00013283955199496156, |
|
"loss": 0.0469, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 2.1337579617834397, |
|
"grad_norm": 0.38453301787376404, |
|
"learning_rate": 0.0001318423221338207, |
|
"loss": 0.0433, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 2.1496815286624202, |
|
"grad_norm": 0.3108746409416199, |
|
"learning_rate": 0.0001308415557495295, |
|
"loss": 0.0373, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 2.1656050955414012, |
|
"grad_norm": 0.41351979970932007, |
|
"learning_rate": 0.00012983736399081242, |
|
"loss": 0.0362, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 2.1815286624203822, |
|
"grad_norm": 0.39941781759262085, |
|
"learning_rate": 0.0001288298583868282, |
|
"loss": 0.0444, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 2.1974522292993632, |
|
"grad_norm": 0.5019637942314148, |
|
"learning_rate": 0.00012781915083478332, |
|
"loss": 0.0362, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 2.213375796178344, |
|
"grad_norm": 0.44153285026550293, |
|
"learning_rate": 0.00012680535358750402, |
|
"loss": 0.048, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 2.229299363057325, |
|
"grad_norm": 0.3250869810581207, |
|
"learning_rate": 0.00012578857924096934, |
|
"loss": 0.038, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 2.245222929936306, |
|
"grad_norm": 0.38918137550354004, |
|
"learning_rate": 0.00012476894072180575, |
|
"loss": 0.0507, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 2.261146496815287, |
|
"grad_norm": 0.42312440276145935, |
|
"learning_rate": 0.00012374655127474487, |
|
"loss": 0.0401, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 2.2770700636942673, |
|
"grad_norm": 0.6262978911399841, |
|
"learning_rate": 0.00012272152445004647, |
|
"loss": 0.0476, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 2.2929936305732483, |
|
"grad_norm": 0.4742312431335449, |
|
"learning_rate": 0.00012169397409088695, |
|
"loss": 0.0375, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 2.3089171974522293, |
|
"grad_norm": 0.5429434180259705, |
|
"learning_rate": 0.00012066401432071551, |
|
"loss": 0.0486, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 2.3248407643312103, |
|
"grad_norm": 0.34995678067207336, |
|
"learning_rate": 0.00011963175953057924, |
|
"loss": 0.0398, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 2.340764331210191, |
|
"grad_norm": 0.26490700244903564, |
|
"learning_rate": 0.00011859732436641851, |
|
"loss": 0.0285, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 2.356687898089172, |
|
"grad_norm": 0.30831092596054077, |
|
"learning_rate": 0.00011756082371633373, |
|
"loss": 0.0344, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 2.372611464968153, |
|
"grad_norm": 0.35261446237564087, |
|
"learning_rate": 0.00011652237269782573, |
|
"loss": 0.0327, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 2.388535031847134, |
|
"grad_norm": 0.31007587909698486, |
|
"learning_rate": 0.00011548208664501034, |
|
"loss": 0.049, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 2.404458598726115, |
|
"grad_norm": 0.49864161014556885, |
|
"learning_rate": 0.00011444008109580884, |
|
"loss": 0.0318, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 2.4203821656050954, |
|
"grad_norm": 0.2871188521385193, |
|
"learning_rate": 0.00011339647177911599, |
|
"loss": 0.0335, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 2.4363057324840764, |
|
"grad_norm": 0.38693806529045105, |
|
"learning_rate": 0.00011235137460194676, |
|
"loss": 0.0334, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 2.4522292993630574, |
|
"grad_norm": 0.34477829933166504, |
|
"learning_rate": 0.00011130490563656326, |
|
"loss": 0.0278, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 2.468152866242038, |
|
"grad_norm": 0.21558038890361786, |
|
"learning_rate": 0.00011025718110758338, |
|
"loss": 0.0425, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 2.484076433121019, |
|
"grad_norm": 0.3483816683292389, |
|
"learning_rate": 0.0001092083173790724, |
|
"loss": 0.0308, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 0.365614652633667, |
|
"learning_rate": 0.00010815843094161927, |
|
"loss": 0.0309, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 2.515923566878981, |
|
"grad_norm": 0.4408479928970337, |
|
"learning_rate": 0.00010710763839939857, |
|
"loss": 0.034, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 2.531847133757962, |
|
"grad_norm": 0.2580474615097046, |
|
"learning_rate": 0.00010605605645722024, |
|
"loss": 0.0349, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 2.5477707006369426, |
|
"grad_norm": 0.3337418735027313, |
|
"learning_rate": 0.0001050038019075678, |
|
"loss": 0.0342, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 2.5636942675159236, |
|
"grad_norm": 0.41936802864074707, |
|
"learning_rate": 0.00010395099161762698, |
|
"loss": 0.0404, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 2.5796178343949046, |
|
"grad_norm": 0.3816032111644745, |
|
"learning_rate": 0.00010289774251630602, |
|
"loss": 0.0442, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 2.595541401273885, |
|
"grad_norm": 0.30350789427757263, |
|
"learning_rate": 0.00010184417158124915, |
|
"loss": 0.036, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 2.611464968152866, |
|
"grad_norm": 0.20693911612033844, |
|
"learning_rate": 0.00010079039582584457, |
|
"loss": 0.0293, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 2.627388535031847, |
|
"grad_norm": 0.31562432646751404, |
|
"learning_rate": 9.973653228622867e-05, |
|
"loss": 0.0268, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 2.643312101910828, |
|
"grad_norm": 0.3229486048221588, |
|
"learning_rate": 9.86826980082874e-05, |
|
"loss": 0.0385, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 2.659235668789809, |
|
"grad_norm": 0.20771260559558868, |
|
"learning_rate": 9.762901003465679e-05, |
|
"loss": 0.0293, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 2.6751592356687897, |
|
"grad_norm": 0.2522100508213043, |
|
"learning_rate": 9.657558539172376e-05, |
|
"loss": 0.0273, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 2.6910828025477707, |
|
"grad_norm": 0.30410313606262207, |
|
"learning_rate": 9.552254107662888e-05, |
|
"loss": 0.039, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 2.7070063694267517, |
|
"grad_norm": 0.5962004661560059, |
|
"learning_rate": 9.446999404427209e-05, |
|
"loss": 0.0355, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 2.722929936305732, |
|
"grad_norm": 0.3611944913864136, |
|
"learning_rate": 9.341806119432324e-05, |
|
"loss": 0.0299, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 2.738853503184713, |
|
"grad_norm": 0.23662623763084412, |
|
"learning_rate": 9.2366859358239e-05, |
|
"loss": 0.0305, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 2.754777070063694, |
|
"grad_norm": 0.27245596051216125, |
|
"learning_rate": 9.131650528628687e-05, |
|
"loss": 0.0265, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 2.770700636942675, |
|
"grad_norm": 0.23096629977226257, |
|
"learning_rate": 9.026711563457868e-05, |
|
"loss": 0.0373, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 2.786624203821656, |
|
"grad_norm": 0.23357895016670227, |
|
"learning_rate": 8.921880695211431e-05, |
|
"loss": 0.0301, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 2.802547770700637, |
|
"grad_norm": 0.36908674240112305, |
|
"learning_rate": 8.817169566783723e-05, |
|
"loss": 0.0276, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 2.8184713375796178, |
|
"grad_norm": 0.20657651126384735, |
|
"learning_rate": 8.712589807770349e-05, |
|
"loss": 0.0299, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 2.8343949044585988, |
|
"grad_norm": 0.27385637164115906, |
|
"learning_rate": 8.60815303317657e-05, |
|
"loss": 0.0312, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 2.8503184713375798, |
|
"grad_norm": 0.4029058516025543, |
|
"learning_rate": 8.503870842127267e-05, |
|
"loss": 0.0391, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 2.8662420382165603, |
|
"grad_norm": 0.2755729556083679, |
|
"learning_rate": 8.399754816578718e-05, |
|
"loss": 0.0367, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 2.8821656050955413, |
|
"grad_norm": 0.2416078746318817, |
|
"learning_rate": 8.295816520032271e-05, |
|
"loss": 0.0274, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 2.8980891719745223, |
|
"grad_norm": 0.22337061166763306, |
|
"learning_rate": 8.192067496250038e-05, |
|
"loss": 0.0219, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 2.9140127388535033, |
|
"grad_norm": 0.21762487292289734, |
|
"learning_rate": 8.088519267972813e-05, |
|
"loss": 0.023, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 2.9299363057324843, |
|
"grad_norm": 0.32507646083831787, |
|
"learning_rate": 7.985183335640331e-05, |
|
"loss": 0.0284, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 2.945859872611465, |
|
"grad_norm": 0.3356756269931793, |
|
"learning_rate": 7.882071176113967e-05, |
|
"loss": 0.0304, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 2.961783439490446, |
|
"grad_norm": 0.31162557005882263, |
|
"learning_rate": 7.779194241402071e-05, |
|
"loss": 0.0282, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 2.977707006369427, |
|
"grad_norm": 0.24151593446731567, |
|
"learning_rate": 7.676563957388097e-05, |
|
"loss": 0.0248, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 2.9936305732484074, |
|
"grad_norm": 0.41317158937454224, |
|
"learning_rate": 7.574191722561562e-05, |
|
"loss": 0.0303, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 3.0095541401273884, |
|
"grad_norm": 0.33723485469818115, |
|
"learning_rate": 7.472088906752124e-05, |
|
"loss": 0.0326, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 3.0254777070063694, |
|
"grad_norm": 0.402401328086853, |
|
"learning_rate": 7.37026684986678e-05, |
|
"loss": 0.0327, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 3.0414012738853504, |
|
"grad_norm": 0.2733253538608551, |
|
"learning_rate": 7.26873686063045e-05, |
|
"loss": 0.0286, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 3.0573248407643314, |
|
"grad_norm": 0.2582903802394867, |
|
"learning_rate": 7.16751021532994e-05, |
|
"loss": 0.0232, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 3.073248407643312, |
|
"grad_norm": 0.3948001265525818, |
|
"learning_rate": 7.066598156561606e-05, |
|
"loss": 0.0331, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 3.089171974522293, |
|
"grad_norm": 0.23859384655952454, |
|
"learning_rate": 6.966011891982678e-05, |
|
"loss": 0.0253, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 3.105095541401274, |
|
"grad_norm": 0.2874588072299957, |
|
"learning_rate": 6.865762593066513e-05, |
|
"loss": 0.0316, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 3.121019108280255, |
|
"grad_norm": 0.40092089772224426, |
|
"learning_rate": 6.76586139386184e-05, |
|
"loss": 0.0294, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 3.1369426751592355, |
|
"grad_norm": 0.2927519679069519, |
|
"learning_rate": 6.666319389756189e-05, |
|
"loss": 0.0323, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 3.1528662420382165, |
|
"grad_norm": 0.2560182809829712, |
|
"learning_rate": 6.567147636243576e-05, |
|
"loss": 0.0292, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 3.1687898089171975, |
|
"grad_norm": 0.18076066672801971, |
|
"learning_rate": 6.468357147696653e-05, |
|
"loss": 0.0223, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 3.1847133757961785, |
|
"grad_norm": 0.32147490978240967, |
|
"learning_rate": 6.36995889614342e-05, |
|
"loss": 0.028, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 3.200636942675159, |
|
"grad_norm": 0.24778632819652557, |
|
"learning_rate": 6.271963810048607e-05, |
|
"loss": 0.0244, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 3.21656050955414, |
|
"grad_norm": 0.2591859698295593, |
|
"learning_rate": 6.174382773099938e-05, |
|
"loss": 0.0252, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 3.232484076433121, |
|
"grad_norm": 0.37974950671195984, |
|
"learning_rate": 6.077226622999355e-05, |
|
"loss": 0.0293, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 3.248407643312102, |
|
"grad_norm": 0.36128807067871094, |
|
"learning_rate": 5.980506150259323e-05, |
|
"loss": 0.0253, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 3.2643312101910826, |
|
"grad_norm": 0.2899702489376068, |
|
"learning_rate": 5.88423209700441e-05, |
|
"loss": 0.0199, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 3.2802547770700636, |
|
"grad_norm": 0.24200858175754547, |
|
"learning_rate": 5.7884151557782305e-05, |
|
"loss": 0.0334, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 3.2961783439490446, |
|
"grad_norm": 0.24898533523082733, |
|
"learning_rate": 5.693065968355888e-05, |
|
"loss": 0.0269, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 3.3121019108280256, |
|
"grad_norm": 0.33028173446655273, |
|
"learning_rate": 5.598195124562049e-05, |
|
"loss": 0.0309, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 3.328025477707006, |
|
"grad_norm": 0.32554179430007935, |
|
"learning_rate": 5.5038131610948326e-05, |
|
"loss": 0.0274, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 3.343949044585987, |
|
"grad_norm": 0.25384682416915894, |
|
"learning_rate": 5.409930560355527e-05, |
|
"loss": 0.0235, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 3.359872611464968, |
|
"grad_norm": 0.3368367850780487, |
|
"learning_rate": 5.316557749284401e-05, |
|
"loss": 0.0259, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 3.375796178343949, |
|
"grad_norm": 0.3751080334186554, |
|
"learning_rate": 5.223705098202647e-05, |
|
"loss": 0.0405, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 3.3917197452229297, |
|
"grad_norm": 0.3862003684043884, |
|
"learning_rate": 5.131382919660609e-05, |
|
"loss": 0.0311, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 3.4076433121019107, |
|
"grad_norm": 0.32009872794151306, |
|
"learning_rate": 5.03960146729244e-05, |
|
"loss": 0.0252, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 3.4235668789808917, |
|
"grad_norm": 0.2071923166513443, |
|
"learning_rate": 4.9483709346772896e-05, |
|
"loss": 0.0248, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 3.4394904458598727, |
|
"grad_norm": 0.3461016118526459, |
|
"learning_rate": 4.85770145420718e-05, |
|
"loss": 0.0336, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 3.4554140127388537, |
|
"grad_norm": 0.3042682111263275, |
|
"learning_rate": 4.7676030959616526e-05, |
|
"loss": 0.0211, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 3.4713375796178343, |
|
"grad_norm": 0.16671477258205414, |
|
"learning_rate": 4.678085866589348e-05, |
|
"loss": 0.018, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 3.4872611464968153, |
|
"grad_norm": 0.20116400718688965, |
|
"learning_rate": 4.5891597081966583e-05, |
|
"loss": 0.0224, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 3.5031847133757963, |
|
"grad_norm": 0.23196861147880554, |
|
"learning_rate": 4.500834497243478e-05, |
|
"loss": 0.0221, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 3.519108280254777, |
|
"grad_norm": 0.2727397084236145, |
|
"learning_rate": 4.413120043446322e-05, |
|
"loss": 0.0272, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 3.535031847133758, |
|
"grad_norm": 0.283627450466156, |
|
"learning_rate": 4.3260260886888206e-05, |
|
"loss": 0.0259, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 3.550955414012739, |
|
"grad_norm": 0.40015652775764465, |
|
"learning_rate": 4.239562305939725e-05, |
|
"loss": 0.0298, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 3.56687898089172, |
|
"grad_norm": 0.30534714460372925, |
|
"learning_rate": 4.153738298178623e-05, |
|
"loss": 0.0245, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 3.582802547770701, |
|
"grad_norm": 0.22230559587478638, |
|
"learning_rate": 4.068563597329379e-05, |
|
"loss": 0.0231, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 3.5987261146496814, |
|
"grad_norm": 0.3529864549636841, |
|
"learning_rate": 3.984047663201492e-05, |
|
"loss": 0.04, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 3.6146496815286624, |
|
"grad_norm": 0.2945915460586548, |
|
"learning_rate": 3.9001998824394526e-05, |
|
"loss": 0.0251, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 3.6305732484076434, |
|
"grad_norm": 0.24476458132266998, |
|
"learning_rate": 3.817029567480228e-05, |
|
"loss": 0.0245, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 3.646496815286624, |
|
"grad_norm": 0.1646440178155899, |
|
"learning_rate": 3.7345459555189885e-05, |
|
"loss": 0.0246, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 3.662420382165605, |
|
"grad_norm": 0.21083424985408783, |
|
"learning_rate": 3.652758207483197e-05, |
|
"loss": 0.0272, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 3.678343949044586, |
|
"grad_norm": 0.17064619064331055, |
|
"learning_rate": 3.5716754070151524e-05, |
|
"loss": 0.0169, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 3.694267515923567, |
|
"grad_norm": 0.18861030042171478, |
|
"learning_rate": 3.4913065594631434e-05, |
|
"loss": 0.0257, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 3.710191082802548, |
|
"grad_norm": 0.31544387340545654, |
|
"learning_rate": 3.4116605908812595e-05, |
|
"loss": 0.0237, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 3.7261146496815285, |
|
"grad_norm": 0.5185452103614807, |
|
"learning_rate": 3.3327463470380524e-05, |
|
"loss": 0.0229, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 3.7420382165605095, |
|
"grad_norm": 0.2828991115093231, |
|
"learning_rate": 3.2545725924340876e-05, |
|
"loss": 0.0294, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 3.7579617834394905, |
|
"grad_norm": 0.2944129407405853, |
|
"learning_rate": 3.1771480093285186e-05, |
|
"loss": 0.0218, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 3.7738853503184715, |
|
"grad_norm": 0.24150818586349487, |
|
"learning_rate": 3.1004811967748134e-05, |
|
"loss": 0.024, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 3.789808917197452, |
|
"grad_norm": 0.2700318396091461, |
|
"learning_rate": 3.024580669665711e-05, |
|
"loss": 0.0244, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 3.805732484076433, |
|
"grad_norm": 0.4214876592159271, |
|
"learning_rate": 2.9494548577875192e-05, |
|
"loss": 0.0264, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 3.821656050955414, |
|
"grad_norm": 0.2855050265789032, |
|
"learning_rate": 2.8751121048838836e-05, |
|
"loss": 0.02, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 3.837579617834395, |
|
"grad_norm": 0.19058647751808167, |
|
"learning_rate": 2.801560667729093e-05, |
|
"loss": 0.0243, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 3.853503184713376, |
|
"grad_norm": 0.32941293716430664, |
|
"learning_rate": 2.7288087152110565e-05, |
|
"loss": 0.0268, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 3.8694267515923566, |
|
"grad_norm": 0.4154011905193329, |
|
"learning_rate": 2.656864327424027e-05, |
|
"loss": 0.0206, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 3.8853503184713376, |
|
"grad_norm": 0.3098050057888031, |
|
"learning_rate": 2.585735494771222e-05, |
|
"loss": 0.023, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 3.9012738853503186, |
|
"grad_norm": 0.27963143587112427, |
|
"learning_rate": 2.515430117077354e-05, |
|
"loss": 0.0273, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 3.917197452229299, |
|
"grad_norm": 0.29374557733535767, |
|
"learning_rate": 2.4459560027112504e-05, |
|
"loss": 0.0244, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 3.93312101910828, |
|
"grad_norm": 0.1925758719444275, |
|
"learning_rate": 2.377320867718654e-05, |
|
"loss": 0.0214, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 3.949044585987261, |
|
"grad_norm": 0.3169631361961365, |
|
"learning_rate": 2.3095323349652153e-05, |
|
"loss": 0.0258, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 3.964968152866242, |
|
"grad_norm": 0.2621577978134155, |
|
"learning_rate": 2.242597933289894e-05, |
|
"loss": 0.0238, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 3.980891719745223, |
|
"grad_norm": 0.2187499850988388, |
|
"learning_rate": 2.176525096668769e-05, |
|
"loss": 0.0266, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 3.9968152866242037, |
|
"grad_norm": 0.2935671806335449, |
|
"learning_rate": 2.1113211633894003e-05, |
|
"loss": 0.0214, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 4.012738853503185, |
|
"grad_norm": 0.1593521386384964, |
|
"learning_rate": 2.046993375235804e-05, |
|
"loss": 0.0166, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 4.028662420382165, |
|
"grad_norm": 0.32574114203453064, |
|
"learning_rate": 1.9835488766841615e-05, |
|
"loss": 0.0232, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 4.044585987261146, |
|
"grad_norm": 0.2108948677778244, |
|
"learning_rate": 1.920994714109323e-05, |
|
"loss": 0.0255, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 4.060509554140127, |
|
"grad_norm": 0.2054058164358139, |
|
"learning_rate": 1.8593378350022183e-05, |
|
"loss": 0.0156, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 4.076433121019108, |
|
"grad_norm": 0.28134655952453613, |
|
"learning_rate": 1.798585087198228e-05, |
|
"loss": 0.0244, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 4.092356687898089, |
|
"grad_norm": 0.26843348145484924, |
|
"learning_rate": 1.7387432181166642e-05, |
|
"loss": 0.0294, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 4.10828025477707, |
|
"grad_norm": 0.32004109025001526, |
|
"learning_rate": 1.6798188740113484e-05, |
|
"loss": 0.0263, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 4.124203821656051, |
|
"grad_norm": 0.6330613493919373, |
|
"learning_rate": 1.621818599232475e-05, |
|
"loss": 0.0178, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 4.140127388535032, |
|
"grad_norm": 0.21530304849147797, |
|
"learning_rate": 1.564748835499773e-05, |
|
"loss": 0.0277, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 4.156050955414012, |
|
"grad_norm": 0.20998333394527435, |
|
"learning_rate": 1.5086159211870442e-05, |
|
"loss": 0.0214, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 4.171974522292993, |
|
"grad_norm": 0.3173789381980896, |
|
"learning_rate": 1.4534260906182228e-05, |
|
"loss": 0.0198, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 4.187898089171974, |
|
"grad_norm": 0.18961144983768463, |
|
"learning_rate": 1.399185473374961e-05, |
|
"loss": 0.021, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 4.203821656050955, |
|
"grad_norm": 0.36942291259765625, |
|
"learning_rate": 1.3459000936158528e-05, |
|
"loss": 0.0239, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 4.219745222929936, |
|
"grad_norm": 0.28133276104927063, |
|
"learning_rate": 1.293575869407373e-05, |
|
"loss": 0.0244, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 4.235668789808917, |
|
"grad_norm": 0.2719907760620117, |
|
"learning_rate": 1.2422186120665935e-05, |
|
"loss": 0.0203, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 4.251592356687898, |
|
"grad_norm": 0.15558955073356628, |
|
"learning_rate": 1.1918340255157612e-05, |
|
"loss": 0.0214, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 4.267515923566879, |
|
"grad_norm": 0.23012204468250275, |
|
"learning_rate": 1.1424277056487864e-05, |
|
"loss": 0.0232, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 4.2834394904458595, |
|
"grad_norm": 0.29646140336990356, |
|
"learning_rate": 1.0940051397097673e-05, |
|
"loss": 0.0179, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 4.2993630573248405, |
|
"grad_norm": 0.282185435295105, |
|
"learning_rate": 1.0465717056835355e-05, |
|
"loss": 0.0171, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 4.3152866242038215, |
|
"grad_norm": 0.3244917392730713, |
|
"learning_rate": 1.0001326716983584e-05, |
|
"loss": 0.0161, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 4.3312101910828025, |
|
"grad_norm": 0.2768123149871826, |
|
"learning_rate": 9.546931954408622e-06, |
|
"loss": 0.0248, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 4.3471337579617835, |
|
"grad_norm": 0.14225247502326965, |
|
"learning_rate": 9.10258323583173e-06, |
|
"loss": 0.0171, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 4.3630573248407645, |
|
"grad_norm": 0.16188742220401764, |
|
"learning_rate": 8.668329912224337e-06, |
|
"loss": 0.0177, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 4.3789808917197455, |
|
"grad_norm": 0.30127179622650146, |
|
"learning_rate": 8.244220213326958e-06, |
|
"loss": 0.0213, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 4.3949044585987265, |
|
"grad_norm": 0.1804376244544983, |
|
"learning_rate": 7.83030124229246e-06, |
|
"loss": 0.0232, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 4.4108280254777075, |
|
"grad_norm": 0.24196097254753113, |
|
"learning_rate": 7.426618970454757e-06, |
|
"loss": 0.0163, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 4.426751592356688, |
|
"grad_norm": 0.4308320879936218, |
|
"learning_rate": 7.033218232223027e-06, |
|
"loss": 0.0165, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 4.442675159235669, |
|
"grad_norm": 0.3199467062950134, |
|
"learning_rate": 6.650142720102248e-06, |
|
"loss": 0.024, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 4.45859872611465, |
|
"grad_norm": 0.2603253722190857, |
|
"learning_rate": 6.2774349798405465e-06, |
|
"loss": 0.0232, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 4.474522292993631, |
|
"grad_norm": 0.20752651989459991, |
|
"learning_rate": 5.915136405703858e-06, |
|
"loss": 0.0223, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 4.490445859872612, |
|
"grad_norm": 0.17525236308574677, |
|
"learning_rate": 5.563287235878678e-06, |
|
"loss": 0.0168, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 4.506369426751593, |
|
"grad_norm": 0.20458337664604187, |
|
"learning_rate": 5.221926548002876e-06, |
|
"loss": 0.0208, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 4.522292993630574, |
|
"grad_norm": 0.353657066822052, |
|
"learning_rate": 4.891092254825725e-06, |
|
"loss": 0.0224, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 4.538216560509554, |
|
"grad_norm": 0.23591625690460205, |
|
"learning_rate": 4.570821099997169e-06, |
|
"loss": 0.0186, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 4.554140127388535, |
|
"grad_norm": 0.2248276323080063, |
|
"learning_rate": 4.261148653986824e-06, |
|
"loss": 0.0201, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 4.570063694267516, |
|
"grad_norm": 0.19413244724273682, |
|
"learning_rate": 3.962109310133544e-06, |
|
"loss": 0.0209, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 4.585987261146497, |
|
"grad_norm": 0.1779373288154602, |
|
"learning_rate": 3.6737362808254793e-06, |
|
"loss": 0.0164, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 4.601910828025478, |
|
"grad_norm": 0.13120533525943756, |
|
"learning_rate": 3.3960615938114172e-06, |
|
"loss": 0.02, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 4.617834394904459, |
|
"grad_norm": 0.24661269783973694, |
|
"learning_rate": 3.1291160886436575e-06, |
|
"loss": 0.0199, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 4.63375796178344, |
|
"grad_norm": 0.20324034988880157, |
|
"learning_rate": 2.872929413252867e-06, |
|
"loss": 0.0168, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 4.649681528662421, |
|
"grad_norm": 0.1891164630651474, |
|
"learning_rate": 2.6275300206552667e-06, |
|
"loss": 0.0239, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 4.665605095541402, |
|
"grad_norm": 0.446866512298584, |
|
"learning_rate": 2.3929451657925482e-06, |
|
"loss": 0.0215, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 4.681528662420382, |
|
"grad_norm": 0.1874268352985382, |
|
"learning_rate": 2.1692009025048422e-06, |
|
"loss": 0.0194, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 4.697452229299363, |
|
"grad_norm": 0.3293200135231018, |
|
"learning_rate": 1.956322080637052e-06, |
|
"loss": 0.0245, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 4.713375796178344, |
|
"grad_norm": 0.21030674874782562, |
|
"learning_rate": 1.754332343278986e-06, |
|
"loss": 0.0218, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 4.729299363057325, |
|
"grad_norm": 0.1421198844909668, |
|
"learning_rate": 1.5632541241394461e-06, |
|
"loss": 0.017, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 4.745222929936306, |
|
"grad_norm": 0.30587485432624817, |
|
"learning_rate": 1.3831086450546648e-06, |
|
"loss": 0.0165, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 4.761146496815287, |
|
"grad_norm": 0.28812679648399353, |
|
"learning_rate": 1.213915913631314e-06, |
|
"loss": 0.0221, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 4.777070063694268, |
|
"grad_norm": 0.20168578624725342, |
|
"learning_rate": 1.055694721024425e-06, |
|
"loss": 0.0197, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 4.792993630573249, |
|
"grad_norm": 0.31528469920158386, |
|
"learning_rate": 9.084626398503604e-07, |
|
"loss": 0.0278, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 4.80891719745223, |
|
"grad_norm": 0.2268596887588501, |
|
"learning_rate": 7.722360222350955e-07, |
|
"loss": 0.0196, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 4.82484076433121, |
|
"grad_norm": 0.21379084885120392, |
|
"learning_rate": 6.470299979981609e-07, |
|
"loss": 0.0242, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 4.840764331210191, |
|
"grad_norm": 0.4038665294647217, |
|
"learning_rate": 5.328584729722197e-07, |
|
"loss": 0.0208, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 4.856687898089172, |
|
"grad_norm": 0.1791532039642334, |
|
"learning_rate": 4.297341274586475e-07, |
|
"loss": 0.0248, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 4.872611464968153, |
|
"grad_norm": 0.2981935739517212, |
|
"learning_rate": 3.376684148192255e-07, |
|
"loss": 0.0166, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 4.888535031847134, |
|
"grad_norm": 0.37263256311416626, |
|
"learning_rate": 2.5667156020408034e-07, |
|
"loss": 0.0161, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 4.904458598726115, |
|
"grad_norm": 0.19920985400676727, |
|
"learning_rate": 1.8675255941604797e-07, |
|
"loss": 0.0296, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 4.920382165605096, |
|
"grad_norm": 0.17917926609516144, |
|
"learning_rate": 1.2791917791152853e-07, |
|
"loss": 0.024, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 4.936305732484076, |
|
"grad_norm": 0.2005498707294464, |
|
"learning_rate": 8.017794993808725e-08, |
|
"loss": 0.0223, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 4.952229299363057, |
|
"grad_norm": 0.23752698302268982, |
|
"learning_rate": 4.353417780872393e-08, |
|
"loss": 0.0199, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 4.968152866242038, |
|
"grad_norm": 0.20469646155834198, |
|
"learning_rate": 1.7991931312921762e-08, |
|
"loss": 0.021, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 4.984076433121019, |
|
"grad_norm": 0.20618374645709991, |
|
"learning_rate": 3.5540472647088707e-09, |
|
"loss": 0.0183, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 4.996815286624204, |
|
"step": 3138, |
|
"total_flos": 1.1375719484380992e+17, |
|
"train_loss": 0.04994222255384489, |
|
"train_runtime": 1406.117, |
|
"train_samples_per_second": 35.707, |
|
"train_steps_per_second": 2.232 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 3138, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.1375719484380992e+17, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |