{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 19.965384615384615,
  "eval_steps": 500,
  "global_step": 5191,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.038461538461538464,
      "grad_norm": 7.0167975425720215,
      "learning_rate": 7.692307692307694e-06,
      "loss": 1.1358,
      "step": 10
    },
    {
      "epoch": 0.07692307692307693,
      "grad_norm": 3.2799055576324463,
      "learning_rate": 1.5384615384615387e-05,
      "loss": 0.8043,
      "step": 20
    },
    {
      "epoch": 0.11538461538461539,
      "grad_norm": 4.641620635986328,
      "learning_rate": 2.307692307692308e-05,
      "loss": 0.4223,
      "step": 30
    },
    {
      "epoch": 0.15384615384615385,
      "grad_norm": 0.8846292495727539,
      "learning_rate": 3.0769230769230774e-05,
      "loss": 0.2722,
      "step": 40
    },
    {
      "epoch": 0.19230769230769232,
      "grad_norm": 0.9276573657989502,
      "learning_rate": 3.846153846153846e-05,
      "loss": 0.1947,
      "step": 50
    },
    {
      "epoch": 0.23076923076923078,
      "grad_norm": 1.1657756567001343,
      "learning_rate": 4.615384615384616e-05,
      "loss": 0.1486,
      "step": 60
    },
    {
      "epoch": 0.2692307692307692,
      "grad_norm": 0.9154715538024902,
      "learning_rate": 5.384615384615385e-05,
      "loss": 0.1246,
      "step": 70
    },
    {
      "epoch": 0.3076923076923077,
      "grad_norm": 1.1416549682617188,
      "learning_rate": 6.153846153846155e-05,
      "loss": 0.1014,
      "step": 80
    },
    {
      "epoch": 0.34615384615384615,
      "grad_norm": 0.8195381760597229,
      "learning_rate": 6.923076923076924e-05,
      "loss": 0.0926,
      "step": 90
    },
    {
      "epoch": 0.38461538461538464,
      "grad_norm": 0.45692142844200134,
      "learning_rate": 7.692307692307693e-05,
      "loss": 0.0815,
      "step": 100
    },
    {
      "epoch": 0.4230769230769231,
      "grad_norm": 0.4946421980857849,
      "learning_rate": 8.461538461538461e-05,
      "loss": 0.0832,
      "step": 110
    },
    {
      "epoch": 0.46153846153846156,
      "grad_norm": 0.6091125011444092,
      "learning_rate": 9.230769230769232e-05,
      "loss": 0.0721,
      "step": 120
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.610546886920929,
      "learning_rate": 0.0001,
      "loss": 0.0632,
      "step": 130
    },
    {
      "epoch": 0.5384615384615384,
      "grad_norm": 0.5763479471206665,
      "learning_rate": 0.0001076923076923077,
      "loss": 0.0673,
      "step": 140
    },
    {
      "epoch": 0.5769230769230769,
      "grad_norm": 0.821746826171875,
      "learning_rate": 0.00011538461538461538,
      "loss": 0.0641,
      "step": 150
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 0.5488954186439514,
      "learning_rate": 0.0001230769230769231,
      "loss": 0.0625,
      "step": 160
    },
    {
      "epoch": 0.6538461538461539,
      "grad_norm": 0.6605976819992065,
      "learning_rate": 0.00013076923076923077,
      "loss": 0.0579,
      "step": 170
    },
    {
      "epoch": 0.6923076923076923,
      "grad_norm": 0.8009259104728699,
      "learning_rate": 0.00013846153846153847,
      "loss": 0.0523,
      "step": 180
    },
    {
      "epoch": 0.7307692307692307,
      "grad_norm": 0.4878791272640228,
      "learning_rate": 0.00014615384615384615,
      "loss": 0.0538,
      "step": 190
    },
    {
      "epoch": 0.7692307692307693,
      "grad_norm": 0.3475745618343353,
      "learning_rate": 0.00015384615384615385,
      "loss": 0.0519,
      "step": 200
    },
    {
      "epoch": 0.8076923076923077,
      "grad_norm": 0.454436719417572,
      "learning_rate": 0.00016153846153846155,
      "loss": 0.0514,
      "step": 210
    },
    {
      "epoch": 0.8461538461538461,
      "grad_norm": 0.7808321118354797,
      "learning_rate": 0.00016923076923076923,
      "loss": 0.0517,
      "step": 220
    },
    {
      "epoch": 0.8846153846153846,
      "grad_norm": 0.6214762926101685,
      "learning_rate": 0.00017692307692307693,
      "loss": 0.0524,
      "step": 230
    },
    {
      "epoch": 0.9230769230769231,
      "grad_norm": 0.47517019510269165,
      "learning_rate": 0.00018461538461538463,
      "loss": 0.0545,
      "step": 240
    },
    {
      "epoch": 0.9615384615384616,
      "grad_norm": 0.7772664427757263,
      "learning_rate": 0.00019230769230769233,
      "loss": 0.0491,
      "step": 250
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.863855242729187,
      "learning_rate": 0.0002,
      "loss": 0.0482,
      "step": 260
    },
    {
      "epoch": 1.0384615384615385,
      "grad_norm": 0.34070315957069397,
      "learning_rate": 0.00019999797045691374,
      "loss": 0.0467,
      "step": 270
    },
    {
      "epoch": 1.0769230769230769,
      "grad_norm": 0.38879549503326416,
      "learning_rate": 0.00019999188191003583,
      "loss": 0.0392,
      "step": 280
    },
    {
      "epoch": 1.1153846153846154,
      "grad_norm": 0.4438105821609497,
      "learning_rate": 0.00019998173460650565,
      "loss": 0.0424,
      "step": 290
    },
    {
      "epoch": 1.1538461538461537,
      "grad_norm": 0.9575906991958618,
      "learning_rate": 0.00019996752895821098,
      "loss": 0.0503,
      "step": 300
    },
    {
      "epoch": 1.1923076923076923,
      "grad_norm": 0.5975697636604309,
      "learning_rate": 0.00019994926554177134,
      "loss": 0.0487,
      "step": 310
    },
    {
      "epoch": 1.2307692307692308,
      "grad_norm": 0.5060175061225891,
      "learning_rate": 0.00019992694509851454,
      "loss": 0.0376,
      "step": 320
    },
    {
      "epoch": 1.2692307692307692,
      "grad_norm": 0.35833704471588135,
      "learning_rate": 0.0001999005685344466,
      "loss": 0.0378,
      "step": 330
    },
    {
      "epoch": 1.3076923076923077,
      "grad_norm": 0.5617273449897766,
      "learning_rate": 0.000199870136920215,
      "loss": 0.0476,
      "step": 340
    },
    {
      "epoch": 1.3461538461538463,
      "grad_norm": 0.611136794090271,
      "learning_rate": 0.00019983565149106513,
      "loss": 0.0442,
      "step": 350
    },
    {
      "epoch": 1.3846153846153846,
      "grad_norm": 0.3568519949913025,
      "learning_rate": 0.00019979711364679037,
      "loss": 0.042,
      "step": 360
    },
    {
      "epoch": 1.4230769230769231,
      "grad_norm": 0.3810388445854187,
      "learning_rate": 0.00019975452495167494,
      "loss": 0.0375,
      "step": 370
    },
    {
      "epoch": 1.4615384615384617,
      "grad_norm": 0.5420308113098145,
      "learning_rate": 0.00019970788713443073,
      "loss": 0.0359,
      "step": 380
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.5682479739189148,
      "learning_rate": 0.00019965720208812692,
      "loss": 0.0371,
      "step": 390
    },
    {
      "epoch": 1.5384615384615383,
      "grad_norm": 0.2949374318122864,
      "learning_rate": 0.00019960247187011322,
      "loss": 0.0368,
      "step": 400
    },
    {
      "epoch": 1.5769230769230769,
      "grad_norm": 0.44852954149246216,
      "learning_rate": 0.0001995436987019363,
      "loss": 0.0375,
      "step": 410
    },
    {
      "epoch": 1.6153846153846154,
      "grad_norm": 0.2495189756155014,
      "learning_rate": 0.00019948088496924972,
      "loss": 0.0363,
      "step": 420
    },
    {
      "epoch": 1.6538461538461537,
      "grad_norm": 0.4514489769935608,
      "learning_rate": 0.00019941403322171707,
      "loss": 0.037,
      "step": 430
    },
    {
      "epoch": 1.6923076923076923,
      "grad_norm": 0.26508140563964844,
      "learning_rate": 0.00019934314617290832,
      "loss": 0.0337,
      "step": 440
    },
    {
      "epoch": 1.7307692307692308,
      "grad_norm": 0.23346170783042908,
      "learning_rate": 0.00019926822670018993,
      "loss": 0.0344,
      "step": 450
    },
    {
      "epoch": 1.7692307692307692,
      "grad_norm": 0.3835722506046295,
      "learning_rate": 0.00019918927784460781,
      "loss": 0.0324,
      "step": 460
    },
    {
      "epoch": 1.8076923076923077,
      "grad_norm": 0.3047066330909729,
      "learning_rate": 0.00019910630281076407,
      "loss": 0.0305,
      "step": 470
    },
    {
      "epoch": 1.8461538461538463,
      "grad_norm": 0.22565406560897827,
      "learning_rate": 0.0001990193049666868,
      "loss": 0.033,
      "step": 480
    },
    {
      "epoch": 1.8846153846153846,
      "grad_norm": 0.39445042610168457,
      "learning_rate": 0.00019892828784369345,
      "loss": 0.0301,
      "step": 490
    },
    {
      "epoch": 1.9230769230769231,
      "grad_norm": 0.4797164797782898,
      "learning_rate": 0.00019883325513624756,
      "loss": 0.0343,
      "step": 500
    },
    {
      "epoch": 1.9615384615384617,
      "grad_norm": 0.5288494825363159,
      "learning_rate": 0.00019873421070180857,
      "loss": 0.0318,
      "step": 510
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.3639772832393646,
      "learning_rate": 0.00019863115856067536,
      "loss": 0.0318,
      "step": 520
    },
    {
      "epoch": 2.0384615384615383,
      "grad_norm": 0.4338575303554535,
      "learning_rate": 0.0001985241028958232,
      "loss": 0.029,
      "step": 530
    },
    {
      "epoch": 2.076923076923077,
      "grad_norm": 0.25063231587409973,
      "learning_rate": 0.00019841304805273382,
      "loss": 0.0299,
      "step": 540
    },
    {
      "epoch": 2.1153846153846154,
      "grad_norm": 0.17708472907543182,
      "learning_rate": 0.0001982979985392189,
      "loss": 0.024,
      "step": 550
    },
    {
      "epoch": 2.1538461538461537,
      "grad_norm": 0.33794716000556946,
      "learning_rate": 0.00019817895902523745,
      "loss": 0.0285,
      "step": 560
    },
    {
      "epoch": 2.1923076923076925,
      "grad_norm": 0.22588586807250977,
      "learning_rate": 0.00019805593434270584,
      "loss": 0.0309,
      "step": 570
    },
    {
      "epoch": 2.230769230769231,
      "grad_norm": 0.27854955196380615,
      "learning_rate": 0.00019792892948530195,
      "loss": 0.0287,
      "step": 580
    },
    {
      "epoch": 2.269230769230769,
      "grad_norm": 0.24123087525367737,
      "learning_rate": 0.0001977979496082624,
      "loss": 0.0259,
      "step": 590
    },
    {
      "epoch": 2.3076923076923075,
      "grad_norm": 0.311997652053833,
      "learning_rate": 0.0001976630000281733,
      "loss": 0.0273,
      "step": 600
    },
    {
      "epoch": 2.3461538461538463,
      "grad_norm": 0.2107621133327484,
      "learning_rate": 0.00019752408622275436,
      "loss": 0.0267,
      "step": 610
    },
    {
      "epoch": 2.3846153846153846,
      "grad_norm": 0.49752968549728394,
      "learning_rate": 0.0001973812138306366,
      "loss": 0.0305,
      "step": 620
    },
    {
      "epoch": 2.423076923076923,
      "grad_norm": 0.2595117688179016,
      "learning_rate": 0.00019723438865113367,
      "loss": 0.0272,
      "step": 630
    },
    {
      "epoch": 2.4615384615384617,
      "grad_norm": 0.35266903042793274,
      "learning_rate": 0.00019708361664400598,
      "loss": 0.0289,
      "step": 640
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.3101799190044403,
      "learning_rate": 0.00019692890392921934,
      "loss": 0.0304,
      "step": 650
    },
    {
      "epoch": 2.5384615384615383,
      "grad_norm": 0.3763001561164856,
      "learning_rate": 0.00019677025678669607,
      "loss": 0.0261,
      "step": 660
    },
    {
      "epoch": 2.5769230769230766,
      "grad_norm": 0.17876401543617249,
      "learning_rate": 0.00019660768165606046,
      "loss": 0.0251,
      "step": 670
    },
    {
      "epoch": 2.6153846153846154,
      "grad_norm": 0.2988975942134857,
      "learning_rate": 0.00019644118513637715,
      "loss": 0.0265,
      "step": 680
    },
    {
      "epoch": 2.6538461538461537,
      "grad_norm": 0.32994213700294495,
      "learning_rate": 0.00019627077398588335,
      "loss": 0.0247,
      "step": 690
    },
    {
      "epoch": 2.6923076923076925,
      "grad_norm": 0.25379645824432373,
      "learning_rate": 0.00019609645512171442,
      "loss": 0.0241,
      "step": 700
    },
    {
      "epoch": 2.730769230769231,
      "grad_norm": 0.3905366361141205,
      "learning_rate": 0.0001959182356196234,
      "loss": 0.0249,
      "step": 710
    },
    {
      "epoch": 2.769230769230769,
      "grad_norm": 0.22633816301822662,
      "learning_rate": 0.00019573612271369336,
      "loss": 0.0267,
      "step": 720
    },
    {
      "epoch": 2.8076923076923075,
      "grad_norm": 0.2320065051317215,
      "learning_rate": 0.00019555012379604417,
      "loss": 0.0255,
      "step": 730
    },
    {
      "epoch": 2.8461538461538463,
      "grad_norm": 0.21357214450836182,
      "learning_rate": 0.0001953602464165321,
      "loss": 0.0289,
      "step": 740
    },
    {
      "epoch": 2.8846153846153846,
      "grad_norm": 0.23385067284107208,
      "learning_rate": 0.00019516649828244363,
      "loss": 0.023,
      "step": 750
    },
    {
      "epoch": 2.9230769230769234,
      "grad_norm": 0.2579933702945709,
      "learning_rate": 0.0001949688872581825,
      "loss": 0.0258,
      "step": 760
    },
    {
      "epoch": 2.9615384615384617,
      "grad_norm": 0.3598341643810272,
      "learning_rate": 0.0001947674213649504,
      "loss": 0.0282,
      "step": 770
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.2527090907096863,
      "learning_rate": 0.00019456210878042164,
      "loss": 0.0277,
      "step": 780
    },
    {
      "epoch": 3.0384615384615383,
      "grad_norm": 0.3657386898994446,
      "learning_rate": 0.00019435295783841092,
      "loss": 0.0246,
      "step": 790
    },
    {
      "epoch": 3.076923076923077,
      "grad_norm": 0.35666748881340027,
      "learning_rate": 0.0001941399770285352,
      "loss": 0.024,
      "step": 800
    },
    {
      "epoch": 3.1153846153846154,
      "grad_norm": 0.19721713662147522,
      "learning_rate": 0.00019392317499586907,
      "loss": 0.0204,
      "step": 810
    },
    {
      "epoch": 3.1538461538461537,
      "grad_norm": 0.23313289880752563,
      "learning_rate": 0.00019370256054059386,
      "loss": 0.0217,
      "step": 820
    },
    {
      "epoch": 3.1923076923076925,
      "grad_norm": 0.2258790284395218,
      "learning_rate": 0.00019347814261764046,
      "loss": 0.0233,
      "step": 830
    },
    {
      "epoch": 3.230769230769231,
      "grad_norm": 0.21273957192897797,
      "learning_rate": 0.00019324993033632568,
      "loss": 0.0215,
      "step": 840
    },
    {
      "epoch": 3.269230769230769,
      "grad_norm": 0.2613265812397003,
      "learning_rate": 0.00019301793295998273,
      "loss": 0.0226,
      "step": 850
    },
    {
      "epoch": 3.3076923076923075,
      "grad_norm": 0.35958781838417053,
      "learning_rate": 0.00019278215990558504,
      "loss": 0.0258,
      "step": 860
    },
    {
      "epoch": 3.3461538461538463,
      "grad_norm": 0.27915194630622864,
      "learning_rate": 0.000192542620743364,
      "loss": 0.0255,
      "step": 870
    },
    {
      "epoch": 3.3846153846153846,
      "grad_norm": 0.23008808493614197,
      "learning_rate": 0.00019229932519642068,
      "loss": 0.0259,
      "step": 880
    },
    {
      "epoch": 3.423076923076923,
      "grad_norm": 0.3263101279735565,
      "learning_rate": 0.00019205228314033093,
      "loss": 0.0247,
      "step": 890
    },
    {
      "epoch": 3.4615384615384617,
      "grad_norm": 0.27318865060806274,
      "learning_rate": 0.00019180150460274478,
      "loss": 0.0246,
      "step": 900
    },
    {
      "epoch": 3.5,
      "grad_norm": 0.38367944955825806,
      "learning_rate": 0.00019154699976297907,
      "loss": 0.0218,
      "step": 910
    },
    {
      "epoch": 3.5384615384615383,
      "grad_norm": 0.24464184045791626,
      "learning_rate": 0.00019128877895160465,
      "loss": 0.0205,
      "step": 920
    },
    {
      "epoch": 3.5769230769230766,
      "grad_norm": 0.2480219155550003,
      "learning_rate": 0.00019102685265002666,
      "loss": 0.0248,
      "step": 930
    },
    {
      "epoch": 3.6153846153846154,
      "grad_norm": 0.3331535756587982,
      "learning_rate": 0.0001907612314900595,
      "loss": 0.0232,
      "step": 940
    },
    {
      "epoch": 3.6538461538461537,
      "grad_norm": 0.22589264810085297,
      "learning_rate": 0.00019049192625349487,
      "loss": 0.0208,
      "step": 950
    },
    {
      "epoch": 3.6923076923076925,
      "grad_norm": 0.29576578736305237,
      "learning_rate": 0.00019021894787166443,
      "loss": 0.0239,
      "step": 960
    },
    {
      "epoch": 3.730769230769231,
      "grad_norm": 0.249545156955719,
      "learning_rate": 0.00018994230742499593,
      "loss": 0.0225,
      "step": 970
    },
    {
      "epoch": 3.769230769230769,
      "grad_norm": 0.2573404312133789,
      "learning_rate": 0.00018966201614256347,
      "loss": 0.0209,
      "step": 980
    },
    {
      "epoch": 3.8076923076923075,
      "grad_norm": 0.32673683762550354,
      "learning_rate": 0.00018937808540163173,
      "loss": 0.023,
      "step": 990
    },
    {
      "epoch": 3.8461538461538463,
      "grad_norm": 0.29287075996398926,
      "learning_rate": 0.0001890905267271942,
      "loss": 0.0231,
      "step": 1000
    },
    {
      "epoch": 3.8846153846153846,
      "grad_norm": 0.2039879858493805,
      "learning_rate": 0.0001887993517915052,
      "loss": 0.0184,
      "step": 1010
    },
    {
      "epoch": 3.9230769230769234,
      "grad_norm": 0.24603568017482758,
      "learning_rate": 0.00018850457241360635,
      "loss": 0.0262,
      "step": 1020
    },
    {
      "epoch": 3.9615384615384617,
      "grad_norm": 0.2239179164171219,
      "learning_rate": 0.00018820620055884658,
      "loss": 0.0218,
      "step": 1030
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.17946171760559082,
      "learning_rate": 0.0001879042483383966,
      "loss": 0.0181,
      "step": 1040
    },
    {
      "epoch": 4.038461538461538,
      "grad_norm": 0.11465075612068176,
      "learning_rate": 0.00018759872800875726,
      "loss": 0.0172,
      "step": 1050
    },
    {
      "epoch": 4.076923076923077,
      "grad_norm": 0.17597812414169312,
      "learning_rate": 0.000187289651971262,
      "loss": 0.0169,
      "step": 1060
    },
    {
      "epoch": 4.115384615384615,
      "grad_norm": 0.19563104212284088,
      "learning_rate": 0.00018697703277157347,
      "loss": 0.0198,
      "step": 1070
    },
    {
      "epoch": 4.153846153846154,
      "grad_norm": 0.18617744743824005,
      "learning_rate": 0.00018666088309917447,
      "loss": 0.0218,
      "step": 1080
    },
    {
      "epoch": 4.1923076923076925,
      "grad_norm": 0.23704974353313446,
      "learning_rate": 0.00018634121578685255,
      "loss": 0.0186,
      "step": 1090
    },
    {
      "epoch": 4.230769230769231,
      "grad_norm": 0.32059594988822937,
      "learning_rate": 0.00018601804381017945,
      "loss": 0.0211,
      "step": 1100
    },
    {
      "epoch": 4.269230769230769,
      "grad_norm": 0.28677231073379517,
      "learning_rate": 0.00018569138028698415,
      "loss": 0.0159,
      "step": 1110
    },
    {
      "epoch": 4.3076923076923075,
      "grad_norm": 0.24357284605503082,
      "learning_rate": 0.00018536123847682056,
      "loss": 0.0189,
      "step": 1120
    },
    {
      "epoch": 4.346153846153846,
      "grad_norm": 0.1930132806301117,
      "learning_rate": 0.00018502763178042923,
      "loss": 0.0202,
      "step": 1130
    },
    {
      "epoch": 4.384615384615385,
      "grad_norm": 0.1683141440153122,
      "learning_rate": 0.00018469057373919347,
      "loss": 0.0202,
      "step": 1140
    },
    {
      "epoch": 4.423076923076923,
      "grad_norm": 0.1439371407032013,
      "learning_rate": 0.00018435007803458965,
      "loss": 0.0186,
      "step": 1150
    },
    {
      "epoch": 4.461538461538462,
      "grad_norm": 0.23720888793468475,
      "learning_rate": 0.00018400615848763175,
      "loss": 0.0209,
      "step": 1160
    },
    {
      "epoch": 4.5,
      "grad_norm": 0.15303802490234375,
      "learning_rate": 0.00018365882905831065,
      "loss": 0.0232,
      "step": 1170
    },
    {
      "epoch": 4.538461538461538,
      "grad_norm": 0.140608549118042,
      "learning_rate": 0.0001833081038450271,
      "loss": 0.0192,
      "step": 1180
    },
    {
      "epoch": 4.576923076923077,
      "grad_norm": 0.31198394298553467,
      "learning_rate": 0.00018295399708401975,
      "loss": 0.0168,
      "step": 1190
    },
    {
      "epoch": 4.615384615384615,
      "grad_norm": 0.1934264749288559,
      "learning_rate": 0.00018259652314878721,
      "loss": 0.0207,
      "step": 1200
    },
    {
      "epoch": 4.653846153846154,
      "grad_norm": 0.33610162138938904,
      "learning_rate": 0.00018223569654950454,
      "loss": 0.0196,
      "step": 1210
    },
    {
      "epoch": 4.6923076923076925,
      "grad_norm": 0.2503054738044739,
      "learning_rate": 0.00018187153193243436,
      "loss": 0.0201,
      "step": 1220
    },
    {
      "epoch": 4.730769230769231,
      "grad_norm": 0.20254291594028473,
      "learning_rate": 0.0001815040440793322,
      "loss": 0.0199,
      "step": 1230
    },
    {
      "epoch": 4.769230769230769,
      "grad_norm": 0.24005241692066193,
      "learning_rate": 0.0001811332479068468,
      "loss": 0.0214,
      "step": 1240
    },
    {
      "epoch": 4.8076923076923075,
      "grad_norm": 0.21922606229782104,
      "learning_rate": 0.00018075915846591428,
      "loss": 0.0204,
      "step": 1250
    },
    {
      "epoch": 4.846153846153846,
      "grad_norm": 0.2370215505361557,
      "learning_rate": 0.00018038179094114742,
      "loss": 0.022,
      "step": 1260
    },
    {
      "epoch": 4.884615384615385,
      "grad_norm": 0.2668112814426422,
      "learning_rate": 0.0001800011606502192,
      "loss": 0.0188,
      "step": 1270
    },
    {
      "epoch": 4.923076923076923,
      "grad_norm": 0.15615129470825195,
      "learning_rate": 0.00017961728304324116,
      "loss": 0.0182,
      "step": 1280
    },
    {
      "epoch": 4.961538461538462,
      "grad_norm": 0.23041196167469025,
      "learning_rate": 0.00017923017370213615,
      "loss": 0.0188,
      "step": 1290
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.21733583509922028,
      "learning_rate": 0.0001788398483400059,
      "loss": 0.0198,
      "step": 1300
    },
    {
      "epoch": 5.038461538461538,
      "grad_norm": 0.2194267213344574,
      "learning_rate": 0.00017844632280049328,
      "loss": 0.0181,
      "step": 1310
    },
    {
      "epoch": 5.076923076923077,
      "grad_norm": 0.14587156474590302,
      "learning_rate": 0.00017804961305713896,
      "loss": 0.0151,
      "step": 1320
    },
    {
      "epoch": 5.115384615384615,
      "grad_norm": 0.16885510087013245,
      "learning_rate": 0.00017764973521273332,
      "loss": 0.0215,
      "step": 1330
    },
    {
      "epoch": 5.153846153846154,
      "grad_norm": 0.19568288326263428,
      "learning_rate": 0.00017724670549866268,
      "loss": 0.0191,
      "step": 1340
    },
    {
      "epoch": 5.1923076923076925,
      "grad_norm": 0.15629301965236664,
      "learning_rate": 0.00017684054027425035,
      "loss": 0.0174,
      "step": 1350
    },
    {
      "epoch": 5.230769230769231,
      "grad_norm": 0.17440061271190643,
      "learning_rate": 0.00017643125602609288,
      "loss": 0.0145,
      "step": 1360
    },
    {
      "epoch": 5.269230769230769,
      "grad_norm": 0.21910502016544342,
      "learning_rate": 0.0001760188693673905,
      "loss": 0.0181,
      "step": 1370
    },
    {
      "epoch": 5.3076923076923075,
      "grad_norm": 0.21728090941905975,
      "learning_rate": 0.00017560339703727315,
      "loss": 0.0137,
      "step": 1380
    },
    {
      "epoch": 5.346153846153846,
      "grad_norm": 0.15279999375343323,
      "learning_rate": 0.00017518485590012068,
      "loss": 0.0152,
      "step": 1390
    },
    {
      "epoch": 5.384615384615385,
      "grad_norm": 0.33734527230262756,
      "learning_rate": 0.00017476326294487852,
      "loss": 0.0169,
      "step": 1400
    },
    {
      "epoch": 5.423076923076923,
      "grad_norm": 0.15465325117111206,
      "learning_rate": 0.00017433863528436804,
      "loss": 0.0159,
      "step": 1410
    },
    {
      "epoch": 5.461538461538462,
      "grad_norm": 0.1761886179447174,
      "learning_rate": 0.00017391099015459186,
      "loss": 0.0179,
      "step": 1420
    },
    {
      "epoch": 5.5,
      "grad_norm": 0.1088884100317955,
      "learning_rate": 0.00017348034491403432,
      "loss": 0.0163,
      "step": 1430
    },
    {
      "epoch": 5.538461538461538,
      "grad_norm": 0.3224456012248993,
      "learning_rate": 0.00017304671704295685,
      "loss": 0.016,
      "step": 1440
    },
    {
      "epoch": 5.576923076923077,
      "grad_norm": 0.2751074433326721,
      "learning_rate": 0.0001726101241426884,
      "loss": 0.0152,
      "step": 1450
    },
    {
      "epoch": 5.615384615384615,
      "grad_norm": 0.20135682821273804,
      "learning_rate": 0.00017217058393491098,
      "loss": 0.0213,
      "step": 1460
    },
    {
      "epoch": 5.653846153846154,
      "grad_norm": 0.13225609064102173,
      "learning_rate": 0.00017172811426094045,
      "loss": 0.0151,
      "step": 1470
    },
    {
      "epoch": 5.6923076923076925,
      "grad_norm": 0.24074223637580872,
      "learning_rate": 0.0001712827330810021,
      "loss": 0.0181,
      "step": 1480
    },
    {
      "epoch": 5.730769230769231,
      "grad_norm": 0.2650367319583893,
      "learning_rate": 0.00017083445847350186,
      "loss": 0.0181,
      "step": 1490
    },
    {
      "epoch": 5.769230769230769,
      "grad_norm": 0.17957966029644012,
      "learning_rate": 0.00017038330863429236,
      "loss": 0.0153,
      "step": 1500
    },
    {
      "epoch": 5.8076923076923075,
      "grad_norm": 0.1726105660200119,
      "learning_rate": 0.00016992930187593425,
      "loss": 0.0178,
      "step": 1510
    },
    {
      "epoch": 5.846153846153846,
      "grad_norm": 0.29331403970718384,
      "learning_rate": 0.00016947245662695317,
      "loss": 0.0164,
      "step": 1520
    },
    {
      "epoch": 5.884615384615385,
      "grad_norm": 0.1539272964000702,
      "learning_rate": 0.0001690127914310914,
      "loss": 0.0185,
      "step": 1530
    },
    {
      "epoch": 5.923076923076923,
      "grad_norm": 0.2472430318593979,
      "learning_rate": 0.00016855032494655537,
      "loss": 0.0163,
      "step": 1540
    },
    {
      "epoch": 5.961538461538462,
      "grad_norm": 0.23108868300914764,
      "learning_rate": 0.00016808507594525822,
      "loss": 0.0163,
      "step": 1550
    },
    {
      "epoch": 6.0,
      "grad_norm": 0.1876610368490219,
      "learning_rate": 0.00016761706331205783,
      "loss": 0.0183,
      "step": 1560
    },
    {
      "epoch": 6.038461538461538,
      "grad_norm": 0.2372998148202896,
      "learning_rate": 0.00016714630604399021,
      "loss": 0.0184,
      "step": 1570
    },
    {
      "epoch": 6.076923076923077,
      "grad_norm": 0.23431254923343658,
      "learning_rate": 0.00016667282324949863,
      "loss": 0.0177,
      "step": 1580
    },
    {
      "epoch": 6.115384615384615,
      "grad_norm": 0.18878187239170074,
      "learning_rate": 0.00016619663414765764,
      "loss": 0.0176,
      "step": 1590
    },
    {
      "epoch": 6.153846153846154,
      "grad_norm": 0.17434252798557281,
      "learning_rate": 0.00016571775806739335,
      "loss": 0.0182,
      "step": 1600
    },
    {
      "epoch": 6.1923076923076925,
      "grad_norm": 0.21172362565994263,
      "learning_rate": 0.00016523621444669836,
      "loss": 0.0186,
      "step": 1610
    },
    {
      "epoch": 6.230769230769231,
      "grad_norm": 0.3032718300819397,
      "learning_rate": 0.00016475202283184332,
      "loss": 0.0158,
      "step": 1620
    },
    {
      "epoch": 6.269230769230769,
      "grad_norm": 0.2207326591014862,
      "learning_rate": 0.00016426520287658307,
      "loss": 0.0148,
      "step": 1630
    },
    {
      "epoch": 6.3076923076923075,
      "grad_norm": 0.1728544682264328,
      "learning_rate": 0.00016377577434135908,
      "loss": 0.018,
      "step": 1640
    },
    {
      "epoch": 6.346153846153846,
      "grad_norm": 0.2233678251504898,
      "learning_rate": 0.00016328375709249738,
      "loss": 0.0143,
      "step": 1650
    },
    {
      "epoch": 6.384615384615385,
      "grad_norm": 0.156491219997406,
      "learning_rate": 0.00016278917110140205,
      "loss": 0.015,
      "step": 1660
    },
    {
      "epoch": 6.423076923076923,
      "grad_norm": 0.17857709527015686,
      "learning_rate": 0.00016229203644374475,
      "loss": 0.0164,
      "step": 1670
    },
    {
      "epoch": 6.461538461538462,
      "grad_norm": 0.1415674239397049,
      "learning_rate": 0.00016179237329864952,
      "loss": 0.0157,
      "step": 1680
    },
    {
      "epoch": 6.5,
      "grad_norm": 0.1668516844511032,
      "learning_rate": 0.00016129020194787403,
      "loss": 0.0168,
      "step": 1690
    },
    {
      "epoch": 6.538461538461538,
      "grad_norm": 0.1995563507080078,
      "learning_rate": 0.00016078554277498618,
      "loss": 0.018,
      "step": 1700
    },
    {
      "epoch": 6.576923076923077,
      "grad_norm": 0.18094874918460846,
      "learning_rate": 0.00016027841626453665,
      "loss": 0.0198,
      "step": 1710
    },
    {
      "epoch": 6.615384615384615,
      "grad_norm": 0.17089825868606567,
      "learning_rate": 0.00015976884300122746,
      "loss": 0.0158,
      "step": 1720
    },
    {
      "epoch": 6.653846153846154,
      "grad_norm": 0.17652678489685059,
      "learning_rate": 0.00015925684366907653,
      "loss": 0.0187,
      "step": 1730
    },
    {
      "epoch": 6.6923076923076925,
      "grad_norm": 0.17994622886180878,
      "learning_rate": 0.00015874243905057798,
      "loss": 0.0156,
      "step": 1740
    },
    {
      "epoch": 6.730769230769231,
      "grad_norm": 0.19167155027389526,
      "learning_rate": 0.0001582256500258585,
      "loss": 0.0198,
      "step": 1750
    },
    {
      "epoch": 6.769230769230769,
      "grad_norm": 0.18259945511817932,
      "learning_rate": 0.00015770649757182993,
      "loss": 0.0164,
      "step": 1760
    },
    {
      "epoch": 6.8076923076923075,
      "grad_norm": 0.22917133569717407,
      "learning_rate": 0.0001571850027613378,
      "loss": 0.0169,
      "step": 1770
    },
    {
      "epoch": 6.846153846153846,
      "grad_norm": 0.20906826853752136,
      "learning_rate": 0.00015666118676230576,
      "loss": 0.0179,
      "step": 1780
    },
    {
      "epoch": 6.884615384615385,
      "grad_norm": 0.3887175917625427,
      "learning_rate": 0.00015613507083687668,
      "loss": 0.0178,
      "step": 1790
    },
    {
      "epoch": 6.923076923076923,
      "grad_norm": 0.1430666148662567,
      "learning_rate": 0.00015560667634054932,
      "loss": 0.0169,
      "step": 1800
    },
    {
      "epoch": 6.961538461538462,
      "grad_norm": 0.34076541662216187,
      "learning_rate": 0.00015507602472131162,
      "loss": 0.0176,
      "step": 1810
    },
    {
      "epoch": 7.0,
      "grad_norm": 0.248886838555336,
      "learning_rate": 0.00015454313751877004,
      "loss": 0.0178,
      "step": 1820
    },
    {
      "epoch": 7.038461538461538,
      "grad_norm": 0.21486853063106537,
      "learning_rate": 0.0001540080363632754,
      "loss": 0.0162,
      "step": 1830
    },
    {
      "epoch": 7.076923076923077,
      "grad_norm": 0.19121983647346497,
      "learning_rate": 0.00015347074297504464,
      "loss": 0.0147,
      "step": 1840
    },
    {
      "epoch": 7.115384615384615,
      "grad_norm": 0.20204658806324005,
      "learning_rate": 0.00015293127916327944,
      "loss": 0.0167,
      "step": 1850
    },
    {
      "epoch": 7.153846153846154,
      "grad_norm": 0.17696718871593475,
      "learning_rate": 0.0001523896668252808,
      "loss": 0.017,
      "step": 1860
    },
    {
      "epoch": 7.1923076923076925,
      "grad_norm": 0.19161172211170197,
      "learning_rate": 0.0001518459279455602,
      "loss": 0.0143,
      "step": 1870
    },
    {
      "epoch": 7.230769230769231,
      "grad_norm": 0.2208317518234253,
      "learning_rate": 0.0001513000845949473,
      "loss": 0.016,
      "step": 1880
    },
    {
      "epoch": 7.269230769230769,
      "grad_norm": 0.16817378997802734,
      "learning_rate": 0.00015075215892969418,
      "loss": 0.0177,
      "step": 1890
    },
    {
      "epoch": 7.3076923076923075,
      "grad_norm": 0.21887525916099548,
      "learning_rate": 0.0001502021731905756,
      "loss": 0.0142,
      "step": 1900
    },
    {
      "epoch": 7.346153846153846,
      "grad_norm": 0.14425648748874664,
      "learning_rate": 0.00014965014970198676,
      "loss": 0.015,
      "step": 1910
    },
    {
      "epoch": 7.384615384615385,
      "grad_norm": 0.2206214964389801,
      "learning_rate": 0.0001490961108710367,
      "loss": 0.0158,
      "step": 1920
    },
    {
      "epoch": 7.423076923076923,
      "grad_norm": 0.13397306203842163,
      "learning_rate": 0.00014854007918663903,
      "loss": 0.0172,
      "step": 1930
    },
    {
      "epoch": 7.461538461538462,
      "grad_norm": 0.2873019278049469,
      "learning_rate": 0.00014798207721859898,
      "loss": 0.0167,
      "step": 1940
    },
    {
      "epoch": 7.5,
      "grad_norm": 0.20510359108448029,
      "learning_rate": 0.00014742212761669724,
      "loss": 0.0149,
      "step": 1950
    },
    {
      "epoch": 7.538461538461538,
      "grad_norm": 0.2240099161863327,
      "learning_rate": 0.00014686025310977065,
      "loss": 0.0149,
      "step": 1960
    },
    {
      "epoch": 7.576923076923077,
      "grad_norm": 0.2232583910226822,
      "learning_rate": 0.0001462964765047897,
      "loss": 0.0148,
      "step": 1970
    },
    {
      "epoch": 7.615384615384615,
      "grad_norm": 0.16749520599842072,
      "learning_rate": 0.00014573082068593254,
      "loss": 0.0165,
      "step": 1980
    },
    {
      "epoch": 7.653846153846154,
      "grad_norm": 0.18627752363681793,
      "learning_rate": 0.00014516330861365638,
      "loss": 0.0152,
      "step": 1990
    },
    {
      "epoch": 7.6923076923076925,
      "grad_norm": 0.2765505313873291,
      "learning_rate": 0.00014459396332376517,
      "loss": 0.0154,
      "step": 2000
    },
    {
      "epoch": 7.730769230769231,
      "grad_norm": 0.2526029050350189,
      "learning_rate": 0.00014402280792647493,
      "loss": 0.0149,
      "step": 2010
    },
    {
      "epoch": 7.769230769230769,
      "grad_norm": 0.12884902954101562,
      "learning_rate": 0.00014344986560547537,
      "loss": 0.0164,
      "step": 2020
    },
    {
      "epoch": 7.8076923076923075,
      "grad_norm": 0.10823468118906021,
      "learning_rate": 0.00014287515961698902,
      "loss": 0.0149,
      "step": 2030
    },
    {
      "epoch": 7.846153846153846,
      "grad_norm": 0.15259294211864471,
      "learning_rate": 0.0001422987132888272,
      "loss": 0.0143,
      "step": 2040
    },
    {
      "epoch": 7.884615384615385,
      "grad_norm": 0.13543514907360077,
      "learning_rate": 0.00014172055001944312,
      "loss": 0.0134,
      "step": 2050
    },
    {
      "epoch": 7.923076923076923,
      "grad_norm": 0.2171320766210556,
      "learning_rate": 0.0001411406932769821,
      "loss": 0.0156,
      "step": 2060
    },
    {
      "epoch": 7.961538461538462,
      "grad_norm": 0.09700020402669907,
      "learning_rate": 0.000140559166598329,
      "loss": 0.0141,
      "step": 2070
    },
    {
      "epoch": 8.0,
      "grad_norm": 0.19601549208164215,
      "learning_rate": 0.00013997599358815275,
      "loss": 0.0147,
      "step": 2080
    },
    {
      "epoch": 8.038461538461538,
      "grad_norm": 0.18070325255393982,
      "learning_rate": 0.0001393911979179485,
      "loss": 0.0159,
      "step": 2090
    },
    {
      "epoch": 8.076923076923077,
      "grad_norm": 0.17102424800395966,
      "learning_rate": 0.0001388048033250763,
      "loss": 0.0171,
      "step": 2100
    },
    {
      "epoch": 8.115384615384615,
      "grad_norm": 0.15269245207309723,
      "learning_rate": 0.00013821683361179809,
      "loss": 0.0136,
      "step": 2110
    },
    {
      "epoch": 8.153846153846153,
      "grad_norm": 0.14295409619808197,
      "learning_rate": 0.00013762731264431116,
      "loss": 0.0116,
      "step": 2120
    },
    {
      "epoch": 8.192307692307692,
      "grad_norm": 0.13391715288162231,
      "learning_rate": 0.00013703626435177956,
      "loss": 0.017,
      "step": 2130
    },
    {
      "epoch": 8.23076923076923,
      "grad_norm": 0.18101289868354797,
      "learning_rate": 0.00013644371272536284,
      "loss": 0.0145,
      "step": 2140
    },
    {
      "epoch": 8.26923076923077,
      "grad_norm": 0.13576363027095795,
      "learning_rate": 0.00013584968181724213,
      "loss": 0.0139,
      "step": 2150
    },
    {
      "epoch": 8.307692307692308,
      "grad_norm": 0.14105717837810516,
      "learning_rate": 0.00013525419573964386,
      "loss": 0.0132,
      "step": 2160
    },
    {
      "epoch": 8.346153846153847,
      "grad_norm": 0.09896686673164368,
      "learning_rate": 0.00013465727866386108,
      "loss": 0.0107,
      "step": 2170
    },
    {
      "epoch": 8.384615384615385,
      "grad_norm": 0.27290794253349304,
      "learning_rate": 0.0001340589548192723,
      "loss": 0.0143,
      "step": 2180
    },
    {
      "epoch": 8.423076923076923,
      "grad_norm": 0.1378784477710724,
      "learning_rate": 0.00013345924849235793,
      "loss": 0.0136,
      "step": 2190
    },
    {
      "epoch": 8.461538461538462,
      "grad_norm": 0.18884290754795074,
      "learning_rate": 0.00013285818402571458,
      "loss": 0.0153,
      "step": 2200
    },
    {
      "epoch": 8.5,
      "grad_norm": 0.13120310008525848,
      "learning_rate": 0.00013225578581706687,
      "loss": 0.0145,
      "step": 2210
    },
    {
      "epoch": 8.538461538461538,
      "grad_norm": 0.1788390427827835,
      "learning_rate": 0.00013165207831827724,
      "loss": 0.0152,
      "step": 2220
    },
    {
      "epoch": 8.576923076923077,
      "grad_norm": 0.16243167221546173,
      "learning_rate": 0.00013104708603435325,
      "loss": 0.0161,
      "step": 2230
    },
    {
      "epoch": 8.615384615384615,
      "grad_norm": 0.24895057082176208,
      "learning_rate": 0.00013044083352245308,
      "loss": 0.0145,
      "step": 2240
    },
    {
      "epoch": 8.653846153846153,
      "grad_norm": 0.15210068225860596,
      "learning_rate": 0.0001298333453908886,
      "loss": 0.0135,
      "step": 2250
    },
    {
      "epoch": 8.692307692307692,
      "grad_norm": 0.19090086221694946,
      "learning_rate": 0.00012922464629812653,
      "loss": 0.0167,
      "step": 2260
    },
    {
      "epoch": 8.73076923076923,
      "grad_norm": 0.2009580284357071,
      "learning_rate": 0.0001286147609517876,
      "loss": 0.0134,
      "step": 2270
    },
    {
      "epoch": 8.76923076923077,
      "grad_norm": 0.20718801021575928,
      "learning_rate": 0.00012800371410764356,
      "loss": 0.0122,
      "step": 2280
    },
    {
      "epoch": 8.807692307692308,
      "grad_norm": 0.18385674059391022,
      "learning_rate": 0.00012739153056861243,
      "loss": 0.0145,
      "step": 2290
    },
    {
      "epoch": 8.846153846153847,
      "grad_norm": 0.19775976240634918,
      "learning_rate": 0.00012677823518375146,
      "loss": 0.0143,
      "step": 2300
    },
    {
      "epoch": 8.884615384615385,
      "grad_norm": 0.14536786079406738,
      "learning_rate": 0.00012616385284724898,
      "loss": 0.0136,
      "step": 2310
    },
    {
      "epoch": 8.923076923076923,
      "grad_norm": 0.1978665441274643,
      "learning_rate": 0.00012554840849741332,
      "loss": 0.0144,
      "step": 2320
    },
    {
      "epoch": 8.961538461538462,
      "grad_norm": 0.16081364452838898,
      "learning_rate": 0.00012493192711566104,
      "loss": 0.0146,
      "step": 2330
    },
    {
      "epoch": 9.0,
      "grad_norm": 0.18740719556808472,
      "learning_rate": 0.00012431443372550266,
      "loss": 0.017,
      "step": 2340
    },
    {
      "epoch": 9.038461538461538,
      "grad_norm": 0.1770515739917755,
      "learning_rate": 0.00012369595339152702,
      "loss": 0.0154,
      "step": 2350
    },
    {
      "epoch": 9.076923076923077,
      "grad_norm": 0.12580080330371857,
      "learning_rate": 0.0001230765112183838,
      "loss": 0.0149,
      "step": 2360
    },
    {
      "epoch": 9.115384615384615,
      "grad_norm": 0.19087755680084229,
      "learning_rate": 0.00012245613234976462,
      "loss": 0.0145,
      "step": 2370
    },
    {
      "epoch": 9.153846153846153,
      "grad_norm": 0.10227272659540176,
      "learning_rate": 0.00012183484196738233,
      "loss": 0.0126,
      "step": 2380
    },
    {
      "epoch": 9.192307692307692,
      "grad_norm": 0.17450454831123352,
      "learning_rate": 0.00012121266528994895,
      "loss": 0.0145,
      "step": 2390
    },
    {
      "epoch": 9.23076923076923,
      "grad_norm": 0.15834400057792664,
      "learning_rate": 0.00012058962757215195,
      "loss": 0.0122,
      "step": 2400
    },
    {
      "epoch": 9.26923076923077,
      "grad_norm": 0.13027557730674744,
      "learning_rate": 0.00011996575410362919,
      "loss": 0.0137,
      "step": 2410
    },
    {
      "epoch": 9.307692307692308,
      "grad_norm": 0.18278013169765472,
      "learning_rate": 0.00011934107020794239,
      "loss": 0.0133,
      "step": 2420
    },
    {
      "epoch": 9.346153846153847,
      "grad_norm": 0.15627382695674896,
      "learning_rate": 0.00011871560124154912,
      "loss": 0.0158,
      "step": 2430
    },
    {
      "epoch": 9.384615384615385,
      "grad_norm": 0.16056114435195923,
      "learning_rate": 0.00011808937259277377,
      "loss": 0.0131,
      "step": 2440
    },
    {
      "epoch": 9.423076923076923,
      "grad_norm": 0.11140304803848267,
      "learning_rate": 0.00011746240968077682,
      "loss": 0.0142,
      "step": 2450
    },
    {
      "epoch": 9.461538461538462,
      "grad_norm": 0.17418035864830017,
      "learning_rate": 0.0001168347379545231,
      "loss": 0.0123,
      "step": 2460
    },
    {
      "epoch": 9.5,
      "grad_norm": 0.0985954999923706,
      "learning_rate": 0.0001162063828917489,
      "loss": 0.0123,
      "step": 2470
    },
    {
      "epoch": 9.538461538461538,
      "grad_norm": 0.22951987385749817,
      "learning_rate": 0.00011557736999792765,
      "loss": 0.0127,
      "step": 2480
    },
    {
      "epoch": 9.576923076923077,
      "grad_norm": 0.13590703904628754,
      "learning_rate": 0.00011494772480523482,
      "loss": 0.0135,
      "step": 2490
    },
    {
      "epoch": 9.615384615384615,
      "grad_norm": 0.1775355190038681,
      "learning_rate": 0.00011431747287151125,
      "loss": 0.013,
      "step": 2500
    },
    {
      "epoch": 9.653846153846153,
      "grad_norm": 0.22847086191177368,
      "learning_rate": 0.00011368663977922613,
      "loss": 0.0136,
      "step": 2510
    },
    {
      "epoch": 9.692307692307692,
      "grad_norm": 0.1462956815958023,
      "learning_rate": 0.00011305525113443822,
      "loss": 0.0141,
      "step": 2520
    },
    {
      "epoch": 9.73076923076923,
      "grad_norm": 0.1806713193655014,
      "learning_rate": 0.00011242333256575677,
      "loss": 0.0118,
      "step": 2530
    },
    {
      "epoch": 9.76923076923077,
      "grad_norm": 0.14892129600048065,
      "learning_rate": 0.00011179090972330093,
      "loss": 0.012,
      "step": 2540
    },
    {
      "epoch": 9.807692307692308,
      "grad_norm": 0.1719180941581726,
      "learning_rate": 0.0001111580082776589,
      "loss": 0.0134,
      "step": 2550
    },
    {
      "epoch": 9.846153846153847,
      "grad_norm": 0.1946127712726593,
      "learning_rate": 0.00011052465391884575,
      "loss": 0.0146,
      "step": 2560
    },
    {
      "epoch": 9.884615384615385,
      "grad_norm": 0.16166430711746216,
      "learning_rate": 0.00010989087235526068,
      "loss": 0.0145,
      "step": 2570
    },
    {
      "epoch": 9.923076923076923,
      "grad_norm": 0.18934275209903717,
      "learning_rate": 0.00010925668931264346,
      "loss": 0.0109,
      "step": 2580
    },
    {
      "epoch": 9.961538461538462,
      "grad_norm": 0.14358612895011902,
      "learning_rate": 0.00010862213053303037,
      "loss": 0.0099,
      "step": 2590
    },
    {
      "epoch": 10.0,
      "grad_norm": 0.2569468319416046,
      "learning_rate": 0.00010798722177370897,
      "loss": 0.0147,
      "step": 2600
    },
    {
      "epoch": 10.038461538461538,
      "grad_norm": 0.21339739859104156,
      "learning_rate": 0.00010735198880617303,
      "loss": 0.0137,
      "step": 2610
    },
    {
      "epoch": 10.076923076923077,
      "grad_norm": 0.12013400346040726,
      "learning_rate": 0.00010671645741507603,
      "loss": 0.011,
      "step": 2620
    },
    {
      "epoch": 10.115384615384615,
      "grad_norm": 0.18901623785495758,
      "learning_rate": 0.00010608065339718483,
      "loss": 0.0109,
      "step": 2630
    },
    {
      "epoch": 10.153846153846153,
      "grad_norm": 0.10211600363254547,
      "learning_rate": 0.00010544460256033237,
      "loss": 0.0132,
      "step": 2640
    },
    {
      "epoch": 10.192307692307692,
      "grad_norm": 0.17099827527999878,
      "learning_rate": 0.00010480833072237022,
      "loss": 0.0139,
      "step": 2650
    },
    {
      "epoch": 10.23076923076923,
      "grad_norm": 0.15852560102939606,
      "learning_rate": 0.00010417186371012063,
      "loss": 0.0116,
      "step": 2660
    },
    {
      "epoch": 10.26923076923077,
      "grad_norm": 0.17198602855205536,
      "learning_rate": 0.000103535227358328,
      "loss": 0.0114,
      "step": 2670
    },
    {
      "epoch": 10.307692307692308,
      "grad_norm": 0.21135368943214417,
      "learning_rate": 0.00010289844750861053,
      "loss": 0.0126,
      "step": 2680
    },
    {
      "epoch": 10.346153846153847,
      "grad_norm": 0.13126599788665771,
      "learning_rate": 0.000102261550008411,
      "loss": 0.013,
      "step": 2690
    },
    {
      "epoch": 10.384615384615385,
      "grad_norm": 0.16771045327186584,
      "learning_rate": 0.00010162456070994781,
      "loss": 0.0111,
      "step": 2700
    },
    {
      "epoch": 10.423076923076923,
      "grad_norm": 0.1977088302373886,
      "learning_rate": 0.00010098750546916547,
      "loss": 0.0123,
      "step": 2710
    },
    {
      "epoch": 10.461538461538462,
      "grad_norm": 0.12443752586841583,
      "learning_rate": 0.00010035041014468518,
      "loss": 0.0114,
      "step": 2720
    },
    {
      "epoch": 10.5,
      "grad_norm": 0.2196878045797348,
      "learning_rate": 9.971330059675518e-05,
      "loss": 0.0151,
      "step": 2730
    },
    {
      "epoch": 10.538461538461538,
      "grad_norm": 0.1408672332763672,
      "learning_rate": 9.907620268620098e-05,
      "loss": 0.011,
      "step": 2740
    },
    {
      "epoch": 10.576923076923077,
      "grad_norm": 0.14648661017417908,
      "learning_rate": 9.84391422733758e-05,
      "loss": 0.0101,
      "step": 2750
    },
    {
      "epoch": 10.615384615384615,
      "grad_norm": 0.14232785999774933,
      "learning_rate": 9.78021452171108e-05,
      "loss": 0.0104,
      "step": 2760
    },
    {
      "epoch": 10.653846153846153,
      "grad_norm": 0.16160699725151062,
      "learning_rate": 9.716523737366537e-05,
      "loss": 0.0127,
      "step": 2770
    },
    {
      "epoch": 10.692307692307692,
      "grad_norm": 0.20870442688465118,
      "learning_rate": 9.652844459567777e-05,
      "loss": 0.0103,
      "step": 2780
    },
    {
      "epoch": 10.73076923076923,
      "grad_norm": 0.15344356000423431,
      "learning_rate": 9.589179273111551e-05,
      "loss": 0.0101,
      "step": 2790
    },
    {
      "epoch": 10.76923076923077,
      "grad_norm": 0.11750971525907516,
      "learning_rate": 9.525530762222645e-05,
      "loss": 0.0111,
      "step": 2800
    },
    {
      "epoch": 10.807692307692308,
      "grad_norm": 0.117418073117733,
      "learning_rate": 9.461901510448962e-05,
      "loss": 0.0128,
      "step": 2810
    },
    {
      "epoch": 10.846153846153847,
      "grad_norm": 0.317682683467865,
      "learning_rate": 9.398294100556668e-05,
      "loss": 0.0124,
      "step": 2820
    },
    {
      "epoch": 10.884615384615385,
      "grad_norm": 0.14449702203273773,
      "learning_rate": 9.334711114425335e-05,
      "loss": 0.0117,
      "step": 2830
    },
    {
      "epoch": 10.923076923076923,
      "grad_norm": 0.09939802438020706,
      "learning_rate": 9.27115513294316e-05,
      "loss": 0.012,
      "step": 2840
    },
    {
      "epoch": 10.961538461538462,
      "grad_norm": 0.16178478300571442,
      "learning_rate": 9.207628735902205e-05,
      "loss": 0.0138,
      "step": 2850
    },
    {
      "epoch": 11.0,
      "grad_norm": 0.21062442660331726,
      "learning_rate": 9.144134501893671e-05,
      "loss": 0.0122,
      "step": 2860
    },
    {
      "epoch": 11.038461538461538,
      "grad_norm": 0.17146876454353333,
      "learning_rate": 9.080675008203221e-05,
      "loss": 0.0148,
      "step": 2870
    },
    {
      "epoch": 11.076923076923077,
      "grad_norm": 0.13412779569625854,
      "learning_rate": 9.017252830706394e-05,
      "loss": 0.0099,
      "step": 2880
    },
    {
      "epoch": 11.115384615384615,
      "grad_norm": 0.14295507967472076,
      "learning_rate": 8.953870543764026e-05,
      "loss": 0.0116,
      "step": 2890
    },
    {
      "epoch": 11.153846153846153,
      "grad_norm": 0.13120034337043762,
      "learning_rate": 8.890530720117767e-05,
      "loss": 0.0127,
      "step": 2900
    },
    {
      "epoch": 11.192307692307692,
      "grad_norm": 0.18492452800273895,
      "learning_rate": 8.827235930785636e-05,
      "loss": 0.0106,
      "step": 2910
    },
    {
      "epoch": 11.23076923076923,
      "grad_norm": 0.15923990309238434,
      "learning_rate": 8.763988744957672e-05,
      "loss": 0.0134,
      "step": 2920
    },
    {
      "epoch": 11.26923076923077,
      "grad_norm": 0.10775059461593628,
      "learning_rate": 8.700791729891651e-05,
      "loss": 0.0141,
      "step": 2930
    },
    {
      "epoch": 11.307692307692308,
      "grad_norm": 0.1554538756608963,
      "learning_rate": 8.637647450808879e-05,
      "loss": 0.0103,
      "step": 2940
    },
    {
      "epoch": 11.346153846153847,
      "grad_norm": 0.16043995320796967,
      "learning_rate": 8.574558470790053e-05,
      "loss": 0.0147,
      "step": 2950
    },
    {
      "epoch": 11.384615384615385,
      "grad_norm": 0.1335715800523758,
      "learning_rate": 8.511527350671233e-05,
      "loss": 0.0115,
      "step": 2960
    },
    {
      "epoch": 11.423076923076923,
      "grad_norm": 0.13701431453227997,
      "learning_rate": 8.448556648939907e-05,
      "loss": 0.0108,
      "step": 2970
    },
    {
      "epoch": 11.461538461538462,
      "grad_norm": 0.09677750617265701,
      "learning_rate": 8.385648921631116e-05,
      "loss": 0.0095,
      "step": 2980
    },
    {
      "epoch": 11.5,
      "grad_norm": 0.08720875531435013,
      "learning_rate": 8.322806722223725e-05,
      "loss": 0.0086,
      "step": 2990
    },
    {
      "epoch": 11.538461538461538,
      "grad_norm": 0.10705883800983429,
      "learning_rate": 8.260032601536758e-05,
      "loss": 0.0103,
      "step": 3000
    },
    {
      "epoch": 11.576923076923077,
      "grad_norm": 0.1649639904499054,
      "learning_rate": 8.19732910762587e-05,
      "loss": 0.0093,
      "step": 3010
    },
    {
      "epoch": 11.615384615384615,
      "grad_norm": 0.0938439667224884,
      "learning_rate": 8.134698785679909e-05,
      "loss": 0.0087,
      "step": 3020
    },
    {
      "epoch": 11.653846153846153,
      "grad_norm": 0.15994666516780853,
      "learning_rate": 8.072144177917615e-05,
      "loss": 0.0097,
      "step": 3030
    },
    {
      "epoch": 11.692307692307692,
      "grad_norm": 0.16346988081932068,
      "learning_rate": 8.009667823484424e-05,
      "loss": 0.0087,
      "step": 3040
    },
    {
      "epoch": 11.73076923076923,
      "grad_norm": 0.12906785309314728,
      "learning_rate": 7.947272258349396e-05,
      "loss": 0.0113,
      "step": 3050
    },
    {
      "epoch": 11.76923076923077,
      "grad_norm": 0.1393740475177765,
      "learning_rate": 7.884960015202289e-05,
      "loss": 0.0078,
      "step": 3060
    },
    {
      "epoch": 11.807692307692308,
      "grad_norm": 0.11777480691671371,
      "learning_rate": 7.822733623350749e-05,
      "loss": 0.0108,
      "step": 3070
    },
    {
      "epoch": 11.846153846153847,
      "grad_norm": 0.11602358520030975,
      "learning_rate": 7.760595608617646e-05,
      "loss": 0.0078,
      "step": 3080
    },
    {
      "epoch": 11.884615384615385,
      "grad_norm": 0.09884156286716461,
      "learning_rate": 7.698548493238537e-05,
      "loss": 0.0118,
      "step": 3090
    },
    {
      "epoch": 11.923076923076923,
      "grad_norm": 0.16078071296215057,
      "learning_rate": 7.636594795759307e-05,
      "loss": 0.0108,
      "step": 3100
    },
    {
      "epoch": 11.961538461538462,
      "grad_norm": 0.07723239809274673,
      "learning_rate": 7.574737030933921e-05,
      "loss": 0.0095,
      "step": 3110
    },
    {
      "epoch": 12.0,
      "grad_norm": 0.1335863620042801,
      "learning_rate": 7.512977709622362e-05,
      "loss": 0.0108,
      "step": 3120
    },
    {
      "epoch": 12.038461538461538,
      "grad_norm": 0.20832855999469757,
      "learning_rate": 7.451319338688705e-05,
      "loss": 0.0107,
      "step": 3130
    },
    {
      "epoch": 12.076923076923077,
      "grad_norm": 0.0902862548828125,
      "learning_rate": 7.389764420899348e-05,
      "loss": 0.0116,
      "step": 3140
    },
    {
      "epoch": 12.115384615384615,
      "grad_norm": 0.0983407124876976,
      "learning_rate": 7.328315454821455e-05,
      "loss": 0.0117,
      "step": 3150
    },
    {
      "epoch": 12.153846153846153,
      "grad_norm": 0.10898351669311523,
      "learning_rate": 7.266974934721506e-05,
      "loss": 0.0087,
      "step": 3160
    },
    {
      "epoch": 12.192307692307692,
      "grad_norm": 0.12344610691070557,
      "learning_rate": 7.205745350464081e-05,
      "loss": 0.0076,
      "step": 3170
    },
    {
      "epoch": 12.23076923076923,
      "grad_norm": 0.1027444526553154,
      "learning_rate": 7.144629187410756e-05,
      "loss": 0.0089,
      "step": 3180
    },
    {
      "epoch": 12.26923076923077,
      "grad_norm": 0.13259388506412506,
      "learning_rate": 7.083628926319259e-05,
      "loss": 0.0093,
      "step": 3190
    },
    {
      "epoch": 12.307692307692308,
      "grad_norm": 0.15261848270893097,
      "learning_rate": 7.022747043242753e-05,
      "loss": 0.0088,
      "step": 3200
    },
    {
      "epoch": 12.346153846153847,
      "grad_norm": 0.18796484172344208,
      "learning_rate": 6.961986009429342e-05,
      "loss": 0.0094,
      "step": 3210
    },
    {
      "epoch": 12.384615384615385,
      "grad_norm": 0.12565913796424866,
      "learning_rate": 6.901348291221737e-05,
      "loss": 0.0092,
      "step": 3220
    },
    {
      "epoch": 12.423076923076923,
      "grad_norm": 0.11127506196498871,
      "learning_rate": 6.840836349957179e-05,
      "loss": 0.0078,
      "step": 3230
    },
    {
      "epoch": 12.461538461538462,
      "grad_norm": 0.17799825966358185,
      "learning_rate": 6.780452641867508e-05,
      "loss": 0.0098,
      "step": 3240
    },
    {
      "epoch": 12.5,
      "grad_norm": 0.18342117965221405,
      "learning_rate": 6.720199617979468e-05,
      "loss": 0.0083,
      "step": 3250
    },
    {
      "epoch": 12.538461538461538,
      "grad_norm": 0.14820097386837006,
      "learning_rate": 6.660079724015226e-05,
      "loss": 0.0087,
      "step": 3260
    },
    {
      "epoch": 12.576923076923077,
      "grad_norm": 0.12141295522451401,
      "learning_rate": 6.600095400293078e-05,
      "loss": 0.0108,
      "step": 3270
    },
    {
      "epoch": 12.615384615384615,
      "grad_norm": 0.10008122026920319,
      "learning_rate": 6.540249081628416e-05,
      "loss": 0.0097,
      "step": 3280
    },
    {
      "epoch": 12.653846153846153,
      "grad_norm": 0.14060907065868378,
      "learning_rate": 6.480543197234886e-05,
      "loss": 0.0111,
      "step": 3290
    },
    {
      "epoch": 12.692307692307692,
      "grad_norm": 0.16219834983348846,
      "learning_rate": 6.420980170625788e-05,
      "loss": 0.0113,
      "step": 3300
    },
    {
      "epoch": 12.73076923076923,
      "grad_norm": 0.1932661533355713,
      "learning_rate": 6.361562419515693e-05,
      "loss": 0.0093,
      "step": 3310
    },
    {
      "epoch": 12.76923076923077,
      "grad_norm": 0.14726077020168304,
      "learning_rate": 6.302292355722325e-05,
      "loss": 0.0099,
      "step": 3320
    },
    {
      "epoch": 12.807692307692308,
      "grad_norm": 0.1557319462299347,
      "learning_rate": 6.243172385068646e-05,
      "loss": 0.0087,
      "step": 3330
    },
    {
      "epoch": 12.846153846153847,
      "grad_norm": 0.21162110567092896,
      "learning_rate": 6.184204907285215e-05,
      "loss": 0.0098,
      "step": 3340
    },
    {
      "epoch": 12.884615384615385,
      "grad_norm": 0.11167220026254654,
      "learning_rate": 6.125392315912761e-05,
      "loss": 0.0089,
      "step": 3350
    },
    {
      "epoch": 12.923076923076923,
      "grad_norm": 0.10831273347139359,
      "learning_rate": 6.066736998205054e-05,
      "loss": 0.0088,
      "step": 3360
    },
    {
      "epoch": 12.961538461538462,
      "grad_norm": 0.07814571261405945,
      "learning_rate": 6.00824133503198e-05,
      "loss": 0.0092,
      "step": 3370
    },
    {
      "epoch": 13.0,
      "grad_norm": 0.11000088602304459,
      "learning_rate": 5.94990770078292e-05,
      "loss": 0.0096,
      "step": 3380
    },
    {
      "epoch": 13.038461538461538,
      "grad_norm": 0.1632835417985916,
      "learning_rate": 5.8917384632703534e-05,
      "loss": 0.0068,
      "step": 3390
    },
    {
      "epoch": 13.076923076923077,
      "grad_norm": 0.12230722606182098,
      "learning_rate": 5.8337359836337526e-05,
      "loss": 0.0092,
      "step": 3400
    },
    {
      "epoch": 13.115384615384615,
      "grad_norm": 0.12406644970178604,
      "learning_rate": 5.775902616243751e-05,
      "loss": 0.0095,
      "step": 3410
    },
    {
      "epoch": 13.153846153846153,
      "grad_norm": 0.10183268785476685,
      "learning_rate": 5.7182407086065706e-05,
      "loss": 0.0074,
      "step": 3420
    },
    {
      "epoch": 13.192307692307692,
      "grad_norm": 0.11906790733337402,
      "learning_rate": 5.660752601268731e-05,
      "loss": 0.0064,
      "step": 3430
    },
    {
      "epoch": 13.23076923076923,
      "grad_norm": 0.13318245112895966,
      "learning_rate": 5.603440627722043e-05,
      "loss": 0.0097,
      "step": 3440
    },
    {
      "epoch": 13.26923076923077,
      "grad_norm": 0.10035967826843262,
      "learning_rate": 5.546307114308902e-05,
      "loss": 0.0105,
      "step": 3450
    },
    {
      "epoch": 13.307692307692308,
      "grad_norm": 0.11527876555919647,
      "learning_rate": 5.4893543801278514e-05,
      "loss": 0.0077,
      "step": 3460
    },
    {
      "epoch": 13.346153846153847,
      "grad_norm": 0.18029938638210297,
      "learning_rate": 5.4325847369394486e-05,
      "loss": 0.0099,
      "step": 3470
    },
    {
      "epoch": 13.384615384615385,
      "grad_norm": 0.13629525899887085,
      "learning_rate": 5.376000489072437e-05,
      "loss": 0.0082,
      "step": 3480
    },
    {
      "epoch": 13.423076923076923,
      "grad_norm": 0.0793733075261116,
      "learning_rate": 5.3196039333301864e-05,
      "loss": 0.0072,
      "step": 3490
    },
    {
      "epoch": 13.461538461538462,
      "grad_norm": 0.14016038179397583,
      "learning_rate": 5.263397358897499e-05,
      "loss": 0.0101,
      "step": 3500
    },
    {
      "epoch": 13.5,
      "grad_norm": 0.13849851489067078,
      "learning_rate": 5.207383047247665e-05,
      "loss": 0.0086,
      "step": 3510
    },
    {
      "epoch": 13.538461538461538,
      "grad_norm": 0.10788647085428238,
      "learning_rate": 5.1515632720498666e-05,
      "loss": 0.0075,
      "step": 3520
    },
    {
      "epoch": 13.576923076923077,
      "grad_norm": 0.14721888303756714,
      "learning_rate": 5.095940299076874e-05,
      "loss": 0.0111,
      "step": 3530
    },
    {
      "epoch": 13.615384615384615,
      "grad_norm": 0.11311782151460648,
      "learning_rate": 5.0405163861130925e-05,
      "loss": 0.0083,
      "step": 3540
    },
    {
      "epoch": 13.653846153846153,
      "grad_norm": 0.18341895937919617,
      "learning_rate": 4.985293782862913e-05,
      "loss": 0.0074,
      "step": 3550
    },
    {
      "epoch": 13.692307692307692,
      "grad_norm": 0.10047091543674469,
      "learning_rate": 4.930274730859391e-05,
      "loss": 0.0076,
      "step": 3560
    },
    {
      "epoch": 13.73076923076923,
      "grad_norm": 0.10870840400457382,
      "learning_rate": 4.875461463373251e-05,
      "loss": 0.0078,
      "step": 3570
    },
    {
      "epoch": 13.76923076923077,
      "grad_norm": 0.12459917366504669,
      "learning_rate": 4.820856205322257e-05,
      "loss": 0.0093,
      "step": 3580
    },
    {
      "epoch": 13.807692307692308,
      "grad_norm": 0.07429447025060654,
      "learning_rate": 4.766461173180888e-05,
      "loss": 0.0086,
      "step": 3590
    },
    {
      "epoch": 13.846153846153847,
      "grad_norm": 0.08076323568820953,
      "learning_rate": 4.7122785748903755e-05,
      "loss": 0.0079,
      "step": 3600
    },
    {
      "epoch": 13.884615384615385,
      "grad_norm": 0.09467586874961853,
      "learning_rate": 4.6583106097690744e-05,
      "loss": 0.0069,
      "step": 3610
    },
    {
      "epoch": 13.923076923076923,
      "grad_norm": 0.10673276335000992,
      "learning_rate": 4.604559468423188e-05,
      "loss": 0.0084,
      "step": 3620
    },
    {
      "epoch": 13.961538461538462,
      "grad_norm": 0.12460212409496307,
      "learning_rate": 4.551027332657868e-05,
      "loss": 0.0067,
      "step": 3630
    },
    {
      "epoch": 14.0,
      "grad_norm": 0.13634030520915985,
      "learning_rate": 4.4977163753886366e-05,
      "loss": 0.008,
      "step": 3640
    },
    {
      "epoch": 14.038461538461538,
      "grad_norm": 0.10553108155727386,
      "learning_rate": 4.4446287605531826e-05,
      "loss": 0.0064,
      "step": 3650
    },
    {
      "epoch": 14.076923076923077,
      "grad_norm": 0.11521629989147186,
      "learning_rate": 4.391766643023542e-05,
      "loss": 0.0084,
      "step": 3660
    },
    {
      "epoch": 14.115384615384615,
      "grad_norm": 0.10882071405649185,
      "learning_rate": 4.3391321685186184e-05,
      "loss": 0.0078,
      "step": 3670
    },
    {
      "epoch": 14.153846153846153,
      "grad_norm": 0.061356205493211746,
      "learning_rate": 4.2867274735170914e-05,
      "loss": 0.0066,
      "step": 3680
    },
    {
      "epoch": 14.192307692307692,
      "grad_norm": 0.12024400383234024,
      "learning_rate": 4.234554685170683e-05,
|
"loss": 0.0062, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 14.23076923076923, |
|
"grad_norm": 0.1351236253976822, |
|
"learning_rate": 4.182615921217835e-05, |
|
"loss": 0.0084, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 14.26923076923077, |
|
"grad_norm": 0.06800480931997299, |
|
"learning_rate": 4.130913289897732e-05, |
|
"loss": 0.0056, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 14.307692307692308, |
|
"grad_norm": 0.11439716070890427, |
|
"learning_rate": 4.0794488898647365e-05, |
|
"loss": 0.0083, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 14.346153846153847, |
|
"grad_norm": 0.07181525975465775, |
|
"learning_rate": 4.0282248101031884e-05, |
|
"loss": 0.007, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 14.384615384615385, |
|
"grad_norm": 0.08257249742746353, |
|
"learning_rate": 3.977243129842627e-05, |
|
"loss": 0.0067, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 14.423076923076923, |
|
"grad_norm": 0.06434876471757889, |
|
"learning_rate": 3.9265059184733934e-05, |
|
"loss": 0.0074, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 14.461538461538462, |
|
"grad_norm": 0.11237592250108719, |
|
"learning_rate": 3.8760152354626075e-05, |
|
"loss": 0.0065, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 14.5, |
|
"grad_norm": 0.1452643871307373, |
|
"learning_rate": 3.825773130270609e-05, |
|
"loss": 0.0081, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 14.538461538461538, |
|
"grad_norm": 0.08552118390798569, |
|
"learning_rate": 3.7757816422677406e-05, |
|
"loss": 0.0069, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 14.576923076923077, |
|
"grad_norm": 0.12589380145072937, |
|
"learning_rate": 3.7260428006515835e-05, |
|
"loss": 0.0056, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 14.615384615384615, |
|
"grad_norm": 0.0928504690527916, |
|
"learning_rate": 3.6765586243645735e-05, |
|
"loss": 0.0082, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 14.653846153846153, |
|
"grad_norm": 0.11787436157464981, |
|
"learning_rate": 3.6273311220120696e-05, |
|
"loss": 0.0082, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 14.692307692307692, |
|
"grad_norm": 0.1571059226989746, |
|
"learning_rate": 3.578362291780818e-05, |
|
"loss": 0.0065, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 14.73076923076923, |
|
"grad_norm": 0.09746450930833817, |
|
"learning_rate": 3.529654121357827e-05, |
|
"loss": 0.0073, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 14.76923076923077, |
|
"grad_norm": 0.10719246417284012, |
|
"learning_rate": 3.481208587849712e-05, |
|
"loss": 0.0075, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 14.807692307692308, |
|
"grad_norm": 0.09190470725297928, |
|
"learning_rate": 3.4330276577024255e-05, |
|
"loss": 0.0062, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 14.846153846153847, |
|
"grad_norm": 0.1003689393401146, |
|
"learning_rate": 3.3851132866214394e-05, |
|
"loss": 0.0063, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 14.884615384615385, |
|
"grad_norm": 0.10899902880191803, |
|
"learning_rate": 3.337467419492368e-05, |
|
"loss": 0.0069, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 14.923076923076923, |
|
"grad_norm": 0.10312393307685852, |
|
"learning_rate": 3.290091990302011e-05, |
|
"loss": 0.0065, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 14.961538461538462, |
|
"grad_norm": 0.10359740257263184, |
|
"learning_rate": 3.2429889220598655e-05, |
|
"loss": 0.0062, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"grad_norm": 0.07049980014562607, |
|
"learning_rate": 3.19616012672006e-05, |
|
"loss": 0.0064, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 15.038461538461538, |
|
"grad_norm": 0.06978161633014679, |
|
"learning_rate": 3.1496075051037574e-05, |
|
"loss": 0.0065, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 15.076923076923077, |
|
"grad_norm": 0.16202770173549652, |
|
"learning_rate": 3.1033329468219775e-05, |
|
"loss": 0.01, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 15.115384615384615, |
|
"grad_norm": 0.09229037910699844, |
|
"learning_rate": 3.057338330198919e-05, |
|
"loss": 0.0068, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 15.153846153846153, |
|
"grad_norm": 0.12501412630081177, |
|
"learning_rate": 3.0116255221957047e-05, |
|
"loss": 0.0061, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 15.192307692307692, |
|
"grad_norm": 0.07364345341920853, |
|
"learning_rate": 2.9661963783346058e-05, |
|
"loss": 0.0063, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 15.23076923076923, |
|
"grad_norm": 0.129119873046875, |
|
"learning_rate": 2.92105274262372e-05, |
|
"loss": 0.007, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 15.26923076923077, |
|
"grad_norm": 0.09468358010053635, |
|
"learning_rate": 2.876196447482117e-05, |
|
"loss": 0.0065, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 15.307692307692308, |
|
"grad_norm": 0.11235329508781433, |
|
"learning_rate": 2.8316293136654725e-05, |
|
"loss": 0.0087, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 15.346153846153847, |
|
"grad_norm": 0.1262669563293457, |
|
"learning_rate": 2.7873531501921545e-05, |
|
"loss": 0.0106, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 15.384615384615385, |
|
"grad_norm": 0.10660561174154282, |
|
"learning_rate": 2.7433697542697946e-05, |
|
"loss": 0.0099, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 15.423076923076923, |
|
"grad_norm": 0.07230859249830246, |
|
"learning_rate": 2.6996809112223277e-05, |
|
"loss": 0.0075, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 15.461538461538462, |
|
"grad_norm": 0.15842293202877045, |
|
"learning_rate": 2.6562883944175456e-05, |
|
"loss": 0.008, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 15.5, |
|
"grad_norm": 0.0934629887342453, |
|
"learning_rate": 2.6131939651950966e-05, |
|
"loss": 0.0056, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 15.538461538461538, |
|
"grad_norm": 0.131788432598114, |
|
"learning_rate": 2.5703993727950016e-05, |
|
"loss": 0.0071, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 15.576923076923077, |
|
"grad_norm": 0.09212549030780792, |
|
"learning_rate": 2.5279063542866376e-05, |
|
"loss": 0.0055, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 15.615384615384615, |
|
"grad_norm": 0.1430416852235794, |
|
"learning_rate": 2.4857166344982462e-05, |
|
"loss": 0.0065, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 15.653846153846153, |
|
"grad_norm": 0.08664490282535553, |
|
"learning_rate": 2.44383192594691e-05, |
|
"loss": 0.0059, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 15.692307692307692, |
|
"grad_norm": 0.08127816766500473, |
|
"learning_rate": 2.4022539287690416e-05, |
|
"loss": 0.0066, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 15.73076923076923, |
|
"grad_norm": 0.10413156449794769, |
|
"learning_rate": 2.360984330651378e-05, |
|
"loss": 0.0069, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 15.76923076923077, |
|
"grad_norm": 0.08507948368787766, |
|
"learning_rate": 2.3200248067624654e-05, |
|
"loss": 0.0071, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 15.807692307692308, |
|
"grad_norm": 0.0667034238576889, |
|
"learning_rate": 2.2793770196846743e-05, |
|
"loss": 0.0061, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 15.846153846153847, |
|
"grad_norm": 0.09667922556400299, |
|
"learning_rate": 2.239042619346713e-05, |
|
"loss": 0.0075, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 15.884615384615385, |
|
"grad_norm": 0.0950574055314064, |
|
"learning_rate": 2.1990232429566484e-05, |
|
"loss": 0.005, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 15.923076923076923, |
|
"grad_norm": 0.09804387390613556, |
|
"learning_rate": 2.159320514935449e-05, |
|
"loss": 0.0073, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 15.961538461538462, |
|
"grad_norm": 0.06155986711382866, |
|
"learning_rate": 2.119936046851062e-05, |
|
"loss": 0.0084, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"grad_norm": 0.12021949142217636, |
|
"learning_rate": 2.080871437352985e-05, |
|
"loss": 0.0056, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 16.03846153846154, |
|
"grad_norm": 0.11103606224060059, |
|
"learning_rate": 2.042128272107382e-05, |
|
"loss": 0.0074, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 16.076923076923077, |
|
"grad_norm": 0.07725587487220764, |
|
"learning_rate": 2.0037081237327125e-05, |
|
"loss": 0.0073, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 16.115384615384617, |
|
"grad_norm": 0.081053227186203, |
|
"learning_rate": 1.965612551735908e-05, |
|
"loss": 0.0053, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 16.153846153846153, |
|
"grad_norm": 0.09564225375652313, |
|
"learning_rate": 1.927843102449065e-05, |
|
"loss": 0.0059, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 16.192307692307693, |
|
"grad_norm": 0.10796211659908295, |
|
"learning_rate": 1.890401308966676e-05, |
|
"loss": 0.0052, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 16.23076923076923, |
|
"grad_norm": 0.05812790244817734, |
|
"learning_rate": 1.853288691083407e-05, |
|
"loss": 0.0054, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 16.26923076923077, |
|
"grad_norm": 0.07313115149736404, |
|
"learning_rate": 1.816506755232391e-05, |
|
"loss": 0.006, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 16.307692307692307, |
|
"grad_norm": 0.07596628367900848, |
|
"learning_rate": 1.7800569944241053e-05, |
|
"loss": 0.0054, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 16.346153846153847, |
|
"grad_norm": 0.10533738136291504, |
|
"learning_rate": 1.74394088818575e-05, |
|
"loss": 0.007, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 16.384615384615383, |
|
"grad_norm": 0.0967307761311531, |
|
"learning_rate": 1.708159902501201e-05, |
|
"loss": 0.0056, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 16.423076923076923, |
|
"grad_norm": 0.06359889358282089, |
|
"learning_rate": 1.672715489751496e-05, |
|
"loss": 0.006, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 16.46153846153846, |
|
"grad_norm": 0.08679856359958649, |
|
"learning_rate": 1.637609088655896e-05, |
|
"loss": 0.0053, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 16.5, |
|
"grad_norm": 0.07200346887111664, |
|
"learning_rate": 1.6028421242134707e-05, |
|
"loss": 0.0051, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 16.53846153846154, |
|
"grad_norm": 0.06287889182567596, |
|
"learning_rate": 1.568416007645269e-05, |
|
"loss": 0.0055, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 16.576923076923077, |
|
"grad_norm": 0.08229623734951019, |
|
"learning_rate": 1.53433213633703e-05, |
|
"loss": 0.0062, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 16.615384615384617, |
|
"grad_norm": 0.060530465096235275, |
|
"learning_rate": 1.5005918937824548e-05, |
|
"loss": 0.0067, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 16.653846153846153, |
|
"grad_norm": 0.094852514564991, |
|
"learning_rate": 1.4671966495270672e-05, |
|
"loss": 0.0057, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 16.692307692307693, |
|
"grad_norm": 0.09454119205474854, |
|
"learning_rate": 1.4341477591126096e-05, |
|
"loss": 0.0055, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 16.73076923076923, |
|
"grad_norm": 0.06650886684656143, |
|
"learning_rate": 1.4014465640220243e-05, |
|
"loss": 0.0067, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 16.76923076923077, |
|
"grad_norm": 0.0640147477388382, |
|
"learning_rate": 1.3690943916249943e-05, |
|
"loss": 0.0056, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 16.807692307692307, |
|
"grad_norm": 0.0917372778058052, |
|
"learning_rate": 1.3370925551240798e-05, |
|
"loss": 0.006, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 16.846153846153847, |
|
"grad_norm": 0.09550613164901733, |
|
"learning_rate": 1.3054423535014005e-05, |
|
"loss": 0.0063, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 16.884615384615383, |
|
"grad_norm": 0.0998285710811615, |
|
"learning_rate": 1.2741450714659153e-05, |
|
"loss": 0.0055, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 16.923076923076923, |
|
"grad_norm": 0.16476678848266602, |
|
"learning_rate": 1.2432019794012684e-05, |
|
"loss": 0.0069, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 16.96153846153846, |
|
"grad_norm": 0.18762223422527313, |
|
"learning_rate": 1.2126143333142337e-05, |
|
"loss": 0.007, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"grad_norm": 0.07807065546512604, |
|
"learning_rate": 1.1823833747837233e-05, |
|
"loss": 0.006, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 17.03846153846154, |
|
"grad_norm": 0.086389921605587, |
|
"learning_rate": 1.1525103309103934e-05, |
|
"loss": 0.0046, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 17.076923076923077, |
|
"grad_norm": 0.09868927299976349, |
|
"learning_rate": 1.1229964142668425e-05, |
|
"loss": 0.0061, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 17.115384615384617, |
|
"grad_norm": 0.06974314898252487, |
|
"learning_rate": 1.0938428228483733e-05, |
|
"loss": 0.0049, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 17.153846153846153, |
|
"grad_norm": 0.06945253908634186, |
|
"learning_rate": 1.0650507400243859e-05, |
|
"loss": 0.0065, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 17.192307692307693, |
|
"grad_norm": 0.08085139840841293, |
|
"learning_rate": 1.0366213344903342e-05, |
|
"loss": 0.005, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 17.23076923076923, |
|
"grad_norm": 0.08081810176372528, |
|
"learning_rate": 1.00855576022029e-05, |
|
"loss": 0.0051, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 17.26923076923077, |
|
"grad_norm": 0.04182474687695503, |
|
"learning_rate": 9.808551564200908e-06, |
|
"loss": 0.0056, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 17.307692307692307, |
|
"grad_norm": 0.1365455687046051, |
|
"learning_rate": 9.535206474811198e-06, |
|
"loss": 0.0068, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 17.346153846153847, |
|
"grad_norm": 0.10072525590658188, |
|
"learning_rate": 9.265533429346484e-06, |
|
"loss": 0.0058, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 17.384615384615383, |
|
"grad_norm": 0.0997309759259224, |
|
"learning_rate": 8.999543374068098e-06, |
|
"loss": 0.0055, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 17.423076923076923, |
|
"grad_norm": 0.05286664515733719, |
|
"learning_rate": 8.737247105741542e-06, |
|
"loss": 0.0036, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 17.46153846153846, |
|
"grad_norm": 0.0948430672287941, |
|
"learning_rate": 8.478655271198389e-06, |
|
"loss": 0.0057, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 17.5, |
|
"grad_norm": 0.043210502713918686, |
|
"learning_rate": 8.223778366904056e-06, |
|
"loss": 0.0058, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 17.53846153846154, |
|
"grad_norm": 0.08293162286281586, |
|
"learning_rate": 7.972626738531708e-06, |
|
"loss": 0.0057, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 17.576923076923077, |
|
"grad_norm": 0.049041640013456345, |
|
"learning_rate": 7.725210580542387e-06, |
|
"loss": 0.0049, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 17.615384615384617, |
|
"grad_norm": 0.08410376310348511, |
|
"learning_rate": 7.48153993577112e-06, |
|
"loss": 0.005, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 17.653846153846153, |
|
"grad_norm": 0.07442956417798996, |
|
"learning_rate": 7.241624695019366e-06, |
|
"loss": 0.0046, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 17.692307692307693, |
|
"grad_norm": 0.04315830394625664, |
|
"learning_rate": 7.005474596653494e-06, |
|
"loss": 0.0063, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 17.73076923076923, |
|
"grad_norm": 0.10330154001712799, |
|
"learning_rate": 6.773099226209501e-06, |
|
"loss": 0.0056, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 17.76923076923077, |
|
"grad_norm": 0.0930248498916626, |
|
"learning_rate": 6.544508016003903e-06, |
|
"loss": 0.0058, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 17.807692307692307, |
|
"grad_norm": 0.22107692062854767, |
|
"learning_rate": 6.319710244750898e-06, |
|
"loss": 0.0058, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 17.846153846153847, |
|
"grad_norm": 0.07021880894899368, |
|
"learning_rate": 6.0987150371857534e-06, |
|
"loss": 0.005, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 17.884615384615383, |
|
"grad_norm": 0.12697994709014893, |
|
"learning_rate": 5.88153136369437e-06, |
|
"loss": 0.0053, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 17.923076923076923, |
|
"grad_norm": 0.0982799381017685, |
|
"learning_rate": 5.668168039949206e-06, |
|
"loss": 0.006, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 17.96153846153846, |
|
"grad_norm": 0.07556256651878357, |
|
"learning_rate": 5.458633726551443e-06, |
|
"loss": 0.0052, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 18.0, |
|
"grad_norm": 0.0795573741197586, |
|
"learning_rate": 5.252936928679431e-06, |
|
"loss": 0.0057, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 18.03846153846154, |
|
"grad_norm": 0.0670771524310112, |
|
"learning_rate": 5.051085995743421e-06, |
|
"loss": 0.0076, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 18.076923076923077, |
|
"grad_norm": 0.11251858621835709, |
|
"learning_rate": 4.853089121046739e-06, |
|
"loss": 0.0065, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 18.115384615384617, |
|
"grad_norm": 0.10217051953077316, |
|
"learning_rate": 4.6589543414531364e-06, |
|
"loss": 0.0043, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 18.153846153846153, |
|
"grad_norm": 0.050674889236688614, |
|
"learning_rate": 4.468689537060633e-06, |
|
"loss": 0.0049, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 18.192307692307693, |
|
"grad_norm": 0.053426776081323624, |
|
"learning_rate": 4.282302430881568e-06, |
|
"loss": 0.0059, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 18.23076923076923, |
|
"grad_norm": 0.09936069697141647, |
|
"learning_rate": 4.099800588529212e-06, |
|
"loss": 0.0057, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 18.26923076923077, |
|
"grad_norm": 0.1067737266421318, |
|
"learning_rate": 3.921191417910608e-06, |
|
"loss": 0.0048, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 18.307692307692307, |
|
"grad_norm": 0.058368340134620667, |
|
"learning_rate": 3.746482168925902e-06, |
|
"loss": 0.0068, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 18.346153846153847, |
|
"grad_norm": 0.10528615117073059, |
|
"learning_rate": 3.5756799331740588e-06, |
|
"loss": 0.0064, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 18.384615384615383, |
|
"grad_norm": 0.0884661078453064, |
|
"learning_rate": 3.4087916436650104e-06, |
|
"loss": 0.0066, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 18.423076923076923, |
|
"grad_norm": 0.03825301676988602, |
|
"learning_rate": 3.2458240745382617e-06, |
|
"loss": 0.005, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 18.46153846153846, |
|
"grad_norm": 0.08059193193912506, |
|
"learning_rate": 3.0867838407878436e-06, |
|
"loss": 0.0064, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 18.5, |
|
"grad_norm": 0.0991053432226181, |
|
"learning_rate": 2.931677397993915e-06, |
|
"loss": 0.0048, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 18.53846153846154, |
|
"grad_norm": 0.05811507999897003, |
|
"learning_rate": 2.7805110420606407e-06, |
|
"loss": 0.0073, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 18.576923076923077, |
|
"grad_norm": 0.10655184090137482, |
|
"learning_rate": 2.633290908960684e-06, |
|
"loss": 0.004, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 18.615384615384617, |
|
"grad_norm": 0.07305694371461868, |
|
"learning_rate": 2.490022974486095e-06, |
|
"loss": 0.0053, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 18.653846153846153, |
|
"grad_norm": 0.06774792075157166, |
|
"learning_rate": 2.350713054005793e-06, |
|
"loss": 0.0049, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 18.692307692307693, |
|
"grad_norm": 0.049805670976638794, |
|
"learning_rate": 2.2153668022295015e-06, |
|
"loss": 0.0044, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 18.73076923076923, |
|
"grad_norm": 0.048138655722141266, |
|
"learning_rate": 2.0839897129782183e-06, |
|
"loss": 0.0055, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 18.76923076923077, |
|
"grad_norm": 0.090663842856884, |
|
"learning_rate": 1.956587118961195e-06, |
|
"loss": 0.0047, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 18.807692307692307, |
|
"grad_norm": 0.11241704970598221, |
|
"learning_rate": 1.833164191559511e-06, |
|
"loss": 0.0044, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 18.846153846153847, |
|
"grad_norm": 0.04880325496196747, |
|
"learning_rate": 1.713725940616151e-06, |
|
"loss": 0.0038, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 18.884615384615383, |
|
"grad_norm": 0.04214165359735489, |
|
"learning_rate": 1.5982772142326462e-06, |
|
"loss": 0.0043, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 18.923076923076923, |
|
"grad_norm": 0.07064809650182724, |
|
"learning_rate": 1.4868226985722877e-06, |
|
"loss": 0.0056, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 18.96153846153846, |
|
"grad_norm": 0.06411836296319962, |
|
"learning_rate": 1.3793669176698887e-06, |
|
"loss": 0.0056, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 19.0, |
|
"grad_norm": 0.10901802033185959, |
|
"learning_rate": 1.2759142332482099e-06, |
|
"loss": 0.0057, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 19.03846153846154, |
|
"grad_norm": 0.05795470252633095, |
|
"learning_rate": 1.1764688445408677e-06, |
|
"loss": 0.0049, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 19.076923076923077, |
|
"grad_norm": 0.03703946992754936, |
|
"learning_rate": 1.0810347881218708e-06, |
|
"loss": 0.0059, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 19.115384615384617, |
|
"grad_norm": 0.05625122785568237, |
|
"learning_rate": 9.896159377418057e-07, |
|
"loss": 0.0065, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 19.153846153846153, |
|
"grad_norm": 0.08023211359977722, |
|
"learning_rate": 9.022160041705974e-07, |
|
"loss": 0.0053, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 19.192307692307693, |
|
"grad_norm": 0.06640934944152832, |
|
"learning_rate": 8.188385350468508e-07, |
|
"loss": 0.0046, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 19.23076923076923, |
|
"grad_norm": 0.10493505746126175, |
|
"learning_rate": 7.394869147339112e-07, |
|
"loss": 0.0061, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 19.26923076923077, |
|
"grad_norm": 0.09153269976377487, |
|
"learning_rate": 6.641643641823958e-07, |
|
"loss": 0.0067, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 19.307692307692307, |
|
"grad_norm": 0.06915758550167084, |
|
"learning_rate": 5.928739407995543e-07, |
|
"loss": 0.0052, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 19.346153846153847, |
|
"grad_norm": 0.08392194658517838, |
|
"learning_rate": 5.256185383251011e-07, |
|
"loss": 0.0061, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 19.384615384615383, |
|
"grad_norm": 0.04674766585230827, |
|
"learning_rate": 4.6240088671378746e-07, |
|
"loss": 0.0046, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 19.423076923076923, |
|
"grad_norm": 0.052531130611896515, |
|
"learning_rate": 4.0322355202455644e-07, |
|
"loss": 0.006, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 19.46153846153846, |
|
"grad_norm": 0.062260791659355164, |
|
"learning_rate": 3.4808893631642636e-07, |
|
"loss": 0.0043, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 19.5, |
|
"grad_norm": 0.06215400993824005, |
|
"learning_rate": 2.969992775509578e-07, |
|
"loss": 0.0045, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 19.53846153846154, |
|
"grad_norm": 0.07574478536844254, |
|
"learning_rate": 2.4995664950141493e-07, |
|
"loss": 0.0038, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 19.576923076923077, |
|
"grad_norm": 0.05041692778468132, |
|
"learning_rate": 2.0696296166862194e-07, |
|
"loss": 0.0038, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 19.615384615384617, |
|
"grad_norm": 0.10670372098684311, |
|
"learning_rate": 1.6801995920341375e-07, |
|
"loss": 0.0049, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 19.653846153846153, |
|
"grad_norm": 0.05979029834270477, |
|
"learning_rate": 1.3312922283581496e-07, |
|
"loss": 0.0048, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 19.692307692307693, |
|
"grad_norm": 0.05246563255786896, |
|
"learning_rate": 1.0229216881088022e-07, |
|
"loss": 0.0047, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 19.73076923076923, |
|
"grad_norm": 0.08008202165365219, |
|
"learning_rate": 7.551004883120661e-08, |
|
"loss": 0.0045, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 19.76923076923077, |
|
"grad_norm": 0.04513508453965187, |
|
"learning_rate": 5.278395000611891e-08, |
|
"loss": 0.0047, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 19.807692307692307, |
|
"grad_norm": 0.07471077144145966, |
|
"learning_rate": 3.4114794807571514e-08, |
|
"loss": 0.0056, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 19.846153846153847, |
|
"grad_norm": 0.07386625558137894, |
|
"learning_rate": 1.9503341032633958e-08, |
|
"loss": 0.0052, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 19.884615384615383, |
|
"grad_norm": 0.05373087525367737, |
|
"learning_rate": 8.950181772826582e-09, |
|
"loss": 0.0062, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 19.923076923076923, |
|
"grad_norm": 0.0932120531797409, |
|
"learning_rate": 2.455745389962072e-09, |
|
"loss": 0.0047, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 19.96153846153846, |
|
"grad_norm": 0.08532081544399261, |
|
"learning_rate": 2.0295498825984027e-11, |
|
"loss": 0.0037, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 19.965384615384615, |
|
"step": 5191, |
|
"total_flos": 7.394345918457876e+17, |
|
"train_loss": 0.02140558246369566, |
|
"train_runtime": 6034.0474, |
|
"train_samples_per_second": 55.058, |
|
"train_steps_per_second": 0.86 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 5191, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 20, |
|
"save_steps": 10000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 7.394345918457876e+17, |
|
"train_batch_size": 64, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|