{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 5.0,
"eval_steps": 500,
"global_step": 1535,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.03257328990228013,
"grad_norm": 8.907567024230957,
"learning_rate": 2.5974025974025972e-05,
"loss": 1.8912,
"step": 10
},
{
"epoch": 0.06514657980456026,
"grad_norm": 9.222850799560547,
"learning_rate": 5.1948051948051944e-05,
"loss": 0.5715,
"step": 20
},
{
"epoch": 0.09771986970684039,
"grad_norm": 2.1737639904022217,
"learning_rate": 7.792207792207793e-05,
"loss": 0.2636,
"step": 30
},
{
"epoch": 0.13029315960912052,
"grad_norm": 2.1530239582061768,
"learning_rate": 0.00010389610389610389,
"loss": 0.1988,
"step": 40
},
{
"epoch": 0.16286644951140064,
"grad_norm": 1.3454252481460571,
"learning_rate": 0.00012987012987012987,
"loss": 0.1389,
"step": 50
},
{
"epoch": 0.19543973941368079,
"grad_norm": 1.806857943534851,
"learning_rate": 0.00015584415584415587,
"loss": 0.1527,
"step": 60
},
{
"epoch": 0.2280130293159609,
"grad_norm": 1.9475533962249756,
"learning_rate": 0.00018181818181818183,
"loss": 0.1769,
"step": 70
},
{
"epoch": 0.26058631921824105,
"grad_norm": 0.9884098768234253,
"learning_rate": 0.0001999979107245606,
"loss": 0.1225,
"step": 80
},
{
"epoch": 0.2931596091205212,
"grad_norm": 0.8951364755630493,
"learning_rate": 0.0001999607704786645,
"loss": 0.108,
"step": 90
},
{
"epoch": 0.3257328990228013,
"grad_norm": 1.403916835784912,
"learning_rate": 0.00019987722173728587,
"loss": 0.1324,
"step": 100
},
{
"epoch": 0.3583061889250814,
"grad_norm": 1.019700288772583,
"learning_rate": 0.00019974730328935534,
"loss": 0.1047,
"step": 110
},
{
"epoch": 0.39087947882736157,
"grad_norm": 0.8862571120262146,
"learning_rate": 0.00019957107545173192,
"loss": 0.1011,
"step": 120
},
{
"epoch": 0.4234527687296417,
"grad_norm": 0.7065503597259521,
"learning_rate": 0.00019934862004120004,
"loss": 0.0847,
"step": 130
},
{
"epoch": 0.4560260586319218,
"grad_norm": 1.27033531665802,
"learning_rate": 0.00019908004033648453,
"loss": 0.0785,
"step": 140
},
{
"epoch": 0.48859934853420195,
"grad_norm": 0.9348053932189941,
"learning_rate": 0.00019876546103030196,
"loss": 0.0835,
"step": 150
},
{
"epoch": 0.5211726384364821,
"grad_norm": 0.8986678719520569,
"learning_rate": 0.00019840502817146966,
"loss": 0.083,
"step": 160
},
{
"epoch": 0.5537459283387622,
"grad_norm": 1.101909875869751,
"learning_rate": 0.00019799890909710013,
"loss": 0.0893,
"step": 170
},
{
"epoch": 0.5863192182410424,
"grad_norm": 0.7529295086860657,
"learning_rate": 0.00019754729235491207,
"loss": 0.0877,
"step": 180
},
{
"epoch": 0.6188925081433225,
"grad_norm": 0.7967862486839294,
"learning_rate": 0.0001970503876156937,
"loss": 0.0888,
"step": 190
},
{
"epoch": 0.6514657980456026,
"grad_norm": 1.1783355474472046,
"learning_rate": 0.00019650842557595967,
"loss": 0.0756,
"step": 200
},
{
"epoch": 0.6840390879478827,
"grad_norm": 1.0442906618118286,
"learning_rate": 0.00019592165785084603,
"loss": 0.0999,
"step": 210
},
{
"epoch": 0.7166123778501629,
"grad_norm": 4.202133655548096,
"learning_rate": 0.00019529035685729391,
"loss": 0.0924,
"step": 220
},
{
"epoch": 0.749185667752443,
"grad_norm": 2.0415215492248535,
"learning_rate": 0.00019461481568757506,
"loss": 0.1073,
"step": 230
},
{
"epoch": 0.7817589576547231,
"grad_norm": 0.6503229737281799,
"learning_rate": 0.00019389534797321884,
"loss": 0.0718,
"step": 240
},
{
"epoch": 0.8143322475570033,
"grad_norm": 2.1912670135498047,
"learning_rate": 0.00019313228773940345,
"loss": 0.0771,
"step": 250
},
{
"epoch": 0.8469055374592834,
"grad_norm": 1.1402329206466675,
"learning_rate": 0.00019232598924987903,
"loss": 0.0942,
"step": 260
},
{
"epoch": 0.8794788273615635,
"grad_norm": 1.558457612991333,
"learning_rate": 0.0001914768268424946,
"loss": 0.0822,
"step": 270
},
{
"epoch": 0.9120521172638436,
"grad_norm": 0.685741662979126,
"learning_rate": 0.00019058519475540538,
"loss": 0.0798,
"step": 280
},
{
"epoch": 0.9446254071661238,
"grad_norm": 0.8257420659065247,
"learning_rate": 0.00018965150694404094,
"loss": 0.0555,
"step": 290
},
{
"epoch": 0.9771986970684039,
"grad_norm": 0.7239089608192444,
"learning_rate": 0.00018867619688891937,
"loss": 0.079,
"step": 300
},
{
"epoch": 1.009771986970684,
"grad_norm": 0.46614518761634827,
"learning_rate": 0.0001876597173943965,
"loss": 0.0696,
"step": 310
},
{
"epoch": 1.0423452768729642,
"grad_norm": 0.6465802788734436,
"learning_rate": 0.00018660254037844388,
"loss": 0.0675,
"step": 320
},
{
"epoch": 1.0749185667752443,
"grad_norm": 1.07686448097229,
"learning_rate": 0.00018550515665355247,
"loss": 0.0594,
"step": 330
},
{
"epoch": 1.1074918566775245,
"grad_norm": 0.37035682797431946,
"learning_rate": 0.000184368075698865,
"loss": 0.0508,
"step": 340
},
{
"epoch": 1.1400651465798046,
"grad_norm": 0.5353094935417175,
"learning_rate": 0.00018319182542364117,
"loss": 0.0752,
"step": 350
},
{
"epoch": 1.1726384364820848,
"grad_norm": 0.5493381023406982,
"learning_rate": 0.00018197695192216702,
"loss": 0.0568,
"step": 360
},
{
"epoch": 1.205211726384365,
"grad_norm": 0.3111603260040283,
"learning_rate": 0.00018072401922022117,
"loss": 0.0427,
"step": 370
},
{
"epoch": 1.237785016286645,
"grad_norm": 0.4221360385417938,
"learning_rate": 0.0001794336090132164,
"loss": 0.0633,
"step": 380
},
{
"epoch": 1.2703583061889252,
"grad_norm": 0.8296531438827515,
"learning_rate": 0.00017810632039613736,
"loss": 0.0585,
"step": 390
},
{
"epoch": 1.3029315960912053,
"grad_norm": 0.4350226819515228,
"learning_rate": 0.00017674276958540072,
"loss": 0.0476,
"step": 400
},
{
"epoch": 1.3355048859934853,
"grad_norm": 0.8966237306594849,
"learning_rate": 0.00017534358963276607,
"loss": 0.0532,
"step": 410
},
{
"epoch": 1.3680781758957654,
"grad_norm": 0.4114079177379608,
"learning_rate": 0.00017390943013143083,
"loss": 0.0593,
"step": 420
},
{
"epoch": 1.4006514657980456,
"grad_norm": 0.5560611486434937,
"learning_rate": 0.0001724409569144455,
"loss": 0.0514,
"step": 430
},
{
"epoch": 1.4332247557003257,
"grad_norm": 0.5275083184242249,
"learning_rate": 0.0001709388517455893,
"loss": 0.0453,
"step": 440
},
{
"epoch": 1.4657980456026058,
"grad_norm": 0.541585385799408,
"learning_rate": 0.00016940381200284972,
"loss": 0.0441,
"step": 450
},
{
"epoch": 1.498371335504886,
"grad_norm": 0.3919298052787781,
"learning_rate": 0.0001678365503546528,
"loss": 0.0348,
"step": 460
},
{
"epoch": 1.5309446254071661,
"grad_norm": 0.626324474811554,
"learning_rate": 0.0001662377944289948,
"loss": 0.0426,
"step": 470
},
{
"epoch": 1.5635179153094463,
"grad_norm": 4.0510358810424805,
"learning_rate": 0.0001646082864756282,
"loss": 0.0422,
"step": 480
},
{
"epoch": 1.5960912052117264,
"grad_norm": 0.6301591396331787,
"learning_rate": 0.00016294878302145987,
"loss": 0.0435,
"step": 490
},
{
"epoch": 1.6286644951140063,
"grad_norm": 0.6829243302345276,
"learning_rate": 0.0001612600545193203,
"loss": 0.0566,
"step": 500
},
{
"epoch": 1.6612377850162865,
"grad_norm": 0.721428632736206,
"learning_rate": 0.00015954288499026782,
"loss": 0.051,
"step": 510
},
{
"epoch": 1.6938110749185666,
"grad_norm": 0.810246467590332,
"learning_rate": 0.0001577980716595934,
"loss": 0.0509,
"step": 520
},
{
"epoch": 1.7263843648208468,
"grad_norm": 0.8535786867141724,
"learning_rate": 0.00015602642458669528,
"loss": 0.0527,
"step": 530
},
{
"epoch": 1.758957654723127,
"grad_norm": 0.7078109383583069,
"learning_rate": 0.0001542287662889948,
"loss": 0.0517,
"step": 540
},
{
"epoch": 1.791530944625407,
"grad_norm": 0.9758318662643433,
"learning_rate": 0.00015240593136006897,
"loss": 0.0425,
"step": 550
},
{
"epoch": 1.8241042345276872,
"grad_norm": 1.1173475980758667,
"learning_rate": 0.0001505587660821759,
"loss": 0.0462,
"step": 560
},
{
"epoch": 1.8566775244299674,
"grad_norm": 0.44922390580177307,
"learning_rate": 0.0001486881280333539,
"loss": 0.0519,
"step": 570
},
{
"epoch": 1.8892508143322475,
"grad_norm": 0.5552352666854858,
"learning_rate": 0.00014679488568927616,
"loss": 0.0518,
"step": 580
},
{
"epoch": 1.9218241042345277,
"grad_norm": 0.6189628839492798,
"learning_rate": 0.00014487991802004623,
"loss": 0.0397,
"step": 590
},
{
"epoch": 1.9543973941368078,
"grad_norm": 0.5633209943771362,
"learning_rate": 0.0001429441140821209,
"loss": 0.0383,
"step": 600
},
{
"epoch": 1.986970684039088,
"grad_norm": 0.44652634859085083,
"learning_rate": 0.00014098837260555084,
"loss": 0.0416,
"step": 610
},
{
"epoch": 2.019543973941368,
"grad_norm": 0.6565033197402954,
"learning_rate": 0.0001390136015767295,
"loss": 0.0422,
"step": 620
},
{
"epoch": 2.0521172638436482,
"grad_norm": 0.5799785256385803,
"learning_rate": 0.00013702071781684517,
"loss": 0.0484,
"step": 630
},
{
"epoch": 2.0846905537459284,
"grad_norm": 0.46765848994255066,
"learning_rate": 0.00013501064655623094,
"loss": 0.0303,
"step": 640
},
{
"epoch": 2.1172638436482085,
"grad_norm": 0.5322710275650024,
"learning_rate": 0.00013298432100481079,
"loss": 0.0321,
"step": 650
},
{
"epoch": 2.1498371335504887,
"grad_norm": 0.9900329113006592,
"learning_rate": 0.0001309426819188409,
"loss": 0.0377,
"step": 660
},
{
"epoch": 2.182410423452769,
"grad_norm": 0.6508163809776306,
"learning_rate": 0.0001288866771641474,
"loss": 0.0341,
"step": 670
},
{
"epoch": 2.214983713355049,
"grad_norm": 0.6541309356689453,
"learning_rate": 0.00012681726127606376,
"loss": 0.0535,
"step": 680
},
{
"epoch": 2.247557003257329,
"grad_norm": 0.3469844162464142,
"learning_rate": 0.000124735395016271,
"loss": 0.0314,
"step": 690
},
{
"epoch": 2.2801302931596092,
"grad_norm": 0.5612144470214844,
"learning_rate": 0.00012264204492674815,
"loss": 0.0334,
"step": 700
},
{
"epoch": 2.3127035830618894,
"grad_norm": 0.6845806837081909,
"learning_rate": 0.0001205381828810382,
"loss": 0.0417,
"step": 710
},
{
"epoch": 2.3452768729641695,
"grad_norm": 0.6910083889961243,
"learning_rate": 0.00011842478563303952,
"loss": 0.0295,
"step": 720
},
{
"epoch": 2.3778501628664497,
"grad_norm": 0.5824999213218689,
"learning_rate": 0.00011630283436353098,
"loss": 0.0367,
"step": 730
},
{
"epoch": 2.41042345276873,
"grad_norm": 0.9331006407737732,
"learning_rate": 0.00011417331422464205,
"loss": 0.0349,
"step": 740
},
{
"epoch": 2.44299674267101,
"grad_norm": 0.33145973086357117,
"learning_rate": 0.00011203721388247923,
"loss": 0.0291,
"step": 750
},
{
"epoch": 2.47557003257329,
"grad_norm": 0.35993027687072754,
"learning_rate": 0.00010989552505812072,
"loss": 0.0415,
"step": 760
},
{
"epoch": 2.5081433224755703,
"grad_norm": 0.446781188249588,
"learning_rate": 0.0001077492420671931,
"loss": 0.0319,
"step": 770
},
{
"epoch": 2.5407166123778504,
"grad_norm": 0.31257572770118713,
"learning_rate": 0.00010559936135824322,
"loss": 0.0269,
"step": 780
},
{
"epoch": 2.5732899022801305,
"grad_norm": 0.4158976078033447,
"learning_rate": 0.00010344688105012005,
"loss": 0.0364,
"step": 790
},
{
"epoch": 2.6058631921824107,
"grad_norm": 0.4493694305419922,
"learning_rate": 0.00010129280046858086,
"loss": 0.0377,
"step": 800
},
{
"epoch": 2.6384364820846904,
"grad_norm": 0.4073673188686371,
"learning_rate": 9.913811968233716e-05,
"loss": 0.0203,
"step": 810
},
{
"epoch": 2.6710097719869705,
"grad_norm": 0.47181200981140137,
"learning_rate": 9.69838390387558e-05,
"loss": 0.0479,
"step": 820
},
{
"epoch": 2.7035830618892507,
"grad_norm": 0.3475622236728668,
"learning_rate": 9.483095869943055e-05,
"loss": 0.0256,
"step": 830
},
{
"epoch": 2.736156351791531,
"grad_norm": 0.3033672571182251,
"learning_rate": 9.268047817583998e-05,
"loss": 0.0196,
"step": 840
},
{
"epoch": 2.768729641693811,
"grad_norm": 0.38617008924484253,
"learning_rate": 9.053339586530723e-05,
"loss": 0.0246,
"step": 850
},
{
"epoch": 2.801302931596091,
"grad_norm": 0.4176099896430969,
"learning_rate": 8.839070858747697e-05,
"loss": 0.034,
"step": 860
},
{
"epoch": 2.8338762214983713,
"grad_norm": 0.5051398277282715,
"learning_rate": 8.625341112152487e-05,
"loss": 0.0267,
"step": 870
},
{
"epoch": 2.8664495114006514,
"grad_norm": 0.3694643974304199,
"learning_rate": 8.412249574431428e-05,
"loss": 0.0252,
"step": 880
},
{
"epoch": 2.8990228013029316,
"grad_norm": 0.47114214301109314,
"learning_rate": 8.199895176971488e-05,
"loss": 0.0331,
"step": 890
},
{
"epoch": 2.9315960912052117,
"grad_norm": 0.3039054572582245,
"learning_rate": 7.988376508929676e-05,
"loss": 0.0232,
"step": 900
},
{
"epoch": 2.964169381107492,
"grad_norm": 0.31323984265327454,
"learning_rate": 7.777791771461332e-05,
"loss": 0.0245,
"step": 910
},
{
"epoch": 2.996742671009772,
"grad_norm": 0.36317452788352966,
"learning_rate": 7.568238732128585e-05,
"loss": 0.0315,
"step": 920
},
{
"epoch": 3.029315960912052,
"grad_norm": 0.3417069911956787,
"learning_rate": 7.359814679510065e-05,
"loss": 0.031,
"step": 930
},
{
"epoch": 3.0618892508143323,
"grad_norm": 0.5224521160125732,
"learning_rate": 7.152616378033042e-05,
"loss": 0.0198,
"step": 940
},
{
"epoch": 3.0944625407166124,
"grad_norm": 0.45148587226867676,
"learning_rate": 6.94674002304887e-05,
"loss": 0.0243,
"step": 950
},
{
"epoch": 3.1270358306188926,
"grad_norm": 0.6386067271232605,
"learning_rate": 6.742281196172663e-05,
"loss": 0.025,
"step": 960
},
{
"epoch": 3.1596091205211727,
"grad_norm": 0.3056974709033966,
"learning_rate": 6.539334820907888e-05,
"loss": 0.0162,
"step": 970
},
{
"epoch": 3.192182410423453,
"grad_norm": 0.38449615240097046,
"learning_rate": 6.337995118576521e-05,
"loss": 0.0236,
"step": 980
},
{
"epoch": 3.224755700325733,
"grad_norm": 0.7297264933586121,
"learning_rate": 6.138355564575169e-05,
"loss": 0.0211,
"step": 990
},
{
"epoch": 3.257328990228013,
"grad_norm": 0.2978608310222626,
"learning_rate": 5.940508844977537e-05,
"loss": 0.0238,
"step": 1000
},
{
"epoch": 3.2899022801302933,
"grad_norm": 0.2972109913825989,
"learning_rate": 5.744546813503328e-05,
"loss": 0.0341,
"step": 1010
},
{
"epoch": 3.3224755700325734,
"grad_norm": 3.2639620304107666,
"learning_rate": 5.550560448873575e-05,
"loss": 0.0264,
"step": 1020
},
{
"epoch": 3.3550488599348536,
"grad_norm": 0.28048136830329895,
"learning_rate": 5.358639812572244e-05,
"loss": 0.0199,
"step": 1030
},
{
"epoch": 3.3876221498371337,
"grad_norm": 0.5666626691818237,
"learning_rate": 5.168874007033615e-05,
"loss": 0.0222,
"step": 1040
},
{
"epoch": 3.420195439739414,
"grad_norm": 0.2740839421749115,
"learning_rate": 4.9813511342749805e-05,
"loss": 0.0207,
"step": 1050
},
{
"epoch": 3.4527687296416936,
"grad_norm": 0.2190520465373993,
"learning_rate": 4.7961582549937675e-05,
"loss": 0.0214,
"step": 1060
},
{
"epoch": 3.4853420195439737,
"grad_norm": 0.2338235080242157,
"learning_rate": 4.6133813481481246e-05,
"loss": 0.0165,
"step": 1070
},
{
"epoch": 3.517915309446254,
"grad_norm": 0.30623218417167664,
"learning_rate": 4.433105271039721e-05,
"loss": 0.0192,
"step": 1080
},
{
"epoch": 3.550488599348534,
"grad_norm": 0.40116289258003235,
"learning_rate": 4.255413719917294e-05,
"loss": 0.023,
"step": 1090
},
{
"epoch": 3.583061889250814,
"grad_norm": 0.2895350158214569,
"learning_rate": 4.080389191119241e-05,
"loss": 0.0146,
"step": 1100
},
{
"epoch": 3.6156351791530943,
"grad_norm": 0.29448819160461426,
"learning_rate": 3.9081129427732774e-05,
"loss": 0.0166,
"step": 1110
},
{
"epoch": 3.6482084690553744,
"grad_norm": 0.21917672455310822,
"learning_rate": 3.7386649570709644e-05,
"loss": 0.0143,
"step": 1120
},
{
"epoch": 3.6807817589576546,
"grad_norm": 0.29176798462867737,
"learning_rate": 3.5721239031346066e-05,
"loss": 0.0164,
"step": 1130
},
{
"epoch": 3.7133550488599347,
"grad_norm": 0.23363909125328064,
"learning_rate": 3.408567100493787e-05,
"loss": 0.0134,
"step": 1140
},
{
"epoch": 3.745928338762215,
"grad_norm": 0.23968514800071716,
"learning_rate": 3.248070483188426e-05,
"loss": 0.0253,
"step": 1150
},
{
"epoch": 3.778501628664495,
"grad_norm": 0.32823416590690613,
"learning_rate": 3.090708564515124e-05,
"loss": 0.0184,
"step": 1160
},
{
"epoch": 3.811074918566775,
"grad_norm": 0.25938963890075684,
"learning_rate": 2.936554402433087e-05,
"loss": 0.0181,
"step": 1170
},
{
"epoch": 3.8436482084690553,
"grad_norm": 0.29427650570869446,
"learning_rate": 2.7856795656457257e-05,
"loss": 0.0154,
"step": 1180
},
{
"epoch": 3.8762214983713354,
"grad_norm": 0.2785288393497467,
"learning_rate": 2.6381541003736486e-05,
"loss": 0.0213,
"step": 1190
},
{
"epoch": 3.9087947882736156,
"grad_norm": 0.21833765506744385,
"learning_rate": 2.494046497834518e-05,
"loss": 0.019,
"step": 1200
},
{
"epoch": 3.9413680781758957,
"grad_norm": 0.3013763725757599,
"learning_rate": 2.3534236624448302e-05,
"loss": 0.0194,
"step": 1210
},
{
"epoch": 3.973941368078176,
"grad_norm": 0.5469600558280945,
"learning_rate": 2.2163508807583998e-05,
"loss": 0.0225,
"step": 1220
},
{
"epoch": 4.006514657980456,
"grad_norm": 0.2737639248371124,
"learning_rate": 2.082891791155954e-05,
"loss": 0.0142,
"step": 1230
},
{
"epoch": 4.039087947882736,
"grad_norm": 0.43073591589927673,
"learning_rate": 1.9531083542999317e-05,
"loss": 0.0164,
"step": 1240
},
{
"epoch": 4.071661237785016,
"grad_norm": 0.14076876640319824,
"learning_rate": 1.8270608243681953e-05,
"loss": 0.0115,
"step": 1250
},
{
"epoch": 4.1042345276872965,
"grad_norm": 0.2852550446987152,
"learning_rate": 1.7048077210799772e-05,
"loss": 0.0124,
"step": 1260
},
{
"epoch": 4.136807817589577,
"grad_norm": 0.24066734313964844,
"learning_rate": 1.5864058025271246e-05,
"loss": 0.0133,
"step": 1270
},
{
"epoch": 4.169381107491857,
"grad_norm": 0.24509824812412262,
"learning_rate": 1.47191003882317e-05,
"loss": 0.0167,
"step": 1280
},
{
"epoch": 4.201954397394137,
"grad_norm": 0.2472413182258606,
"learning_rate": 1.3613735865825305e-05,
"loss": 0.0166,
"step": 1290
},
{
"epoch": 4.234527687296417,
"grad_norm": 0.2663990557193756,
"learning_rate": 1.2548477642416256e-05,
"loss": 0.0164,
"step": 1300
},
{
"epoch": 4.267100977198697,
"grad_norm": 0.1704978048801422,
"learning_rate": 1.1523820282334219e-05,
"loss": 0.011,
"step": 1310
},
{
"epoch": 4.299674267100977,
"grad_norm": 0.26135244965553284,
"learning_rate": 1.0540239500264516e-05,
"loss": 0.0162,
"step": 1320
},
{
"epoch": 4.3322475570032575,
"grad_norm": 0.2877485454082489,
"learning_rate": 9.598191940389256e-06,
"loss": 0.0181,
"step": 1330
},
{
"epoch": 4.364820846905538,
"grad_norm": 0.5725845098495483,
"learning_rate": 8.698114964382598e-06,
"loss": 0.0283,
"step": 1340
},
{
"epoch": 4.397394136807818,
"grad_norm": 0.17230850458145142,
"learning_rate": 7.840426448358085e-06,
"loss": 0.0099,
"step": 1350
},
{
"epoch": 4.429967426710098,
"grad_norm": 0.42446282505989075,
"learning_rate": 7.025524588862542e-06,
"loss": 0.022,
"step": 1360
},
{
"epoch": 4.462540716612378,
"grad_norm": 0.15192359685897827,
"learning_rate": 6.253787718006498e-06,
"loss": 0.0179,
"step": 1370
},
{
"epoch": 4.495114006514658,
"grad_norm": 0.27675601840019226,
"learning_rate": 5.525574127817046e-06,
"loss": 0.0128,
"step": 1380
},
{
"epoch": 4.527687296416938,
"grad_norm": 0.273651123046875,
"learning_rate": 4.841221903894633e-06,
"loss": 0.0143,
"step": 1390
},
{
"epoch": 4.5602605863192185,
"grad_norm": 0.14251072704792023,
"learning_rate": 4.20104876845111e-06,
"loss": 0.0137,
"step": 1400
},
{
"epoch": 4.592833876221499,
"grad_norm": 0.2250894010066986,
"learning_rate": 3.605351932801693e-06,
"loss": 0.0248,
"step": 1410
},
{
"epoch": 4.625407166123779,
"grad_norm": 0.22826635837554932,
"learning_rate": 3.0544079593795573e-06,
"loss": 0.018,
"step": 1420
},
{
"epoch": 4.657980456026059,
"grad_norm": 0.20432162284851074,
"learning_rate": 2.548472633337007e-06,
"loss": 0.0181,
"step": 1430
},
{
"epoch": 4.690553745928339,
"grad_norm": 0.31331172585487366,
"learning_rate": 2.0877808437928637e-06,
"loss": 0.0144,
"step": 1440
},
{
"epoch": 4.723127035830619,
"grad_norm": 0.2865249812602997,
"learning_rate": 1.6725464747811447e-06,
"loss": 0.0201,
"step": 1450
},
{
"epoch": 4.755700325732899,
"grad_norm": 0.1912713199853897,
"learning_rate": 1.3029623059517493e-06,
"loss": 0.0116,
"step": 1460
},
{
"epoch": 4.7882736156351795,
"grad_norm": 0.1277429610490799,
"learning_rate": 9.791999230692629e-07,
"loss": 0.0119,
"step": 1470
},
{
"epoch": 4.82084690553746,
"grad_norm": 0.13365145027637482,
"learning_rate": 7.014096383512802e-07,
"loss": 0.0179,
"step": 1480
},
{
"epoch": 4.85342019543974,
"grad_norm": 0.3035740554332733,
"learning_rate": 4.6972042068341714e-07,
"loss": 0.0136,
"step": 1490
},
{
"epoch": 4.88599348534202,
"grad_norm": 0.2395792156457901,
"learning_rate": 2.8423983574328295e-07,
"loss": 0.0183,
"step": 1500
},
{
"epoch": 4.918566775244299,
"grad_norm": 0.15957923233509064,
"learning_rate": 1.4505399606130621e-07,
"loss": 0.011,
"step": 1510
},
{
"epoch": 4.95114006514658,
"grad_norm": 0.18425774574279785,
"learning_rate": 5.2227521041470216e-08,
"loss": 0.0129,
"step": 1520
},
{
"epoch": 4.9837133550488595,
"grad_norm": 0.3651920557022095,
"learning_rate": 5.803506960722072e-09,
"loss": 0.0144,
"step": 1530
},
{
"epoch": 5.0,
"step": 1535,
"total_flos": 5.197521391244208e+16,
"train_loss": 0.059920015540398684,
"train_runtime": 725.7561,
"train_samples_per_second": 33.841,
"train_steps_per_second": 2.115
}
],
"logging_steps": 10,
"max_steps": 1535,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 5.197521391244208e+16,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}