{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.498407643312102,
"eval_steps": 500,
"global_step": 1569,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01592356687898089,
"grad_norm": 4.3297224044799805,
"learning_rate": 2.5316455696202533e-05,
"loss": 0.86,
"step": 10
},
{
"epoch": 0.03184713375796178,
"grad_norm": 6.637640476226807,
"learning_rate": 5.0632911392405066e-05,
"loss": 0.4332,
"step": 20
},
{
"epoch": 0.04777070063694268,
"grad_norm": 1.9116368293762207,
"learning_rate": 7.59493670886076e-05,
"loss": 0.3014,
"step": 30
},
{
"epoch": 0.06369426751592357,
"grad_norm": 1.2928426265716553,
"learning_rate": 0.00010126582278481013,
"loss": 0.2232,
"step": 40
},
{
"epoch": 0.07961783439490445,
"grad_norm": 1.4354435205459595,
"learning_rate": 0.00012658227848101267,
"loss": 0.1771,
"step": 50
},
{
"epoch": 0.09554140127388536,
"grad_norm": 1.710893154144287,
"learning_rate": 0.0001518987341772152,
"loss": 0.2136,
"step": 60
},
{
"epoch": 0.11146496815286625,
"grad_norm": 1.6920921802520752,
"learning_rate": 0.00017721518987341773,
"loss": 0.1644,
"step": 70
},
{
"epoch": 0.12738853503184713,
"grad_norm": 1.2916326522827148,
"learning_rate": 0.00019999977772170748,
"loss": 0.1593,
"step": 80
},
{
"epoch": 0.14331210191082802,
"grad_norm": 1.599898099899292,
"learning_rate": 0.00019997310552224846,
"loss": 0.1578,
"step": 90
},
{
"epoch": 0.1592356687898089,
"grad_norm": 1.4364933967590332,
"learning_rate": 0.00019990199125039174,
"loss": 0.155,
"step": 100
},
{
"epoch": 0.1751592356687898,
"grad_norm": 1.7148686647415161,
"learning_rate": 0.00019978646651929572,
"loss": 0.1463,
"step": 110
},
{
"epoch": 0.1910828025477707,
"grad_norm": 1.2754517793655396,
"learning_rate": 0.0001996265826843568,
"loss": 0.1471,
"step": 120
},
{
"epoch": 0.2070063694267516,
"grad_norm": 1.0604585409164429,
"learning_rate": 0.00019942241082037982,
"loss": 0.124,
"step": 130
},
{
"epoch": 0.2229299363057325,
"grad_norm": 0.6741822361946106,
"learning_rate": 0.00019917404168998256,
"loss": 0.0979,
"step": 140
},
{
"epoch": 0.23885350318471338,
"grad_norm": 1.2134864330291748,
"learning_rate": 0.00019888158570324795,
"loss": 0.1105,
"step": 150
},
{
"epoch": 0.25477707006369427,
"grad_norm": 0.6342090368270874,
"learning_rate": 0.00019854517286864245,
"loss": 0.099,
"step": 160
},
{
"epoch": 0.27070063694267515,
"grad_norm": 1.0112156867980957,
"learning_rate": 0.000198164952735222,
"loss": 0.0928,
"step": 170
},
{
"epoch": 0.28662420382165604,
"grad_norm": 1.0317986011505127,
"learning_rate": 0.00019774109432615147,
"loss": 0.0963,
"step": 180
},
{
"epoch": 0.30254777070063693,
"grad_norm": 0.7419966459274292,
"learning_rate": 0.00019727378606356703,
"loss": 0.1069,
"step": 190
},
{
"epoch": 0.3184713375796178,
"grad_norm": 1.373571753501892,
"learning_rate": 0.00019676323568481498,
"loss": 0.1103,
"step": 200
},
{
"epoch": 0.3343949044585987,
"grad_norm": 0.6964902877807617,
"learning_rate": 0.00019620967015010395,
"loss": 0.0827,
"step": 210
},
{
"epoch": 0.3503184713375796,
"grad_norm": 0.9365194439888,
"learning_rate": 0.00019561333554161224,
"loss": 0.1046,
"step": 220
},
{
"epoch": 0.3662420382165605,
"grad_norm": 1.899908423423767,
"learning_rate": 0.00019497449695409408,
"loss": 0.1031,
"step": 230
},
{
"epoch": 0.3821656050955414,
"grad_norm": 0.8126282095909119,
"learning_rate": 0.00019429343837703455,
"loss": 0.1069,
"step": 240
},
{
"epoch": 0.3980891719745223,
"grad_norm": 1.244973063468933,
"learning_rate": 0.00019357046256840473,
"loss": 0.082,
"step": 250
},
{
"epoch": 0.4140127388535032,
"grad_norm": 0.8132479786872864,
"learning_rate": 0.00019280589092007352,
"loss": 0.0867,
"step": 260
},
{
"epoch": 0.4299363057324841,
"grad_norm": 0.8742226958274841,
"learning_rate": 0.0001920000633149362,
"loss": 0.0761,
"step": 270
},
{
"epoch": 0.445859872611465,
"grad_norm": 0.6832584738731384,
"learning_rate": 0.00019115333797582254,
"loss": 0.0845,
"step": 280
},
{
"epoch": 0.46178343949044587,
"grad_norm": 0.6030533909797668,
"learning_rate": 0.00019026609130625257,
"loss": 0.0722,
"step": 290
},
{
"epoch": 0.47770700636942676,
"grad_norm": 0.7257299423217773,
"learning_rate": 0.0001893387177231099,
"loss": 0.0701,
"step": 300
},
{
"epoch": 0.49363057324840764,
"grad_norm": 0.6390131711959839,
"learning_rate": 0.00018837162948130752,
"loss": 0.0696,
"step": 310
},
{
"epoch": 0.5095541401273885,
"grad_norm": 1.8211143016815186,
"learning_rate": 0.00018736525649052394,
"loss": 0.0757,
"step": 320
},
{
"epoch": 0.5254777070063694,
"grad_norm": 1.0733026266098022,
"learning_rate": 0.00018632004612409103,
"loss": 0.0835,
"step": 330
},
{
"epoch": 0.5414012738853503,
"grad_norm": 0.7683069705963135,
"learning_rate": 0.00018523646302011867,
"loss": 0.0765,
"step": 340
},
{
"epoch": 0.5573248407643312,
"grad_norm": 0.5842871069908142,
"learning_rate": 0.00018411498887494396,
"loss": 0.073,
"step": 350
},
{
"epoch": 0.5732484076433121,
"grad_norm": 0.6933338046073914,
"learning_rate": 0.0001829561222289984,
"loss": 0.0648,
"step": 360
},
{
"epoch": 0.589171974522293,
"grad_norm": 0.42543935775756836,
"learning_rate": 0.000181760378245186,
"loss": 0.072,
"step": 370
},
{
"epoch": 0.6050955414012739,
"grad_norm": 0.72137451171875,
"learning_rate": 0.0001805282884798732,
"loss": 0.0661,
"step": 380
},
{
"epoch": 0.6210191082802548,
"grad_norm": 0.6393111944198608,
"learning_rate": 0.00017926040064659014,
"loss": 0.0841,
"step": 390
},
{
"epoch": 0.6369426751592356,
"grad_norm": 0.46522364020347595,
"learning_rate": 0.00017795727837255015,
"loss": 0.0668,
"step": 400
},
{
"epoch": 0.6528662420382165,
"grad_norm": 0.790262758731842,
"learning_rate": 0.0001766195009480949,
"loss": 0.0702,
"step": 410
},
{
"epoch": 0.6687898089171974,
"grad_norm": 0.7343723177909851,
"learning_rate": 0.00017524766306917618,
"loss": 0.0751,
"step": 420
},
{
"epoch": 0.6847133757961783,
"grad_norm": 0.497380793094635,
"learning_rate": 0.00017384237457298987,
"loss": 0.0663,
"step": 430
},
{
"epoch": 0.7006369426751592,
"grad_norm": 0.5845641493797302,
"learning_rate": 0.00017240426016687863,
"loss": 0.0656,
"step": 440
},
{
"epoch": 0.7165605095541401,
"grad_norm": 1.0296058654785156,
"learning_rate": 0.00017093395915062428,
"loss": 0.0664,
"step": 450
},
{
"epoch": 0.732484076433121,
"grad_norm": 0.7704136371612549,
"learning_rate": 0.00016943212513225345,
"loss": 0.0597,
"step": 460
},
{
"epoch": 0.7484076433121019,
"grad_norm": 0.7734206318855286,
"learning_rate": 0.00016789942573748232,
"loss": 0.0638,
"step": 470
},
{
"epoch": 0.7643312101910829,
"grad_norm": 0.6289462447166443,
"learning_rate": 0.00016633654231293013,
"loss": 0.0573,
"step": 480
},
{
"epoch": 0.7802547770700637,
"grad_norm": 0.6616397500038147,
"learning_rate": 0.00016474416962323325,
"loss": 0.0506,
"step": 490
},
{
"epoch": 0.7961783439490446,
"grad_norm": 0.3910081386566162,
"learning_rate": 0.00016312301554219426,
"loss": 0.0457,
"step": 500
},
{
"epoch": 0.8121019108280255,
"grad_norm": 0.6165742874145508,
"learning_rate": 0.00016147380073810346,
"loss": 0.0503,
"step": 510
},
{
"epoch": 0.8280254777070064,
"grad_norm": 0.6931295990943909,
"learning_rate": 0.00015979725835337294,
"loss": 0.0481,
"step": 520
},
{
"epoch": 0.8439490445859873,
"grad_norm": 0.7499469518661499,
"learning_rate": 0.00015809413367862512,
"loss": 0.0725,
"step": 530
},
{
"epoch": 0.8598726114649682,
"grad_norm": 0.4135420620441437,
"learning_rate": 0.00015636518382138107,
"loss": 0.0646,
"step": 540
},
{
"epoch": 0.8757961783439491,
"grad_norm": 0.4710856080055237,
"learning_rate": 0.00015461117736949577,
"loss": 0.0538,
"step": 550
},
{
"epoch": 0.89171974522293,
"grad_norm": 0.428799033164978,
"learning_rate": 0.00015283289404948976,
"loss": 0.0593,
"step": 560
},
{
"epoch": 0.9076433121019108,
"grad_norm": 0.4142407774925232,
"learning_rate": 0.0001510311243799295,
"loss": 0.0605,
"step": 570
},
{
"epoch": 0.9235668789808917,
"grad_norm": 0.46757104992866516,
"learning_rate": 0.0001492066693200096,
"loss": 0.0634,
"step": 580
},
{
"epoch": 0.9394904458598726,
"grad_norm": 0.4138467609882355,
"learning_rate": 0.0001473603399134948,
"loss": 0.0586,
"step": 590
},
{
"epoch": 0.9554140127388535,
"grad_norm": 0.43409624695777893,
"learning_rate": 0.00014549295692817778,
"loss": 0.0579,
"step": 600
},
{
"epoch": 0.9713375796178344,
"grad_norm": 0.44268661737442017,
"learning_rate": 0.0001436053504910151,
"loss": 0.0637,
"step": 610
},
{
"epoch": 0.9872611464968153,
"grad_norm": 0.3840467929840088,
"learning_rate": 0.00014169835971910238,
"loss": 0.0511,
"step": 620
},
{
"epoch": 1.0031847133757963,
"grad_norm": 0.4400401711463928,
"learning_rate": 0.00013977283234665273,
"loss": 0.0477,
"step": 630
},
{
"epoch": 1.019108280254777,
"grad_norm": 0.3332836925983429,
"learning_rate": 0.00013782962434814492,
"loss": 0.0478,
"step": 640
},
{
"epoch": 1.035031847133758,
"grad_norm": 0.3282802700996399,
"learning_rate": 0.00013586959955780824,
"loss": 0.0508,
"step": 650
},
{
"epoch": 1.0509554140127388,
"grad_norm": 0.6200388073921204,
"learning_rate": 0.00013389362928561317,
"loss": 0.0479,
"step": 660
},
{
"epoch": 1.0668789808917198,
"grad_norm": 0.5026119947433472,
"learning_rate": 0.0001319025919299394,
"loss": 0.0402,
"step": 670
},
{
"epoch": 1.0828025477707006,
"grad_norm": 0.440782755613327,
"learning_rate": 0.00012989737258709203,
"loss": 0.0508,
"step": 680
},
{
"epoch": 1.0987261146496816,
"grad_norm": 0.34029659628868103,
"learning_rate": 0.0001278788626578407,
"loss": 0.044,
"step": 690
},
{
"epoch": 1.1146496815286624,
"grad_norm": 0.46119582653045654,
"learning_rate": 0.00012584795945115603,
"loss": 0.0426,
"step": 700
},
{
"epoch": 1.1305732484076434,
"grad_norm": 0.47696712613105774,
"learning_rate": 0.0001238055657853198,
"loss": 0.0515,
"step": 710
},
{
"epoch": 1.1464968152866242,
"grad_norm": 0.49719110131263733,
"learning_rate": 0.00012175258958658564,
"loss": 0.0453,
"step": 720
},
{
"epoch": 1.1624203821656052,
"grad_norm": 0.36977869272232056,
"learning_rate": 0.0001196899434855693,
"loss": 0.0529,
"step": 730
},
{
"epoch": 1.178343949044586,
"grad_norm": 0.3333928883075714,
"learning_rate": 0.00011761854441154767,
"loss": 0.0429,
"step": 740
},
{
"epoch": 1.194267515923567,
"grad_norm": 0.3594333231449127,
"learning_rate": 0.0001155393131848467,
"loss": 0.044,
"step": 750
},
{
"epoch": 1.2101910828025477,
"grad_norm": 0.33035799860954285,
"learning_rate": 0.00011345317410749964,
"loss": 0.0425,
"step": 760
},
{
"epoch": 1.2261146496815287,
"grad_norm": 0.431429386138916,
"learning_rate": 0.00011136105455235766,
"loss": 0.0395,
"step": 770
},
{
"epoch": 1.2420382165605095,
"grad_norm": 0.34141987562179565,
"learning_rate": 0.00010926388455083522,
"loss": 0.0455,
"step": 780
},
{
"epoch": 1.2579617834394905,
"grad_norm": 0.48105695843696594,
"learning_rate": 0.00010716259637947357,
"loss": 0.033,
"step": 790
},
{
"epoch": 1.2738853503184713,
"grad_norm": 0.40072181820869446,
"learning_rate": 0.0001050581241455064,
"loss": 0.0404,
"step": 800
},
{
"epoch": 1.2898089171974523,
"grad_norm": 0.5137969851493835,
"learning_rate": 0.00010295140337161146,
"loss": 0.0388,
"step": 810
},
{
"epoch": 1.305732484076433,
"grad_norm": 0.6727393865585327,
"learning_rate": 0.00010084337058003303,
"loss": 0.0455,
"step": 820
},
{
"epoch": 1.321656050955414,
"grad_norm": 0.36769551038742065,
"learning_rate": 9.873496287626019e-05,
"loss": 0.0416,
"step": 830
},
{
"epoch": 1.3375796178343948,
"grad_norm": 0.40487590432167053,
"learning_rate": 9.662711753244551e-05,
"loss": 0.0394,
"step": 840
},
{
"epoch": 1.3535031847133758,
"grad_norm": 0.3262194097042084,
"learning_rate": 9.452077157074994e-05,
"loss": 0.0371,
"step": 850
},
{
"epoch": 1.3694267515923566,
"grad_norm": 0.49285051226615906,
"learning_rate": 9.241686134679867e-05,
"loss": 0.0434,
"step": 860
},
{
"epoch": 1.3853503184713376,
"grad_norm": 0.4286457896232605,
"learning_rate": 9.031632213343339e-05,
"loss": 0.0388,
"step": 870
},
{
"epoch": 1.4012738853503186,
"grad_norm": 0.4375673234462738,
"learning_rate": 8.822008770494572e-05,
"loss": 0.0419,
"step": 880
},
{
"epoch": 1.4171974522292994,
"grad_norm": 0.34244051575660706,
"learning_rate": 8.612908992197705e-05,
"loss": 0.0478,
"step": 890
},
{
"epoch": 1.4331210191082802,
"grad_norm": 0.325065016746521,
"learning_rate": 8.404425831726894e-05,
"loss": 0.04,
"step": 900
},
{
"epoch": 1.4490445859872612,
"grad_norm": 0.4267027676105499,
"learning_rate": 8.196651968244826e-05,
"loss": 0.0377,
"step": 910
},
{
"epoch": 1.4649681528662422,
"grad_norm": 0.5654813051223755,
"learning_rate": 7.989679765603108e-05,
"loss": 0.0445,
"step": 920
},
{
"epoch": 1.480891719745223,
"grad_norm": 0.39815276861190796,
"learning_rate": 7.783601231282812e-05,
"loss": 0.0553,
"step": 930
},
{
"epoch": 1.4968152866242037,
"grad_norm": 0.46311256289482117,
"learning_rate": 7.578507975493448e-05,
"loss": 0.0373,
"step": 940
},
{
"epoch": 1.5127388535031847,
"grad_norm": 0.37769854068756104,
"learning_rate": 7.374491170448525e-05,
"loss": 0.0368,
"step": 950
},
{
"epoch": 1.5286624203821657,
"grad_norm": 0.2921994924545288,
"learning_rate": 7.17164150983584e-05,
"loss": 0.0428,
"step": 960
},
{
"epoch": 1.5445859872611465,
"grad_norm": 0.48072826862335205,
"learning_rate": 6.970049168500474e-05,
"loss": 0.0329,
"step": 970
},
{
"epoch": 1.5605095541401273,
"grad_norm": 0.37165966629981995,
"learning_rate": 6.769803762358443e-05,
"loss": 0.0376,
"step": 980
},
{
"epoch": 1.5764331210191083,
"grad_norm": 0.309915155172348,
"learning_rate": 6.570994308558812e-05,
"loss": 0.032,
"step": 990
},
{
"epoch": 1.5923566878980893,
"grad_norm": 0.41205787658691406,
"learning_rate": 6.373709185911998e-05,
"loss": 0.036,
"step": 1000
},
{
"epoch": 1.60828025477707,
"grad_norm": 0.45491883158683777,
"learning_rate": 6.17803609560181e-05,
"loss": 0.0453,
"step": 1010
},
{
"epoch": 1.6242038216560508,
"grad_norm": 0.4330281615257263,
"learning_rate": 5.98406202219875e-05,
"loss": 0.0319,
"step": 1020
},
{
"epoch": 1.6401273885350318,
"grad_norm": 0.2400377243757248,
"learning_rate": 5.791873194991872e-05,
"loss": 0.0342,
"step": 1030
},
{
"epoch": 1.6560509554140128,
"grad_norm": 0.26502475142478943,
"learning_rate": 5.601555049656382e-05,
"loss": 0.0318,
"step": 1040
},
{
"epoch": 1.6719745222929936,
"grad_norm": 0.2682415246963501,
"learning_rate": 5.41319219027404e-05,
"loss": 0.0301,
"step": 1050
},
{
"epoch": 1.6878980891719744,
"grad_norm": 0.38560014963150024,
"learning_rate": 5.226868351723244e-05,
"loss": 0.0432,
"step": 1060
},
{
"epoch": 1.7038216560509554,
"grad_norm": 0.1889456957578659,
"learning_rate": 5.042666362455498e-05,
"loss": 0.0255,
"step": 1070
},
{
"epoch": 1.7197452229299364,
"grad_norm": 0.3188437223434448,
"learning_rate": 4.860668107674823e-05,
"loss": 0.037,
"step": 1080
},
{
"epoch": 1.7356687898089171,
"grad_norm": 0.7025167942047119,
"learning_rate": 4.6809544929365004e-05,
"loss": 0.0357,
"step": 1090
},
{
"epoch": 1.7515923566878981,
"grad_norm": 0.39571383595466614,
"learning_rate": 4.503605408181286e-05,
"loss": 0.0273,
"step": 1100
},
{
"epoch": 1.767515923566879,
"grad_norm": 0.2839336693286896,
"learning_rate": 4.3286996922211034e-05,
"loss": 0.0302,
"step": 1110
},
{
"epoch": 1.78343949044586,
"grad_norm": 0.3293486535549164,
"learning_rate": 4.156315097692037e-05,
"loss": 0.0308,
"step": 1120
},
{
"epoch": 1.799363057324841,
"grad_norm": 0.3942393958568573,
"learning_rate": 3.986528256490141e-05,
"loss": 0.0293,
"step": 1130
},
{
"epoch": 1.8152866242038217,
"grad_norm": 0.31392380595207214,
"learning_rate": 3.8194146457054655e-05,
"loss": 0.0249,
"step": 1140
},
{
"epoch": 1.8312101910828025,
"grad_norm": 0.3941175639629364,
"learning_rate": 3.655048554069478e-05,
"loss": 0.032,
"step": 1150
},
{
"epoch": 1.8471337579617835,
"grad_norm": 0.2796083092689514,
"learning_rate": 3.4935030489306883e-05,
"loss": 0.034,
"step": 1160
},
{
"epoch": 1.8630573248407645,
"grad_norm": 0.30333149433135986,
"learning_rate": 3.334849943773323e-05,
"loss": 0.0351,
"step": 1170
},
{
"epoch": 1.8789808917197452,
"grad_norm": 0.3254244029521942,
"learning_rate": 3.179159766293282e-05,
"loss": 0.0298,
"step": 1180
},
{
"epoch": 1.894904458598726,
"grad_norm": 0.20831990242004395,
"learning_rate": 3.0265017270457775e-05,
"loss": 0.0288,
"step": 1190
},
{
"epoch": 1.910828025477707,
"grad_norm": 0.31671157479286194,
"learning_rate": 2.8769436886784408e-05,
"loss": 0.0334,
"step": 1200
},
{
"epoch": 1.926751592356688,
"grad_norm": 0.5833495259284973,
"learning_rate": 2.730552135763632e-05,
"loss": 0.0425,
"step": 1210
},
{
"epoch": 1.9426751592356688,
"grad_norm": 0.32988494634628296,
"learning_rate": 2.5873921452433915e-05,
"loss": 0.0328,
"step": 1220
},
{
"epoch": 1.9585987261146496,
"grad_norm": 0.20500314235687256,
"learning_rate": 2.4475273575000936e-05,
"loss": 0.0273,
"step": 1230
},
{
"epoch": 1.9745222929936306,
"grad_norm": 0.33419910073280334,
"learning_rate": 2.3110199480657525e-05,
"loss": 0.0264,
"step": 1240
},
{
"epoch": 1.9904458598726116,
"grad_norm": 0.20512598752975464,
"learning_rate": 2.1779305999824884e-05,
"loss": 0.0287,
"step": 1250
},
{
"epoch": 2.0063694267515926,
"grad_norm": 0.4389275312423706,
"learning_rate": 2.0483184768264596e-05,
"loss": 0.0306,
"step": 1260
},
{
"epoch": 2.022292993630573,
"grad_norm": 0.18635383248329163,
"learning_rate": 1.9222411964072884e-05,
"loss": 0.028,
"step": 1270
},
{
"epoch": 2.038216560509554,
"grad_norm": 0.32153043150901794,
"learning_rate": 1.799754805154603e-05,
"loss": 0.0274,
"step": 1280
},
{
"epoch": 2.054140127388535,
"grad_norm": 0.448691189289093,
"learning_rate": 1.6809137532031704e-05,
"loss": 0.0306,
"step": 1290
},
{
"epoch": 2.070063694267516,
"grad_norm": 0.38386714458465576,
"learning_rate": 1.565770870187585e-05,
"loss": 0.0247,
"step": 1300
},
{
"epoch": 2.0859872611464967,
"grad_norm": 0.18705415725708008,
"learning_rate": 1.4543773417573925e-05,
"loss": 0.025,
"step": 1310
},
{
"epoch": 2.1019108280254777,
"grad_norm": 0.23585732281208038,
"learning_rate": 1.3467826868229994e-05,
"loss": 0.0265,
"step": 1320
},
{
"epoch": 2.1178343949044587,
"grad_norm": 0.36259782314300537,
"learning_rate": 1.243034735542512e-05,
"loss": 0.0305,
"step": 1330
},
{
"epoch": 2.1337579617834397,
"grad_norm": 0.34997355937957764,
"learning_rate": 1.1431796080593283e-05,
"loss": 0.032,
"step": 1340
},
{
"epoch": 2.1496815286624202,
"grad_norm": 0.3579126298427582,
"learning_rate": 1.0472616939998492e-05,
"loss": 0.024,
"step": 1350
},
{
"epoch": 2.1656050955414012,
"grad_norm": 0.41504374146461487,
"learning_rate": 9.553236327405246e-06,
"loss": 0.0259,
"step": 1360
},
{
"epoch": 2.1815286624203822,
"grad_norm": 0.2632668912410736,
"learning_rate": 8.674062944529216e-06,
"loss": 0.0301,
"step": 1370
},
{
"epoch": 2.1974522292993632,
"grad_norm": 0.3617275059223175,
"learning_rate": 7.835487619352811e-06,
"loss": 0.0232,
"step": 1380
},
{
"epoch": 2.213375796178344,
"grad_norm": 0.3129204213619232,
"learning_rate": 7.037883132386547e-06,
"loss": 0.0355,
"step": 1390
},
{
"epoch": 2.229299363057325,
"grad_norm": 0.2824781835079193,
"learning_rate": 6.2816040509530165e-06,
"loss": 0.0269,
"step": 1400
},
{
"epoch": 2.245222929936306,
"grad_norm": 0.20426948368549347,
"learning_rate": 5.566986571567401e-06,
"loss": 0.0355,
"step": 1410
},
{
"epoch": 2.261146496815287,
"grad_norm": 0.22026273608207703,
"learning_rate": 4.8943483704846475e-06,
"loss": 0.0267,
"step": 1420
},
{
"epoch": 2.2770700636942673,
"grad_norm": 0.27001041173934937,
"learning_rate": 4.263988462479484e-06,
"loss": 0.0286,
"step": 1430
},
{
"epoch": 2.2929936305732483,
"grad_norm": 0.22612400352954865,
"learning_rate": 3.676187067922421e-06,
"loss": 0.0263,
"step": 1440
},
{
"epoch": 2.3089171974522293,
"grad_norm": 0.3205125033855438,
"learning_rate": 3.131205488210409e-06,
"loss": 0.0311,
"step": 1450
},
{
"epoch": 2.3248407643312103,
"grad_norm": 0.2743224501609802,
"learning_rate": 2.6292859896079213e-06,
"loss": 0.0233,
"step": 1460
},
{
"epoch": 2.340764331210191,
"grad_norm": 0.23394609987735748,
"learning_rate": 2.170651695549786e-06,
"loss": 0.0161,
"step": 1470
},
{
"epoch": 2.356687898089172,
"grad_norm": 0.23162613809108734,
"learning_rate": 1.7555064874538397e-06,
"loss": 0.0243,
"step": 1480
},
{
"epoch": 2.372611464968153,
"grad_norm": 0.3181140720844269,
"learning_rate": 1.3840349140874619e-06,
"loss": 0.0224,
"step": 1490
},
{
"epoch": 2.388535031847134,
"grad_norm": 0.29145529866218567,
"learning_rate": 1.0564021095281652e-06,
"loss": 0.0345,
"step": 1500
},
{
"epoch": 2.404458598726115,
"grad_norm": 0.321485698223114,
"learning_rate": 7.727537197548707e-07,
"loss": 0.0205,
"step": 1510
},
{
"epoch": 2.4203821656050954,
"grad_norm": 0.2429160177707672,
"learning_rate": 5.332158379024122e-07,
"loss": 0.0217,
"step": 1520
},
{
"epoch": 2.4363057324840764,
"grad_norm": 0.22818560898303986,
"learning_rate": 3.3789494820803957e-07,
"loss": 0.0219,
"step": 1530
},
{
"epoch": 2.4522292993630574,
"grad_norm": 0.2582818865776062,
"learning_rate": 1.8687787867489592e-07,
"loss": 0.0186,
"step": 1540
},
{
"epoch": 2.468152866242038,
"grad_norm": 0.2430548369884491,
"learning_rate": 8.023176247348163e-08,
"loss": 0.0344,
"step": 1550
},
{
"epoch": 2.484076433121019,
"grad_norm": 0.38517120480537415,
"learning_rate": 1.8004008098226887e-08,
"loss": 0.0225,
"step": 1560
},
{
"epoch": 2.498407643312102,
"step": 1569,
"total_flos": 5.687859742190496e+16,
"train_loss": 0.06483107545027846,
"train_runtime": 706.8123,
"train_samples_per_second": 35.517,
"train_steps_per_second": 2.22
}
],
"logging_steps": 10,
"max_steps": 1569,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 5.687859742190496e+16,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}