{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 19.999535243996903,
  "eval_steps": 500,
  "global_step": 32260,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
|
{
|
|
"epoch": 0.061967467079783116,
|
|
"grad_norm": 20.777441024780273,
|
|
"learning_rate": 9.97179169249845e-06,
|
|
"loss": 5.1314,
|
|
"step": 100
|
|
},
|
|
{
|
|
"epoch": 0.12393493415956623,
|
|
"grad_norm": 7.313554763793945,
|
|
"learning_rate": 9.940793552386857e-06,
|
|
"loss": 2.5079,
|
|
"step": 200
|
|
},
|
|
{
|
|
"epoch": 0.18590240123934934,
|
|
"grad_norm": 6.049108982086182,
|
|
"learning_rate": 9.909795412275263e-06,
|
|
"loss": 2.262,
|
|
"step": 300
|
|
},
|
|
{
|
|
"epoch": 0.24786986831913246,
|
|
"grad_norm": 7.533471584320068,
|
|
"learning_rate": 9.878797272163671e-06,
|
|
"loss": 2.1505,
|
|
"step": 400
|
|
},
|
|
{
|
|
"epoch": 0.30983733539891556,
|
|
"grad_norm": 7.321562767028809,
|
|
"learning_rate": 9.847799132052077e-06,
|
|
"loss": 2.0719,
|
|
"step": 500
|
|
},
|
|
{
|
|
"epoch": 0.3718048024786987,
|
|
"grad_norm": 9.049120903015137,
|
|
"learning_rate": 9.816800991940483e-06,
|
|
"loss": 2.0476,
|
|
"step": 600
|
|
},
|
|
{
|
|
"epoch": 0.4337722695584818,
|
|
"grad_norm": 4.262355804443359,
|
|
"learning_rate": 9.785802851828891e-06,
|
|
"loss": 2.0214,
|
|
"step": 700
|
|
},
|
|
{
|
|
"epoch": 0.4957397366382649,
|
|
"grad_norm": 4.2409491539001465,
|
|
"learning_rate": 9.754804711717297e-06,
|
|
"loss": 2.0108,
|
|
"step": 800
|
|
},
|
|
{
|
|
"epoch": 0.557707203718048,
|
|
"grad_norm": 4.823696136474609,
|
|
"learning_rate": 9.723806571605703e-06,
|
|
"loss": 1.9636,
|
|
"step": 900
|
|
},
|
|
{
|
|
"epoch": 0.6196746707978311,
|
|
"grad_norm": 16.929576873779297,
|
|
"learning_rate": 9.692808431494111e-06,
|
|
"loss": 1.9787,
|
|
"step": 1000
|
|
},
|
|
{
|
|
"epoch": 0.6816421378776143,
|
|
"grad_norm": 4.100649833679199,
|
|
"learning_rate": 9.661810291382517e-06,
|
|
"loss": 1.9076,
|
|
"step": 1100
|
|
},
|
|
{
|
|
"epoch": 0.7436096049573974,
|
|
"grad_norm": 8.428403854370117,
|
|
"learning_rate": 9.630812151270923e-06,
|
|
"loss": 1.9312,
|
|
"step": 1200
|
|
},
|
|
{
|
|
"epoch": 0.8055770720371804,
|
|
"grad_norm": 21.134408950805664,
|
|
"learning_rate": 9.599814011159331e-06,
|
|
"loss": 1.9262,
|
|
"step": 1300
|
|
},
|
|
{
|
|
"epoch": 0.8675445391169636,
|
|
"grad_norm": 4.445871829986572,
|
|
"learning_rate": 9.568815871047737e-06,
|
|
"loss": 1.9038,
|
|
"step": 1400
|
|
},
|
|
{
|
|
"epoch": 0.9295120061967467,
|
|
"grad_norm": 10.70151424407959,
|
|
"learning_rate": 9.537817730936143e-06,
|
|
"loss": 1.909,
|
|
"step": 1500
|
|
},
|
|
{
|
|
"epoch": 0.9914794732765299,
|
|
"grad_norm": 4.220454692840576,
|
|
"learning_rate": 9.506819590824551e-06,
|
|
"loss": 1.9011,
|
|
"step": 1600
|
|
},
|
|
{
|
|
"epoch": 0.9995352439969016,
|
|
"eval_loss": 1.478387713432312,
|
|
"eval_runtime": 71.2201,
|
|
"eval_samples_per_second": 45.31,
|
|
"eval_steps_per_second": 22.662,
|
|
"step": 1613
|
|
},
|
|
{
|
|
"epoch": 1.0539116963594113,
|
|
"grad_norm": 5.241837024688721,
|
|
"learning_rate": 9.475821450712957e-06,
|
|
"loss": 1.8493,
|
|
"step": 1700
|
|
},
|
|
{
|
|
"epoch": 1.1158791634391945,
|
|
"grad_norm": 8.490580558776855,
|
|
"learning_rate": 9.444823310601364e-06,
|
|
"loss": 1.8844,
|
|
"step": 1800
|
|
},
|
|
{
|
|
"epoch": 1.1778466305189776,
|
|
"grad_norm": 4.5867743492126465,
|
|
"learning_rate": 9.413825170489771e-06,
|
|
"loss": 1.8588,
|
|
"step": 1900
|
|
},
|
|
{
|
|
"epoch": 1.2398140975987606,
|
|
"grad_norm": 5.750781536102295,
|
|
"learning_rate": 9.382827030378178e-06,
|
|
"loss": 1.8462,
|
|
"step": 2000
|
|
},
|
|
{
|
|
"epoch": 1.3017815646785438,
|
|
"grad_norm": 3.4414772987365723,
|
|
"learning_rate": 9.351828890266584e-06,
|
|
"loss": 1.8476,
|
|
"step": 2100
|
|
},
|
|
{
|
|
"epoch": 1.363749031758327,
|
|
"grad_norm": 3.991649627685547,
|
|
"learning_rate": 9.320830750154991e-06,
|
|
"loss": 1.8111,
|
|
"step": 2200
|
|
},
|
|
{
|
|
"epoch": 1.42571649883811,
|
|
"grad_norm": 3.774946689605713,
|
|
"learning_rate": 9.289832610043398e-06,
|
|
"loss": 1.8224,
|
|
"step": 2300
|
|
},
|
|
{
|
|
"epoch": 1.487683965917893,
|
|
"grad_norm": 11.512676239013672,
|
|
"learning_rate": 9.258834469931804e-06,
|
|
"loss": 1.8164,
|
|
"step": 2400
|
|
},
|
|
{
|
|
"epoch": 1.5496514329976763,
|
|
"grad_norm": 4.229243755340576,
|
|
"learning_rate": 9.227836329820212e-06,
|
|
"loss": 1.8052,
|
|
"step": 2500
|
|
},
|
|
{
|
|
"epoch": 1.6116189000774592,
|
|
"grad_norm": 3.6015431880950928,
|
|
"learning_rate": 9.196838189708618e-06,
|
|
"loss": 1.8253,
|
|
"step": 2600
|
|
},
|
|
{
|
|
"epoch": 1.6735863671572424,
|
|
"grad_norm": 3.8703105449676514,
|
|
"learning_rate": 9.165840049597024e-06,
|
|
"loss": 1.7702,
|
|
"step": 2700
|
|
},
|
|
{
|
|
"epoch": 1.7355538342370256,
|
|
"grad_norm": 3.6859114170074463,
|
|
"learning_rate": 9.134841909485432e-06,
|
|
"loss": 1.7925,
|
|
"step": 2800
|
|
},
|
|
{
|
|
"epoch": 1.7975213013168085,
|
|
"grad_norm": 6.744151592254639,
|
|
"learning_rate": 9.103843769373838e-06,
|
|
"loss": 1.7995,
|
|
"step": 2900
|
|
},
|
|
{
|
|
"epoch": 1.859488768396592,
|
|
"grad_norm": 5.791450500488281,
|
|
"learning_rate": 9.072845629262244e-06,
|
|
"loss": 1.7894,
|
|
"step": 3000
|
|
},
|
|
{
|
|
"epoch": 1.921456235476375,
|
|
"grad_norm": 5.028300762176514,
|
|
"learning_rate": 9.041847489150652e-06,
|
|
"loss": 1.7955,
|
|
"step": 3100
|
|
},
|
|
{
|
|
"epoch": 1.9834237025561579,
|
|
"grad_norm": 8.930970191955566,
|
|
"learning_rate": 9.010849349039058e-06,
|
|
"loss": 1.7856,
|
|
"step": 3200
|
|
},
|
|
{
|
|
"epoch": 1.9995352439969016,
|
|
"eval_loss": 1.438106894493103,
|
|
"eval_runtime": 71.4662,
|
|
"eval_samples_per_second": 45.154,
|
|
"eval_steps_per_second": 22.584,
|
|
"step": 3226
|
|
},
|
|
{
|
|
"epoch": 2.0458559256390396,
|
|
"grad_norm": 4.4157915115356445,
|
|
"learning_rate": 8.979851208927464e-06,
|
|
"loss": 1.7661,
|
|
"step": 3300
|
|
},
|
|
{
|
|
"epoch": 2.1078233927188226,
|
|
"grad_norm": 5.970052242279053,
|
|
"learning_rate": 8.948853068815872e-06,
|
|
"loss": 1.7539,
|
|
"step": 3400
|
|
},
|
|
{
|
|
"epoch": 2.1697908597986055,
|
|
"grad_norm": 6.610071659088135,
|
|
"learning_rate": 8.917854928704278e-06,
|
|
"loss": 1.7629,
|
|
"step": 3500
|
|
},
|
|
{
|
|
"epoch": 2.231758326878389,
|
|
"grad_norm": 5.124378204345703,
|
|
"learning_rate": 8.886856788592684e-06,
|
|
"loss": 1.7512,
|
|
"step": 3600
|
|
},
|
|
{
|
|
"epoch": 2.293725793958172,
|
|
"grad_norm": 3.7913496494293213,
|
|
"learning_rate": 8.855858648481092e-06,
|
|
"loss": 1.7574,
|
|
"step": 3700
|
|
},
|
|
{
|
|
"epoch": 2.3556932610379553,
|
|
"grad_norm": 3.5346639156341553,
|
|
"learning_rate": 8.824860508369498e-06,
|
|
"loss": 1.7298,
|
|
"step": 3800
|
|
},
|
|
{
|
|
"epoch": 2.4176607281177382,
|
|
"grad_norm": 3.6782634258270264,
|
|
"learning_rate": 8.793862368257904e-06,
|
|
"loss": 1.756,
|
|
"step": 3900
|
|
},
|
|
{
|
|
"epoch": 2.479628195197521,
|
|
"grad_norm": 3.087649345397949,
|
|
"learning_rate": 8.762864228146312e-06,
|
|
"loss": 1.7557,
|
|
"step": 4000
|
|
},
|
|
{
|
|
"epoch": 2.541595662277304,
|
|
"grad_norm": 3.854729413986206,
|
|
"learning_rate": 8.731866088034718e-06,
|
|
"loss": 1.7425,
|
|
"step": 4100
|
|
},
|
|
{
|
|
"epoch": 2.6035631293570876,
|
|
"grad_norm": 3.7198565006256104,
|
|
"learning_rate": 8.700867947923124e-06,
|
|
"loss": 1.7379,
|
|
"step": 4200
|
|
},
|
|
{
|
|
"epoch": 2.6655305964368705,
|
|
"grad_norm": 4.5739593505859375,
|
|
"learning_rate": 8.669869807811532e-06,
|
|
"loss": 1.7379,
|
|
"step": 4300
|
|
},
|
|
{
|
|
"epoch": 2.727498063516654,
|
|
"grad_norm": 4.533520698547363,
|
|
"learning_rate": 8.638871667699938e-06,
|
|
"loss": 1.7475,
|
|
"step": 4400
|
|
},
|
|
{
|
|
"epoch": 2.789465530596437,
|
|
"grad_norm": 3.113757610321045,
|
|
"learning_rate": 8.607873527588344e-06,
|
|
"loss": 1.7243,
|
|
"step": 4500
|
|
},
|
|
{
|
|
"epoch": 2.85143299767622,
|
|
"grad_norm": 3.061246871948242,
|
|
"learning_rate": 8.576875387476752e-06,
|
|
"loss": 1.7114,
|
|
"step": 4600
|
|
},
|
|
{
|
|
"epoch": 2.9134004647560032,
|
|
"grad_norm": 9.025867462158203,
|
|
"learning_rate": 8.545877247365158e-06,
|
|
"loss": 1.7184,
|
|
"step": 4700
|
|
},
|
|
{
|
|
"epoch": 2.975367931835786,
|
|
"grad_norm": 18.252887725830078,
|
|
"learning_rate": 8.514879107253565e-06,
|
|
"loss": 1.7165,
|
|
"step": 4800
|
|
},
|
|
{
|
|
"epoch": 2.9995352439969016,
|
|
"eval_loss": 1.4203578233718872,
|
|
"eval_runtime": 71.3513,
|
|
"eval_samples_per_second": 45.227,
|
|
"eval_steps_per_second": 22.62,
|
|
"step": 4839
|
|
},
|
|
{
|
|
"epoch": 3.0378001549186675,
|
|
"grad_norm": 3.237386703491211,
|
|
"learning_rate": 8.483880967141972e-06,
|
|
"loss": 1.7036,
|
|
"step": 4900
|
|
},
|
|
{
|
|
"epoch": 3.099767621998451,
|
|
"grad_norm": 3.4343390464782715,
|
|
"learning_rate": 8.452882827030378e-06,
|
|
"loss": 1.6937,
|
|
"step": 5000
|
|
},
|
|
{
|
|
"epoch": 3.161735089078234,
|
|
"grad_norm": 4.684260845184326,
|
|
"learning_rate": 8.421884686918785e-06,
|
|
"loss": 1.6825,
|
|
"step": 5100
|
|
},
|
|
{
|
|
"epoch": 3.223702556158017,
|
|
"grad_norm": 4.4700236320495605,
|
|
"learning_rate": 8.390886546807192e-06,
|
|
"loss": 1.7013,
|
|
"step": 5200
|
|
},
|
|
{
|
|
"epoch": 3.2856700232378,
|
|
"grad_norm": 3.5046918392181396,
|
|
"learning_rate": 8.359888406695599e-06,
|
|
"loss": 1.6819,
|
|
"step": 5300
|
|
},
|
|
{
|
|
"epoch": 3.347637490317583,
|
|
"grad_norm": 3.9169533252716064,
|
|
"learning_rate": 8.328890266584005e-06,
|
|
"loss": 1.6939,
|
|
"step": 5400
|
|
},
|
|
{
|
|
"epoch": 3.4096049573973666,
|
|
"grad_norm": 3.2398970127105713,
|
|
"learning_rate": 8.297892126472413e-06,
|
|
"loss": 1.689,
|
|
"step": 5500
|
|
},
|
|
{
|
|
"epoch": 3.4715724244771495,
|
|
"grad_norm": 5.543476104736328,
|
|
"learning_rate": 8.266893986360819e-06,
|
|
"loss": 1.6819,
|
|
"step": 5600
|
|
},
|
|
{
|
|
"epoch": 3.5335398915569325,
|
|
"grad_norm": 3.900956153869629,
|
|
"learning_rate": 8.235895846249225e-06,
|
|
"loss": 1.7153,
|
|
"step": 5700
|
|
},
|
|
{
|
|
"epoch": 3.595507358636716,
|
|
"grad_norm": 3.7085001468658447,
|
|
"learning_rate": 8.204897706137633e-06,
|
|
"loss": 1.6663,
|
|
"step": 5800
|
|
},
|
|
{
|
|
"epoch": 3.657474825716499,
|
|
"grad_norm": 3.672489881515503,
|
|
"learning_rate": 8.173899566026039e-06,
|
|
"loss": 1.6961,
|
|
"step": 5900
|
|
},
|
|
{
|
|
"epoch": 3.719442292796282,
|
|
"grad_norm": 5.308550834655762,
|
|
"learning_rate": 8.142901425914445e-06,
|
|
"loss": 1.6702,
|
|
"step": 6000
|
|
},
|
|
{
|
|
"epoch": 3.781409759876065,
|
|
"grad_norm": 6.25948429107666,
|
|
"learning_rate": 8.111903285802853e-06,
|
|
"loss": 1.7,
|
|
"step": 6100
|
|
},
|
|
{
|
|
"epoch": 3.843377226955848,
|
|
"grad_norm": 4.889005661010742,
|
|
"learning_rate": 8.080905145691259e-06,
|
|
"loss": 1.6783,
|
|
"step": 6200
|
|
},
|
|
{
|
|
"epoch": 3.905344694035631,
|
|
"grad_norm": 3.8252604007720947,
|
|
"learning_rate": 8.049907005579665e-06,
|
|
"loss": 1.6679,
|
|
"step": 6300
|
|
},
|
|
{
|
|
"epoch": 3.9673121611154145,
|
|
"grad_norm": 5.56712532043457,
|
|
"learning_rate": 8.018908865468073e-06,
|
|
"loss": 1.6778,
|
|
"step": 6400
|
|
},
|
|
{
|
|
"epoch": 3.9995352439969016,
|
|
"eval_loss": 1.4068138599395752,
|
|
"eval_runtime": 71.8216,
|
|
"eval_samples_per_second": 44.931,
|
|
"eval_steps_per_second": 22.472,
|
|
"step": 6452
|
|
},
|
|
{
|
|
"epoch": 4.029744384198296,
|
|
"grad_norm": 4.865719318389893,
|
|
"learning_rate": 7.987910725356479e-06,
|
|
"loss": 1.6718,
|
|
"step": 6500
|
|
},
|
|
{
|
|
"epoch": 4.091711851278079,
|
|
"grad_norm": 3.5504400730133057,
|
|
"learning_rate": 7.956912585244885e-06,
|
|
"loss": 1.6252,
|
|
"step": 6600
|
|
},
|
|
{
|
|
"epoch": 4.153679318357862,
|
|
"grad_norm": 2.8009276390075684,
|
|
"learning_rate": 7.925914445133293e-06,
|
|
"loss": 1.6529,
|
|
"step": 6700
|
|
},
|
|
{
|
|
"epoch": 4.215646785437645,
|
|
"grad_norm": 3.3487560749053955,
|
|
"learning_rate": 7.894916305021699e-06,
|
|
"loss": 1.6402,
|
|
"step": 6800
|
|
},
|
|
{
|
|
"epoch": 4.277614252517428,
|
|
"grad_norm": 3.037754535675049,
|
|
"learning_rate": 7.863918164910105e-06,
|
|
"loss": 1.6659,
|
|
"step": 6900
|
|
},
|
|
{
|
|
"epoch": 4.339581719597211,
|
|
"grad_norm": 3.340179204940796,
|
|
"learning_rate": 7.832920024798513e-06,
|
|
"loss": 1.646,
|
|
"step": 7000
|
|
},
|
|
{
|
|
"epoch": 4.401549186676995,
|
|
"grad_norm": 4.411381244659424,
|
|
"learning_rate": 7.801921884686919e-06,
|
|
"loss": 1.6583,
|
|
"step": 7100
|
|
},
|
|
{
|
|
"epoch": 4.463516653756778,
|
|
"grad_norm": 4.7763352394104,
|
|
"learning_rate": 7.770923744575325e-06,
|
|
"loss": 1.6524,
|
|
"step": 7200
|
|
},
|
|
{
|
|
"epoch": 4.525484120836561,
|
|
"grad_norm": 3.343606472015381,
|
|
"learning_rate": 7.739925604463733e-06,
|
|
"loss": 1.6454,
|
|
"step": 7300
|
|
},
|
|
{
|
|
"epoch": 4.587451587916344,
|
|
"grad_norm": 5.252141952514648,
|
|
"learning_rate": 7.70892746435214e-06,
|
|
"loss": 1.6555,
|
|
"step": 7400
|
|
},
|
|
{
|
|
"epoch": 4.649419054996127,
|
|
"grad_norm": 2.8249170780181885,
|
|
"learning_rate": 7.677929324240545e-06,
|
|
"loss": 1.6506,
|
|
"step": 7500
|
|
},
|
|
{
|
|
"epoch": 4.711386522075911,
|
|
"grad_norm": 6.525687217712402,
|
|
"learning_rate": 7.646931184128953e-06,
|
|
"loss": 1.6215,
|
|
"step": 7600
|
|
},
|
|
{
|
|
"epoch": 4.7733539891556935,
|
|
"grad_norm": 4.455771446228027,
|
|
"learning_rate": 7.615933044017359e-06,
|
|
"loss": 1.6465,
|
|
"step": 7700
|
|
},
|
|
{
|
|
"epoch": 4.8353214562354765,
|
|
"grad_norm": 2.8847739696502686,
|
|
"learning_rate": 7.584934903905766e-06,
|
|
"loss": 1.6411,
|
|
"step": 7800
|
|
},
|
|
{
|
|
"epoch": 4.8972889233152594,
|
|
"grad_norm": 2.8771958351135254,
|
|
"learning_rate": 7.553936763794173e-06,
|
|
"loss": 1.6353,
|
|
"step": 7900
|
|
},
|
|
{
|
|
"epoch": 4.959256390395042,
|
|
"grad_norm": 3.5082240104675293,
|
|
"learning_rate": 7.522938623682579e-06,
|
|
"loss": 1.6679,
|
|
"step": 8000
|
|
},
|
|
{
|
|
"epoch": 4.999535243996902,
|
|
"eval_loss": 1.3975656032562256,
|
|
"eval_runtime": 71.9374,
|
|
"eval_samples_per_second": 44.858,
|
|
"eval_steps_per_second": 22.436,
|
|
"step": 8065
|
|
},
|
|
{
|
|
"epoch": 5.021688613477924,
|
|
"grad_norm": 3.450159788131714,
|
|
"learning_rate": 7.491940483570986e-06,
|
|
"loss": 1.6245,
|
|
"step": 8100
|
|
},
|
|
{
|
|
"epoch": 5.083656080557708,
|
|
"grad_norm": 3.078734874725342,
|
|
"learning_rate": 7.460942343459393e-06,
|
|
"loss": 1.5949,
|
|
"step": 8200
|
|
},
|
|
{
|
|
"epoch": 5.1456235476374905,
|
|
"grad_norm": 3.123811721801758,
|
|
"learning_rate": 7.4299442033477995e-06,
|
|
"loss": 1.5804,
|
|
"step": 8300
|
|
},
|
|
{
|
|
"epoch": 5.2075910147172735,
|
|
"grad_norm": 2.746354341506958,
|
|
"learning_rate": 7.3989460632362065e-06,
|
|
"loss": 1.6264,
|
|
"step": 8400
|
|
},
|
|
{
|
|
"epoch": 5.269558481797056,
|
|
"grad_norm": 3.969360113143921,
|
|
"learning_rate": 7.3679479231246135e-06,
|
|
"loss": 1.6285,
|
|
"step": 8500
|
|
},
|
|
{
|
|
"epoch": 5.331525948876839,
|
|
"grad_norm": 11.645624160766602,
|
|
"learning_rate": 7.33694978301302e-06,
|
|
"loss": 1.6196,
|
|
"step": 8600
|
|
},
|
|
{
|
|
"epoch": 5.393493415956623,
|
|
"grad_norm": 4.237279415130615,
|
|
"learning_rate": 7.305951642901427e-06,
|
|
"loss": 1.6271,
|
|
"step": 8700
|
|
},
|
|
{
|
|
"epoch": 5.455460883036406,
|
|
"grad_norm": 5.038291931152344,
|
|
"learning_rate": 7.2749535027898336e-06,
|
|
"loss": 1.6097,
|
|
"step": 8800
|
|
},
|
|
{
|
|
"epoch": 5.517428350116189,
|
|
"grad_norm": 6.474031448364258,
|
|
"learning_rate": 7.24395536267824e-06,
|
|
"loss": 1.6148,
|
|
"step": 8900
|
|
},
|
|
{
|
|
"epoch": 5.579395817195972,
|
|
"grad_norm": 3.735469341278076,
|
|
"learning_rate": 7.212957222566647e-06,
|
|
"loss": 1.608,
|
|
"step": 9000
|
|
},
|
|
{
|
|
"epoch": 5.641363284275755,
|
|
"grad_norm": 4.774326801300049,
|
|
"learning_rate": 7.181959082455054e-06,
|
|
"loss": 1.6362,
|
|
"step": 9100
|
|
},
|
|
{
|
|
"epoch": 5.703330751355538,
|
|
"grad_norm": 6.094056606292725,
|
|
"learning_rate": 7.15096094234346e-06,
|
|
"loss": 1.5946,
|
|
"step": 9200
|
|
},
|
|
{
|
|
"epoch": 5.765298218435321,
|
|
"grad_norm": 2.613210439682007,
|
|
"learning_rate": 7.119962802231867e-06,
|
|
"loss": 1.6237,
|
|
"step": 9300
|
|
},
|
|
{
|
|
"epoch": 5.827265685515105,
|
|
"grad_norm": 5.291254043579102,
|
|
"learning_rate": 7.088964662120274e-06,
|
|
"loss": 1.6225,
|
|
"step": 9400
|
|
},
|
|
{
|
|
"epoch": 5.889233152594888,
|
|
"grad_norm": 3.139016628265381,
|
|
"learning_rate": 7.05796652200868e-06,
|
|
"loss": 1.6229,
|
|
"step": 9500
|
|
},
|
|
{
|
|
"epoch": 5.951200619674671,
|
|
"grad_norm": 2.9346985816955566,
|
|
"learning_rate": 7.026968381897087e-06,
|
|
"loss": 1.6091,
|
|
"step": 9600
|
|
},
|
|
{
|
|
"epoch": 5.999535243996902,
|
|
"eval_loss": 1.3893319368362427,
|
|
"eval_runtime": 71.9002,
|
|
"eval_samples_per_second": 44.882,
|
|
"eval_steps_per_second": 22.448,
|
|
"step": 9678
|
|
},
|
|
{
|
|
"epoch": 6.013632842757552,
|
|
"grad_norm": 2.6308019161224365,
|
|
"learning_rate": 6.995970241785494e-06,
|
|
"loss": 1.6145,
|
|
"step": 9700
|
|
},
|
|
{
|
|
"epoch": 6.075600309837335,
|
|
"grad_norm": 6.885835647583008,
|
|
"learning_rate": 6.9649721016739e-06,
|
|
"loss": 1.575,
|
|
"step": 9800
|
|
},
|
|
{
|
|
"epoch": 6.137567776917119,
|
|
"grad_norm": 8.84401798248291,
|
|
"learning_rate": 6.933973961562307e-06,
|
|
"loss": 1.5861,
|
|
"step": 9900
|
|
},
|
|
{
|
|
"epoch": 6.199535243996902,
|
|
"grad_norm": 3.830970287322998,
|
|
"learning_rate": 6.902975821450714e-06,
|
|
"loss": 1.6048,
|
|
"step": 10000
|
|
},
|
|
{
|
|
"epoch": 6.261502711076685,
|
|
"grad_norm": 2.8187596797943115,
|
|
"learning_rate": 6.87197768133912e-06,
|
|
"loss": 1.5923,
|
|
"step": 10100
|
|
},
|
|
{
|
|
"epoch": 6.323470178156468,
|
|
"grad_norm": 3.0165576934814453,
|
|
"learning_rate": 6.840979541227527e-06,
|
|
"loss": 1.5773,
|
|
"step": 10200
|
|
},
|
|
{
|
|
"epoch": 6.385437645236251,
|
|
"grad_norm": 8.133129119873047,
|
|
"learning_rate": 6.809981401115934e-06,
|
|
"loss": 1.576,
|
|
"step": 10300
|
|
},
|
|
{
|
|
"epoch": 6.447405112316034,
|
|
"grad_norm": 3.153198480606079,
|
|
"learning_rate": 6.77898326100434e-06,
|
|
"loss": 1.5922,
|
|
"step": 10400
|
|
},
|
|
{
|
|
"epoch": 6.5093725793958175,
|
|
"grad_norm": 3.5953714847564697,
|
|
"learning_rate": 6.7482951022938625e-06,
|
|
"loss": 1.5954,
|
|
"step": 10500
|
|
},
|
|
{
|
|
"epoch": 6.5713400464756,
|
|
"grad_norm": 3.0549609661102295,
|
|
"learning_rate": 6.7172969621822695e-06,
|
|
"loss": 1.5751,
|
|
"step": 10600
|
|
},
|
|
{
|
|
"epoch": 6.633307513555383,
|
|
"grad_norm": 3.2885892391204834,
|
|
"learning_rate": 6.686298822070676e-06,
|
|
"loss": 1.6103,
|
|
"step": 10700
|
|
},
|
|
{
|
|
"epoch": 6.695274980635166,
|
|
"grad_norm": 3.2449259757995605,
|
|
"learning_rate": 6.6553006819590826e-06,
|
|
"loss": 1.5879,
|
|
"step": 10800
|
|
},
|
|
{
|
|
"epoch": 6.757242447714949,
|
|
"grad_norm": 3.855558395385742,
|
|
"learning_rate": 6.6243025418474895e-06,
|
|
"loss": 1.5863,
|
|
"step": 10900
|
|
},
|
|
{
|
|
"epoch": 6.819209914794733,
|
|
"grad_norm": 7.185797214508057,
|
|
"learning_rate": 6.593304401735896e-06,
|
|
"loss": 1.5594,
|
|
"step": 11000
|
|
},
|
|
{
|
|
"epoch": 6.881177381874516,
|
|
"grad_norm": 5.2492804527282715,
|
|
"learning_rate": 6.562306261624303e-06,
|
|
"loss": 1.5716,
|
|
"step": 11100
|
|
},
|
|
{
|
|
"epoch": 6.943144848954299,
|
|
"grad_norm": 3.938108444213867,
|
|
"learning_rate": 6.53130812151271e-06,
|
|
"loss": 1.6033,
|
|
"step": 11200
|
|
},
|
|
{
|
|
"epoch": 6.999535243996902,
|
|
"eval_loss": 1.3868727684020996,
|
|
"eval_runtime": 71.7471,
|
|
"eval_samples_per_second": 44.977,
|
|
"eval_steps_per_second": 22.496,
|
|
"step": 11291
|
|
},
|
|
{
|
|
"epoch": 7.00557707203718,
|
|
"grad_norm": 3.1100411415100098,
|
|
"learning_rate": 6.500309981401116e-06,
|
|
"loss": 1.5961,
|
|
"step": 11300
|
|
},
|
|
{
|
|
"epoch": 7.067544539116963,
|
|
"grad_norm": 4.577524185180664,
|
|
"learning_rate": 6.469311841289523e-06,
|
|
"loss": 1.5562,
|
|
"step": 11400
|
|
},
|
|
{
|
|
"epoch": 7.129512006196746,
|
|
"grad_norm": 3.86492919921875,
|
|
"learning_rate": 6.43831370117793e-06,
|
|
"loss": 1.5757,
|
|
"step": 11500
|
|
},
|
|
{
|
|
"epoch": 7.19147947327653,
|
|
"grad_norm": 6.450658321380615,
|
|
"learning_rate": 6.407315561066336e-06,
|
|
"loss": 1.5467,
|
|
"step": 11600
|
|
},
|
|
{
|
|
"epoch": 7.253446940356313,
|
|
"grad_norm": 3.469329357147217,
|
|
"learning_rate": 6.376317420954743e-06,
|
|
"loss": 1.5676,
|
|
"step": 11700
|
|
},
|
|
{
|
|
"epoch": 7.315414407436096,
|
|
"grad_norm": 3.4291296005249023,
|
|
"learning_rate": 6.34531928084315e-06,
|
|
"loss": 1.5693,
|
|
"step": 11800
|
|
},
|
|
{
|
|
"epoch": 7.377381874515879,
|
|
"grad_norm": 2.776810884475708,
|
|
"learning_rate": 6.314321140731556e-06,
|
|
"loss": 1.5761,
|
|
"step": 11900
|
|
},
|
|
{
|
|
"epoch": 7.439349341595662,
|
|
"grad_norm": 3.687276840209961,
|
|
"learning_rate": 6.283323000619963e-06,
|
|
"loss": 1.5582,
|
|
"step": 12000
|
|
},
|
|
{
|
|
"epoch": 7.501316808675446,
|
|
"grad_norm": 3.623704195022583,
|
|
"learning_rate": 6.25232486050837e-06,
|
|
"loss": 1.557,
|
|
"step": 12100
|
|
},
|
|
{
|
|
"epoch": 7.563284275755229,
|
|
"grad_norm": 2.607255220413208,
|
|
"learning_rate": 6.221326720396776e-06,
|
|
"loss": 1.5467,
|
|
"step": 12200
|
|
},
|
|
{
|
|
"epoch": 7.625251742835012,
|
|
"grad_norm": 4.738313674926758,
|
|
"learning_rate": 6.190328580285183e-06,
|
|
"loss": 1.5648,
|
|
"step": 12300
|
|
},
|
|
{
|
|
"epoch": 7.687219209914795,
|
|
"grad_norm": 3.4459781646728516,
|
|
"learning_rate": 6.15933044017359e-06,
|
|
"loss": 1.57,
|
|
"step": 12400
|
|
},
|
|
{
|
|
"epoch": 7.749186676994578,
|
|
"grad_norm": 2.6644325256347656,
|
|
"learning_rate": 6.128332300061996e-06,
|
|
"loss": 1.5757,
|
|
"step": 12500
|
|
},
|
|
{
|
|
"epoch": 7.811154144074361,
|
|
"grad_norm": 6.151153564453125,
|
|
"learning_rate": 6.097334159950403e-06,
|
|
"loss": 1.5653,
|
|
"step": 12600
|
|
},
|
|
{
|
|
"epoch": 7.873121611154144,
|
|
"grad_norm": 4.14183235168457,
|
|
"learning_rate": 6.06633601983881e-06,
|
|
"loss": 1.5708,
|
|
"step": 12700
|
|
},
|
|
{
|
|
"epoch": 7.935089078233927,
|
|
"grad_norm": 11.848958015441895,
|
|
"learning_rate": 6.035337879727216e-06,
|
|
"loss": 1.5633,
|
|
"step": 12800
|
|
},
|
|
{
|
|
"epoch": 7.99705654531371,
|
|
"grad_norm": 3.1628572940826416,
|
|
"learning_rate": 6.004339739615623e-06,
|
|
"loss": 1.548,
|
|
"step": 12900
|
|
},
|
|
{
|
|
"epoch": 7.999535243996902,
|
|
"eval_loss": 1.3799071311950684,
|
|
"eval_runtime": 71.5231,
|
|
"eval_samples_per_second": 45.118,
|
|
"eval_steps_per_second": 22.566,
|
|
"step": 12904
|
|
},
|
|
{
|
|
"epoch": 8.059488768396593,
|
|
"grad_norm": 2.565005302429199,
|
|
"learning_rate": 5.97334159950403e-06,
|
|
"loss": 1.5559,
|
|
"step": 13000
|
|
},
|
|
{
|
|
"epoch": 8.121456235476375,
|
|
"grad_norm": 3.772648334503174,
|
|
"learning_rate": 5.942343459392436e-06,
|
|
"loss": 1.5516,
|
|
"step": 13100
|
|
},
|
|
{
|
|
"epoch": 8.183423702556158,
|
|
"grad_norm": 3.1010353565216064,
|
|
"learning_rate": 5.911345319280843e-06,
|
|
"loss": 1.5494,
|
|
"step": 13200
|
|
},
|
|
{
|
|
"epoch": 8.24539116963594,
|
|
"grad_norm": 4.525482177734375,
|
|
"learning_rate": 5.88034717916925e-06,
|
|
"loss": 1.5391,
|
|
"step": 13300
|
|
},
|
|
{
|
|
"epoch": 8.307358636715724,
|
|
"grad_norm": 4.108111381530762,
|
|
"learning_rate": 5.8493490390576564e-06,
|
|
"loss": 1.5798,
|
|
"step": 13400
|
|
},
|
|
{
|
|
"epoch": 8.369326103795508,
|
|
"grad_norm": 3.5569262504577637,
|
|
"learning_rate": 5.818350898946063e-06,
|
|
"loss": 1.5332,
|
|
"step": 13500
|
|
},
|
|
{
|
|
"epoch": 8.43129357087529,
|
|
"grad_norm": 2.829259157180786,
|
|
"learning_rate": 5.78735275883447e-06,
|
|
"loss": 1.5564,
|
|
"step": 13600
|
|
},
|
|
{
|
|
"epoch": 8.493261037955074,
|
|
"grad_norm": 9.748035430908203,
|
|
"learning_rate": 5.7563546187228765e-06,
|
|
"loss": 1.5082,
|
|
"step": 13700
|
|
},
|
|
{
|
|
"epoch": 8.555228505034856,
|
|
"grad_norm": 5.017675399780273,
|
|
"learning_rate": 5.7253564786112835e-06,
|
|
"loss": 1.5086,
|
|
"step": 13800
|
|
},
|
|
{
|
|
"epoch": 8.61719597211464,
|
|
"grad_norm": 3.3384227752685547,
|
|
"learning_rate": 5.6943583384996905e-06,
|
|
"loss": 1.5682,
|
|
"step": 13900
|
|
},
|
|
{
|
|
"epoch": 8.679163439194422,
|
|
"grad_norm": 3.0591983795166016,
|
|
"learning_rate": 5.663670179789214e-06,
|
|
"loss": 1.5359,
|
|
"step": 14000
|
|
},
|
|
{
|
|
"epoch": 8.741130906274206,
|
|
"grad_norm": 3.4246435165405273,
|
|
"learning_rate": 5.632672039677621e-06,
|
|
"loss": 1.5611,
|
|
"step": 14100
|
|
},
|
|
{
|
|
"epoch": 8.80309837335399,
|
|
"grad_norm": 3.443187713623047,
|
|
"learning_rate": 5.601673899566027e-06,
|
|
"loss": 1.5592,
|
|
"step": 14200
|
|
},
|
|
{
|
|
"epoch": 8.865065840433772,
|
|
"grad_norm": 2.6613450050354004,
|
|
"learning_rate": 5.570675759454434e-06,
|
|
"loss": 1.5484,
|
|
"step": 14300
|
|
},
|
|
{
|
|
"epoch": 8.927033307513556,
|
|
"grad_norm": 3.1063573360443115,
|
|
"learning_rate": 5.539677619342841e-06,
|
|
"loss": 1.5146,
|
|
"step": 14400
|
|
},
|
|
{
|
|
"epoch": 8.989000774593338,
|
|
"grad_norm": 3.6589224338531494,
|
|
"learning_rate": 5.508679479231247e-06,
|
|
"loss": 1.5641,
|
|
"step": 14500
|
|
},
|
|
{
|
|
"epoch": 8.999535243996903,
|
|
"eval_loss": 1.3773841857910156,
|
|
"eval_runtime": 71.387,
|
|
"eval_samples_per_second": 45.204,
|
|
"eval_steps_per_second": 22.609,
|
|
"step": 14517
|
|
},
|
|
{
|
|
"epoch": 9.05143299767622,
|
|
"grad_norm": 3.909100294113159,
|
|
"learning_rate": 5.477681339119654e-06,
|
|
"loss": 1.5137,
|
|
"step": 14600
|
|
},
|
|
{
|
|
"epoch": 9.113400464756003,
|
|
"grad_norm": 2.70833683013916,
|
|
"learning_rate": 5.446683199008061e-06,
|
|
"loss": 1.519,
|
|
"step": 14700
|
|
},
|
|
{
|
|
"epoch": 9.175367931835787,
|
|
"grad_norm": 2.6168839931488037,
|
|
"learning_rate": 5.415685058896467e-06,
|
|
"loss": 1.5082,
|
|
"step": 14800
|
|
},
|
|
{
|
|
"epoch": 9.237335398915569,
|
|
"grad_norm": 2.4784512519836426,
|
|
"learning_rate": 5.384686918784874e-06,
|
|
"loss": 1.5098,
|
|
"step": 14900
|
|
},
|
|
{
|
|
"epoch": 9.299302865995353,
|
|
"grad_norm": 2.7963085174560547,
|
|
"learning_rate": 5.353688778673281e-06,
|
|
"loss": 1.5257,
|
|
"step": 15000
|
|
},
|
|
{
|
|
"epoch": 9.361270333075135,
|
|
"grad_norm": 2.6264543533325195,
|
|
"learning_rate": 5.322690638561687e-06,
|
|
"loss": 1.5122,
|
|
"step": 15100
|
|
},
|
|
{
|
|
"epoch": 9.423237800154919,
|
|
"grad_norm": 2.796173095703125,
|
|
"learning_rate": 5.291692498450094e-06,
|
|
"loss": 1.5629,
|
|
"step": 15200
|
|
},
|
|
{
|
|
"epoch": 9.485205267234702,
|
|
"grad_norm": 2.661559820175171,
|
|
"learning_rate": 5.260694358338501e-06,
|
|
"loss": 1.5337,
|
|
"step": 15300
|
|
},
|
|
{
|
|
"epoch": 9.547172734314485,
|
|
"grad_norm": 2.721785068511963,
|
|
"learning_rate": 5.229696218226907e-06,
|
|
"loss": 1.5443,
|
|
"step": 15400
|
|
},
|
|
{
|
|
"epoch": 9.609140201394268,
|
|
"grad_norm": 3.5480453968048096,
|
|
"learning_rate": 5.198698078115314e-06,
|
|
"loss": 1.5475,
|
|
"step": 15500
|
|
},
|
|
{
|
|
"epoch": 9.67110766847405,
|
|
"grad_norm": 4.556975841522217,
|
|
"learning_rate": 5.167699938003721e-06,
|
|
"loss": 1.5032,
|
|
"step": 15600
|
|
},
|
|
{
|
|
"epoch": 9.733075135553834,
|
|
"grad_norm": 3.4796106815338135,
|
|
"learning_rate": 5.136701797892127e-06,
|
|
"loss": 1.5328,
|
|
"step": 15700
|
|
},
|
|
{
|
|
"epoch": 9.795042602633618,
|
|
"grad_norm": 2.3589675426483154,
|
|
"learning_rate": 5.105703657780534e-06,
|
|
"loss": 1.5333,
|
|
"step": 15800
|
|
},
|
|
{
|
|
"epoch": 9.8570100697134,
|
|
"grad_norm": 8.50188159942627,
|
|
"learning_rate": 5.074705517668941e-06,
|
|
"loss": 1.5401,
|
|
"step": 15900
|
|
},
|
|
{
|
|
"epoch": 9.918977536793184,
|
|
"grad_norm": 2.818558931350708,
|
|
"learning_rate": 5.043707377557347e-06,
|
|
"loss": 1.5305,
|
|
"step": 16000
|
|
},
|
|
{
|
|
"epoch": 9.980945003872966,
|
|
"grad_norm": 7.929882526397705,
|
|
"learning_rate": 5.01301921884687e-06,
|
|
"loss": 1.5037,
|
|
"step": 16100
|
|
},
|
|
{
|
|
"epoch": 9.999535243996903,
|
|
"eval_loss": 1.3749291896820068,
|
|
"eval_runtime": 71.4165,
|
|
"eval_samples_per_second": 45.186,
|
|
"eval_steps_per_second": 22.6,
|
|
"step": 16130
|
|
},
|
|
{
|
|
"epoch": 10.043377226955847,
|
|
"grad_norm": 2.9387943744659424,
|
|
"learning_rate": 4.982021078735277e-06,
|
|
"loss": 1.5233,
|
|
"step": 16200
|
|
},
|
|
{
|
|
"epoch": 10.105344694035631,
|
|
"grad_norm": 3.994661331176758,
|
|
"learning_rate": 4.951022938623683e-06,
|
|
"loss": 1.4957,
|
|
"step": 16300
|
|
},
|
|
{
|
|
"epoch": 10.167312161115415,
|
|
"grad_norm": 5.1318840980529785,
|
|
"learning_rate": 4.92002479851209e-06,
|
|
"loss": 1.5198,
|
|
"step": 16400
|
|
},
|
|
{
|
|
"epoch": 10.229279628195197,
|
|
"grad_norm": 5.07450008392334,
|
|
"learning_rate": 4.889026658400497e-06,
|
|
"loss": 1.5168,
|
|
"step": 16500
|
|
},
|
|
{
|
|
"epoch": 10.291247095274981,
|
|
"grad_norm": 3.5865488052368164,
|
|
"learning_rate": 4.858028518288903e-06,
|
|
"loss": 1.5136,
|
|
"step": 16600
|
|
},
|
|
{
|
|
"epoch": 10.353214562354763,
|
|
"grad_norm": 3.8123815059661865,
|
|
"learning_rate": 4.82703037817731e-06,
|
|
"loss": 1.5095,
|
|
"step": 16700
|
|
},
|
|
{
|
|
"epoch": 10.415182029434547,
|
|
"grad_norm": 3.0475172996520996,
|
|
"learning_rate": 4.796032238065717e-06,
|
|
"loss": 1.5005,
|
|
"step": 16800
|
|
},
|
|
{
|
|
"epoch": 10.47714949651433,
|
|
"grad_norm": 3.0361108779907227,
|
|
"learning_rate": 4.765034097954123e-06,
|
|
"loss": 1.5169,
|
|
"step": 16900
|
|
},
|
|
{
|
|
"epoch": 10.539116963594113,
|
|
"grad_norm": 2.7775065898895264,
|
|
"learning_rate": 4.73403595784253e-06,
|
|
"loss": 1.5025,
|
|
"step": 17000
|
|
},
|
|
{
|
|
"epoch": 10.601084430673897,
|
|
"grad_norm": 4.8608598709106445,
|
|
"learning_rate": 4.703037817730937e-06,
|
|
"loss": 1.5216,
|
|
"step": 17100
|
|
},
|
|
{
|
|
"epoch": 10.663051897753679,
|
|
"grad_norm": 3.024165630340576,
|
|
"learning_rate": 4.672039677619343e-06,
|
|
"loss": 1.5107,
|
|
"step": 17200
|
|
},
|
|
{
|
|
"epoch": 10.725019364833463,
|
|
"grad_norm": 3.055216073989868,
|
|
"learning_rate": 4.64104153750775e-06,
|
|
"loss": 1.5083,
|
|
"step": 17300
|
|
},
|
|
{
|
|
"epoch": 10.786986831913246,
|
|
"grad_norm": 2.7835144996643066,
|
|
"learning_rate": 4.610043397396157e-06,
|
|
"loss": 1.501,
|
|
"step": 17400
|
|
},
|
|
{
|
|
"epoch": 10.848954298993029,
|
|
"grad_norm": 3.1248793601989746,
|
|
"learning_rate": 4.579045257284563e-06,
|
|
"loss": 1.519,
|
|
"step": 17500
|
|
},
|
|
{
|
|
"epoch": 10.910921766072812,
|
|
"grad_norm": 3.8737070560455322,
|
|
"learning_rate": 4.54804711717297e-06,
|
|
"loss": 1.5087,
|
|
"step": 17600
|
|
},
|
|
{
|
|
"epoch": 10.972889233152594,
|
|
"grad_norm": 2.474802017211914,
|
|
"learning_rate": 4.517048977061377e-06,
|
|
"loss": 1.4803,
|
|
"step": 17700
|
|
},
|
|
{
|
|
"epoch": 10.999535243996903,
|
|
"eval_loss": 1.3729863166809082,
|
|
"eval_runtime": 71.8366,
|
|
"eval_samples_per_second": 44.921,
|
|
"eval_steps_per_second": 22.468,
|
|
"step": 17743
|
|
},
|
|
{
|
|
"epoch": 11.035321456235476,
|
|
"grad_norm": 2.705595016479492,
|
|
"learning_rate": 4.486050836949783e-06,
|
|
"loss": 1.5091,
|
|
"step": 17800
|
|
},
|
|
{
|
|
"epoch": 11.09728892331526,
|
|
"grad_norm": 2.9070913791656494,
|
|
"learning_rate": 4.45505269683819e-06,
|
|
"loss": 1.5065,
|
|
"step": 17900
|
|
},
|
|
{
|
|
"epoch": 11.159256390395043,
|
|
"grad_norm": 2.360722780227661,
|
|
"learning_rate": 4.424054556726597e-06,
|
|
"loss": 1.4999,
|
|
"step": 18000
|
|
},
|
|
{
|
|
"epoch": 11.221223857474826,
|
|
"grad_norm": 2.7821319103240967,
|
|
"learning_rate": 4.393056416615003e-06,
|
|
"loss": 1.4863,
|
|
"step": 18100
|
|
},
|
|
{
|
|
"epoch": 11.28319132455461,
|
|
"grad_norm": 3.3408045768737793,
|
|
"learning_rate": 4.36205827650341e-06,
|
|
"loss": 1.4846,
|
|
"step": 18200
|
|
},
|
|
{
|
|
"epoch": 11.345158791634391,
|
|
"grad_norm": 2.658116579055786,
|
|
"learning_rate": 4.331060136391817e-06,
|
|
"loss": 1.4911,
|
|
"step": 18300
|
|
},
|
|
{
|
|
"epoch": 11.407126258714175,
|
|
"grad_norm": 7.110032081604004,
|
|
"learning_rate": 4.300061996280223e-06,
|
|
"loss": 1.5037,
|
|
"step": 18400
|
|
},
|
|
{
|
|
"epoch": 11.469093725793957,
|
|
"grad_norm": 3.009939193725586,
|
|
"learning_rate": 4.26906385616863e-06,
|
|
"loss": 1.481,
|
|
"step": 18500
|
|
},
|
|
{
|
|
"epoch": 11.531061192873741,
|
|
"grad_norm": 3.2190310955047607,
|
|
"learning_rate": 4.238065716057037e-06,
|
|
"loss": 1.5163,
|
|
"step": 18600
|
|
},
|
|
{
|
|
"epoch": 11.593028659953525,
|
|
"grad_norm": 3.240391969680786,
|
|
"learning_rate": 4.2070675759454435e-06,
|
|
"loss": 1.4934,
|
|
"step": 18700
|
|
},
|
|
{
|
|
"epoch": 11.654996127033307,
|
|
"grad_norm": 2.4078099727630615,
|
|
"learning_rate": 4.1760694358338505e-06,
|
|
"loss": 1.5076,
|
|
"step": 18800
|
|
},
|
|
{
|
|
"epoch": 11.716963594113091,
|
|
"grad_norm": 2.757641077041626,
|
|
"learning_rate": 4.1450712957222575e-06,
|
|
"loss": 1.5011,
|
|
"step": 18900
|
|
},
|
|
{
|
|
"epoch": 11.778931061192873,
|
|
"grad_norm": 3.859056234359741,
|
|
"learning_rate": 4.11438313701178e-06,
|
|
"loss": 1.5009,
|
|
"step": 19000
|
|
},
|
|
{
|
|
"epoch": 11.840898528272657,
|
|
"grad_norm": 2.826043128967285,
|
|
"learning_rate": 4.083384996900187e-06,
|
|
"loss": 1.5103,
|
|
"step": 19100
|
|
},
|
|
{
|
|
"epoch": 11.90286599535244,
|
|
"grad_norm": 3.0263473987579346,
|
|
"learning_rate": 4.052386856788593e-06,
|
|
"loss": 1.4945,
|
|
"step": 19200
|
|
},
|
|
{
|
|
"epoch": 11.964833462432223,
|
|
"grad_norm": 3.322974920272827,
|
|
"learning_rate": 4.021388716677e-06,
|
|
"loss": 1.4653,
|
|
"step": 19300
|
|
},
|
|
{
|
|
"epoch": 11.999535243996903,
|
|
"eval_loss": 1.3713266849517822,
|
|
"eval_runtime": 71.4255,
|
|
"eval_samples_per_second": 45.18,
|
|
"eval_steps_per_second": 22.597,
|
|
"step": 19356
|
|
},
|
|
{
|
|
"epoch": 12.027265685515104,
|
|
"grad_norm": 3.6630640029907227,
|
|
"learning_rate": 3.990390576565407e-06,
|
|
"loss": 1.5061,
|
|
"step": 19400
|
|
},
|
|
{
|
|
"epoch": 12.089233152594888,
|
|
"grad_norm": 2.810619831085205,
|
|
"learning_rate": 3.959392436453813e-06,
|
|
"loss": 1.5,
|
|
"step": 19500
|
|
},
|
|
{
|
|
"epoch": 12.15120061967467,
|
|
"grad_norm": 2.4999682903289795,
|
|
"learning_rate": 3.92839429634222e-06,
|
|
"loss": 1.4863,
|
|
"step": 19600
|
|
},
|
|
{
|
|
"epoch": 12.213168086754454,
|
|
"grad_norm": 3.3250415325164795,
|
|
"learning_rate": 3.897396156230627e-06,
|
|
"loss": 1.4719,
|
|
"step": 19700
|
|
},
|
|
{
|
|
"epoch": 12.275135553834238,
|
|
"grad_norm": 3.2047810554504395,
|
|
"learning_rate": 3.866398016119033e-06,
|
|
"loss": 1.492,
|
|
"step": 19800
|
|
},
|
|
{
|
|
"epoch": 12.33710302091402,
|
|
"grad_norm": 2.8707590103149414,
|
|
"learning_rate": 3.83539987600744e-06,
|
|
"loss": 1.4772,
|
|
"step": 19900
|
|
},
|
|
{
|
|
"epoch": 12.399070487993804,
|
|
"grad_norm": 5.695312023162842,
|
|
"learning_rate": 3.804401735895847e-06,
|
|
"loss": 1.4785,
|
|
"step": 20000
|
|
},
|
|
{
|
|
"epoch": 12.461037955073586,
|
|
"grad_norm": 3.48818039894104,
|
|
"learning_rate": 3.7734035957842536e-06,
|
|
"loss": 1.4789,
|
|
"step": 20100
|
|
},
|
|
{
|
|
"epoch": 12.52300542215337,
|
|
"grad_norm": 2.8195247650146484,
|
|
"learning_rate": 3.7424054556726597e-06,
|
|
"loss": 1.4859,
|
|
"step": 20200
|
|
},
|
|
{
|
|
"epoch": 12.584972889233153,
|
|
"grad_norm": 3.1309738159179688,
|
|
"learning_rate": 3.7114073155610663e-06,
|
|
"loss": 1.4914,
|
|
"step": 20300
|
|
},
|
|
{
|
|
"epoch": 12.646940356312935,
|
|
"grad_norm": 2.7474892139434814,
|
|
"learning_rate": 3.6804091754494733e-06,
|
|
"loss": 1.4776,
|
|
"step": 20400
|
|
},
|
|
{
|
|
"epoch": 12.70890782339272,
|
|
"grad_norm": 3.022172451019287,
|
|
"learning_rate": 3.64941103533788e-06,
|
|
"loss": 1.498,
|
|
"step": 20500
|
|
},
|
|
{
|
|
"epoch": 12.770875290472501,
|
|
"grad_norm": 2.747607946395874,
|
|
"learning_rate": 3.6184128952262864e-06,
|
|
"loss": 1.4901,
|
|
"step": 20600
|
|
},
|
|
{
|
|
"epoch": 12.832842757552285,
|
|
"grad_norm": 3.077296733856201,
|
|
"learning_rate": 3.5874147551146933e-06,
|
|
"loss": 1.4773,
|
|
"step": 20700
|
|
},
|
|
{
|
|
"epoch": 12.894810224632067,
|
|
"grad_norm": 4.3411078453063965,
|
|
"learning_rate": 3.5564166150031e-06,
|
|
"loss": 1.4909,
|
|
"step": 20800
|
|
},
|
|
{
|
|
"epoch": 12.956777691711851,
|
|
"grad_norm": 3.112104654312134,
|
|
"learning_rate": 3.5254184748915065e-06,
|
|
"loss": 1.5027,
|
|
"step": 20900
|
|
},
|
|
{
|
|
"epoch": 12.999535243996903,
|
|
"eval_loss": 1.3685630559921265,
|
|
"eval_runtime": 70.8923,
|
|
"eval_samples_per_second": 45.52,
|
|
"eval_steps_per_second": 22.767,
|
|
"step": 20969
|
|
},
|
|
{
|
|
"epoch": 13.019209914794732,
|
|
"grad_norm": 2.8162424564361572,
|
|
"learning_rate": 3.4944203347799134e-06,
|
|
"loss": 1.4868,
|
|
"step": 21000
|
|
},
|
|
{
|
|
"epoch": 13.081177381874516,
|
|
"grad_norm": 2.862400770187378,
|
|
"learning_rate": 3.4637321760694358e-06,
|
|
"loss": 1.4565,
|
|
"step": 21100
|
|
},
|
|
{
|
|
"epoch": 13.143144848954298,
|
|
"grad_norm": 9.166791915893555,
|
|
"learning_rate": 3.4327340359578427e-06,
|
|
"loss": 1.4968,
|
|
"step": 21200
|
|
},
|
|
{
|
|
"epoch": 13.205112316034082,
|
|
"grad_norm": 2.9333505630493164,
|
|
"learning_rate": 3.4017358958462493e-06,
|
|
"loss": 1.4707,
|
|
"step": 21300
|
|
},
|
|
{
|
|
"epoch": 13.267079783113866,
|
|
"grad_norm": 3.7271652221679688,
|
|
"learning_rate": 3.370737755734656e-06,
|
|
"loss": 1.4769,
|
|
"step": 21400
|
|
},
|
|
{
|
|
"epoch": 13.329047250193648,
|
|
"grad_norm": 4.895938396453857,
|
|
"learning_rate": 3.339739615623063e-06,
|
|
"loss": 1.4825,
|
|
"step": 21500
|
|
},
|
|
{
|
|
"epoch": 13.391014717273432,
|
|
"grad_norm": 4.098718643188477,
|
|
"learning_rate": 3.3087414755114694e-06,
|
|
"loss": 1.4473,
|
|
"step": 21600
|
|
},
|
|
{
|
|
"epoch": 13.452982184353214,
|
|
"grad_norm": 2.393143892288208,
|
|
"learning_rate": 3.277743335399876e-06,
|
|
"loss": 1.4547,
|
|
"step": 21700
|
|
},
|
|
{
|
|
"epoch": 13.514949651432998,
|
|
"grad_norm": 2.8273305892944336,
|
|
"learning_rate": 3.246745195288283e-06,
|
|
"loss": 1.4937,
|
|
"step": 21800
|
|
},
|
|
{
|
|
"epoch": 13.576917118512782,
|
|
"grad_norm": 2.560944080352783,
|
|
"learning_rate": 3.2157470551766895e-06,
|
|
"loss": 1.4617,
|
|
"step": 21900
|
|
},
|
|
{
|
|
"epoch": 13.638884585592564,
|
|
"grad_norm": 2.7508301734924316,
|
|
"learning_rate": 3.184748915065096e-06,
|
|
"loss": 1.4681,
|
|
"step": 22000
|
|
},
|
|
{
|
|
"epoch": 13.700852052672348,
|
|
"grad_norm": 3.0896778106689453,
|
|
"learning_rate": 3.153750774953503e-06,
|
|
"loss": 1.4863,
|
|
"step": 22100
|
|
},
|
|
{
|
|
"epoch": 13.76281951975213,
|
|
"grad_norm": 3.0587971210479736,
|
|
"learning_rate": 3.1227526348419096e-06,
|
|
"loss": 1.4872,
|
|
"step": 22200
|
|
},
|
|
{
|
|
"epoch": 13.824786986831914,
|
|
"grad_norm": 5.594135284423828,
|
|
"learning_rate": 3.091754494730316e-06,
|
|
"loss": 1.4551,
|
|
"step": 22300
|
|
},
|
|
{
|
|
"epoch": 13.886754453911696,
|
|
"grad_norm": 4.74953031539917,
|
|
"learning_rate": 3.060756354618723e-06,
|
|
"loss": 1.458,
|
|
"step": 22400
|
|
},
|
|
{
|
|
"epoch": 13.94872192099148,
|
|
"grad_norm": 2.3097078800201416,
|
|
"learning_rate": 3.0297582145071297e-06,
|
|
"loss": 1.4707,
|
|
"step": 22500
|
|
},
|
|
{
|
|
"epoch": 13.999535243996903,
|
|
"eval_loss": 1.367042899131775,
|
|
"eval_runtime": 71.4468,
|
|
"eval_samples_per_second": 45.167,
|
|
"eval_steps_per_second": 22.59,
|
|
"step": 22582
|
|
},
|
|
{
|
|
"epoch": 14.01115414407436,
|
|
"grad_norm": 2.73579478263855,
|
|
"learning_rate": 2.9987600743955362e-06,
|
|
"loss": 1.4815,
|
|
"step": 22600
|
|
},
|
|
{
|
|
"epoch": 14.073121611154145,
|
|
"grad_norm": 2.643583297729492,
|
|
"learning_rate": 2.967761934283943e-06,
|
|
"loss": 1.4623,
|
|
"step": 22700
|
|
},
|
|
{
|
|
"epoch": 14.135089078233927,
|
|
"grad_norm": 4.030912399291992,
|
|
"learning_rate": 2.9367637941723498e-06,
|
|
"loss": 1.4406,
|
|
"step": 22800
|
|
},
|
|
{
|
|
"epoch": 14.19705654531371,
|
|
"grad_norm": 3.95310378074646,
|
|
"learning_rate": 2.9057656540607563e-06,
|
|
"loss": 1.4533,
|
|
"step": 22900
|
|
},
|
|
{
|
|
"epoch": 14.259024012393493,
|
|
"grad_norm": 2.382369041442871,
|
|
"learning_rate": 2.8747675139491633e-06,
|
|
"loss": 1.4729,
|
|
"step": 23000
|
|
},
|
|
{
|
|
"epoch": 14.320991479473276,
|
|
"grad_norm": 3.3369767665863037,
|
|
"learning_rate": 2.84376937383757e-06,
|
|
"loss": 1.447,
|
|
"step": 23100
|
|
},
|
|
{
|
|
"epoch": 14.38295894655306,
|
|
"grad_norm": 3.532909393310547,
|
|
"learning_rate": 2.8127712337259764e-06,
|
|
"loss": 1.4646,
|
|
"step": 23200
|
|
},
|
|
{
|
|
"epoch": 14.444926413632842,
|
|
"grad_norm": 2.707247257232666,
|
|
"learning_rate": 2.7817730936143834e-06,
|
|
"loss": 1.4782,
|
|
"step": 23300
|
|
},
|
|
{
|
|
"epoch": 14.506893880712626,
|
|
"grad_norm": 2.511441707611084,
|
|
"learning_rate": 2.75077495350279e-06,
|
|
"loss": 1.4732,
|
|
"step": 23400
|
|
},
|
|
{
|
|
"epoch": 14.568861347792408,
|
|
"grad_norm": 6.0035576820373535,
|
|
"learning_rate": 2.7197768133911965e-06,
|
|
"loss": 1.4625,
|
|
"step": 23500
|
|
},
|
|
{
|
|
"epoch": 14.630828814872192,
|
|
"grad_norm": 3.396369695663452,
|
|
"learning_rate": 2.6887786732796035e-06,
|
|
"loss": 1.4794,
|
|
"step": 23600
|
|
},
|
|
{
|
|
"epoch": 14.692796281951976,
|
|
"grad_norm": 2.5729544162750244,
|
|
"learning_rate": 2.65778053316801e-06,
|
|
"loss": 1.4666,
|
|
"step": 23700
|
|
},
|
|
{
|
|
"epoch": 14.754763749031758,
|
|
"grad_norm": 2.8480477333068848,
|
|
"learning_rate": 2.6270923744575328e-06,
|
|
"loss": 1.4644,
|
|
"step": 23800
|
|
},
|
|
{
|
|
"epoch": 14.816731216111542,
|
|
"grad_norm": 2.4807331562042236,
|
|
"learning_rate": 2.5960942343459393e-06,
|
|
"loss": 1.4752,
|
|
"step": 23900
|
|
},
|
|
{
|
|
"epoch": 14.878698683191324,
|
|
"grad_norm": 2.850637435913086,
|
|
"learning_rate": 2.5650960942343463e-06,
|
|
"loss": 1.4769,
|
|
"step": 24000
|
|
},
|
|
{
|
|
"epoch": 14.940666150271108,
|
|
"grad_norm": 2.480095148086548,
|
|
"learning_rate": 2.534097954122753e-06,
|
|
"loss": 1.4791,
|
|
"step": 24100
|
|
},
|
|
{
|
|
"epoch": 14.999535243996903,
|
|
"eval_loss": 1.3670425415039062,
|
|
"eval_runtime": 71.2802,
|
|
"eval_samples_per_second": 45.272,
|
|
"eval_steps_per_second": 22.643,
|
|
"step": 24195
|
|
},
|
|
{
|
|
"epoch": 15.003098373353989,
|
|
"grad_norm": 2.5988998413085938,
|
|
"learning_rate": 2.5030998140111594e-06,
|
|
"loss": 1.4693,
|
|
"step": 24200
|
|
},
|
|
{
|
|
"epoch": 15.065065840433773,
|
|
"grad_norm": 2.674956798553467,
|
|
"learning_rate": 2.4721016738995664e-06,
|
|
"loss": 1.4584,
|
|
"step": 24300
|
|
},
|
|
{
|
|
"epoch": 15.127033307513555,
|
|
"grad_norm": 20.858081817626953,
|
|
"learning_rate": 2.441103533787973e-06,
|
|
"loss": 1.4593,
|
|
"step": 24400
|
|
},
|
|
{
|
|
"epoch": 15.189000774593339,
|
|
"grad_norm": 2.9068284034729004,
|
|
"learning_rate": 2.4101053936763795e-06,
|
|
"loss": 1.437,
|
|
"step": 24500
|
|
},
|
|
{
|
|
"epoch": 15.250968241673121,
|
|
"grad_norm": 2.5827035903930664,
|
|
"learning_rate": 2.3791072535647865e-06,
|
|
"loss": 1.4371,
|
|
"step": 24600
|
|
},
|
|
{
|
|
"epoch": 15.312935708752905,
|
|
"grad_norm": 2.852088451385498,
|
|
"learning_rate": 2.348109113453193e-06,
|
|
"loss": 1.4434,
|
|
"step": 24700
|
|
},
|
|
{
|
|
"epoch": 15.374903175832689,
|
|
"grad_norm": 3.28821063041687,
|
|
"learning_rate": 2.3171109733415996e-06,
|
|
"loss": 1.4606,
|
|
"step": 24800
|
|
},
|
|
{
|
|
"epoch": 15.43687064291247,
|
|
"grad_norm": 3.762103319168091,
|
|
"learning_rate": 2.2861128332300066e-06,
|
|
"loss": 1.4584,
|
|
"step": 24900
|
|
},
|
|
{
|
|
"epoch": 15.498838109992255,
|
|
"grad_norm": 2.845301866531372,
|
|
"learning_rate": 2.255114693118413e-06,
|
|
"loss": 1.4781,
|
|
"step": 25000
|
|
},
|
|
{
|
|
"epoch": 15.560805577072037,
|
|
"grad_norm": 3.243618965148926,
|
|
"learning_rate": 2.2241165530068197e-06,
|
|
"loss": 1.4489,
|
|
"step": 25100
|
|
},
|
|
{
|
|
"epoch": 15.62277304415182,
|
|
"grad_norm": 2.8567991256713867,
|
|
"learning_rate": 2.1931184128952267e-06,
|
|
"loss": 1.4768,
|
|
"step": 25200
|
|
},
|
|
{
|
|
"epoch": 15.684740511231603,
|
|
"grad_norm": 2.683873414993286,
|
|
"learning_rate": 2.162430254184749e-06,
|
|
"loss": 1.4578,
|
|
"step": 25300
|
|
},
|
|
{
|
|
"epoch": 15.746707978311386,
|
|
"grad_norm": 3.3914294242858887,
|
|
"learning_rate": 2.131432114073156e-06,
|
|
"loss": 1.4564,
|
|
"step": 25400
|
|
},
|
|
{
|
|
"epoch": 15.80867544539117,
|
|
"grad_norm": 2.8761868476867676,
|
|
"learning_rate": 2.1004339739615625e-06,
|
|
"loss": 1.4648,
|
|
"step": 25500
|
|
},
|
|
{
|
|
"epoch": 15.870642912470952,
|
|
"grad_norm": 3.788562059402466,
|
|
"learning_rate": 2.069435833849969e-06,
|
|
"loss": 1.4568,
|
|
"step": 25600
|
|
},
|
|
{
|
|
"epoch": 15.932610379550736,
|
|
"grad_norm": 3.973054885864258,
|
|
"learning_rate": 2.038437693738376e-06,
|
|
"loss": 1.4785,
|
|
"step": 25700
|
|
},
|
|
{
|
|
"epoch": 15.994577846630518,
|
|
"grad_norm": 4.635127544403076,
|
|
"learning_rate": 2.0074395536267826e-06,
|
|
"loss": 1.4741,
|
|
"step": 25800
|
|
},
|
|
{
|
|
"epoch": 15.999535243996903,
|
|
"eval_loss": 1.3650240898132324,
|
|
"eval_runtime": 71.1811,
|
|
"eval_samples_per_second": 45.335,
|
|
"eval_steps_per_second": 22.675,
|
|
"step": 25808
|
|
},
|
|
{
|
|
"epoch": 16.0570100697134,
|
|
"grad_norm": 3.8094494342803955,
|
|
"learning_rate": 1.976441413515189e-06,
|
|
"loss": 1.4585,
|
|
"step": 25900
|
|
},
|
|
{
|
|
"epoch": 16.118977536793185,
|
|
"grad_norm": 2.649723529815674,
|
|
"learning_rate": 1.945443273403596e-06,
|
|
"loss": 1.4624,
|
|
"step": 26000
|
|
},
|
|
{
|
|
"epoch": 16.180945003872967,
|
|
"grad_norm": 2.537461757659912,
|
|
"learning_rate": 1.9144451332920027e-06,
|
|
"loss": 1.4576,
|
|
"step": 26100
|
|
},
|
|
{
|
|
"epoch": 16.24291247095275,
|
|
"grad_norm": 3.1259541511535645,
|
|
"learning_rate": 1.8834469931804095e-06,
|
|
"loss": 1.4657,
|
|
"step": 26200
|
|
},
|
|
{
|
|
"epoch": 16.30487993803253,
|
|
"grad_norm": 3.396057367324829,
|
|
"learning_rate": 1.8524488530688158e-06,
|
|
"loss": 1.4532,
|
|
"step": 26300
|
|
},
|
|
{
|
|
"epoch": 16.366847405112317,
|
|
"grad_norm": 2.7809932231903076,
|
|
"learning_rate": 1.8214507129572226e-06,
|
|
"loss": 1.4483,
|
|
"step": 26400
|
|
},
|
|
{
|
|
"epoch": 16.4288148721921,
|
|
"grad_norm": 4.369870662689209,
|
|
"learning_rate": 1.7904525728456294e-06,
|
|
"loss": 1.4457,
|
|
"step": 26500
|
|
},
|
|
{
|
|
"epoch": 16.49078233927188,
|
|
"grad_norm": 2.7700088024139404,
|
|
"learning_rate": 1.759454432734036e-06,
|
|
"loss": 1.4722,
|
|
"step": 26600
|
|
},
|
|
{
|
|
"epoch": 16.552749806351667,
|
|
"grad_norm": 2.55792236328125,
|
|
"learning_rate": 1.7284562926224427e-06,
|
|
"loss": 1.4242,
|
|
"step": 26700
|
|
},
|
|
{
|
|
"epoch": 16.61471727343145,
|
|
"grad_norm": 3.0416712760925293,
|
|
"learning_rate": 1.6974581525108495e-06,
|
|
"loss": 1.4516,
|
|
"step": 26800
|
|
},
|
|
{
|
|
"epoch": 16.67668474051123,
|
|
"grad_norm": 2.6362993717193604,
|
|
"learning_rate": 1.666460012399256e-06,
|
|
"loss": 1.4263,
|
|
"step": 26900
|
|
},
|
|
{
|
|
"epoch": 16.738652207591016,
|
|
"grad_norm": 2.5275797843933105,
|
|
"learning_rate": 1.6354618722876628e-06,
|
|
"loss": 1.4513,
|
|
"step": 27000
|
|
},
|
|
{
|
|
"epoch": 16.8006196746708,
|
|
"grad_norm": 3.758063793182373,
|
|
"learning_rate": 1.6044637321760696e-06,
|
|
"loss": 1.4629,
|
|
"step": 27100
|
|
},
|
|
{
|
|
"epoch": 16.86258714175058,
|
|
"grad_norm": 2.653982162475586,
|
|
"learning_rate": 1.5734655920644761e-06,
|
|
"loss": 1.4549,
|
|
"step": 27200
|
|
},
|
|
{
|
|
"epoch": 16.924554608830363,
|
|
"grad_norm": 3.276362180709839,
|
|
"learning_rate": 1.5424674519528829e-06,
|
|
"loss": 1.4645,
|
|
"step": 27300
|
|
},
|
|
{
|
|
"epoch": 16.98652207591015,
|
|
"grad_norm": 4.8964056968688965,
|
|
"learning_rate": 1.5114693118412897e-06,
|
|
"loss": 1.4292,
|
|
"step": 27400
|
|
},
|
|
{
|
|
"epoch": 16.999535243996903,
|
|
"eval_loss": 1.3658363819122314,
|
|
"eval_runtime": 71.5249,
|
|
"eval_samples_per_second": 45.117,
|
|
"eval_steps_per_second": 22.566,
|
|
"step": 27421
|
|
},
|
|
{
|
|
"epoch": 17.048954298993028,
|
|
"grad_norm": 2.632962703704834,
|
|
"learning_rate": 1.4804711717296962e-06,
|
|
"loss": 1.4485,
|
|
"step": 27500
|
|
},
|
|
{
|
|
"epoch": 17.110921766072813,
|
|
"grad_norm": 2.65366792678833,
|
|
"learning_rate": 1.449473031618103e-06,
|
|
"loss": 1.4335,
|
|
"step": 27600
|
|
},
|
|
{
|
|
"epoch": 17.172889233152596,
|
|
"grad_norm": 7.086144924163818,
|
|
"learning_rate": 1.4184748915065097e-06,
|
|
"loss": 1.4483,
|
|
"step": 27700
|
|
},
|
|
{
|
|
"epoch": 17.234856700232378,
|
|
"grad_norm": 4.501598834991455,
|
|
"learning_rate": 1.3874767513949163e-06,
|
|
"loss": 1.4328,
|
|
"step": 27800
|
|
},
|
|
{
|
|
"epoch": 17.29682416731216,
|
|
"grad_norm": 3.042269706726074,
|
|
"learning_rate": 1.356478611283323e-06,
|
|
"loss": 1.4476,
|
|
"step": 27900
|
|
},
|
|
{
|
|
"epoch": 17.358791634391945,
|
|
"grad_norm": 2.902914047241211,
|
|
"learning_rate": 1.3254804711717298e-06,
|
|
"loss": 1.4551,
|
|
"step": 28000
|
|
},
|
|
{
|
|
"epoch": 17.420759101471727,
|
|
"grad_norm": 4.015540599822998,
|
|
"learning_rate": 1.2944823310601364e-06,
|
|
"loss": 1.4408,
|
|
"step": 28100
|
|
},
|
|
{
|
|
"epoch": 17.48272656855151,
|
|
"grad_norm": 2.1433181762695312,
|
|
"learning_rate": 1.2634841909485432e-06,
|
|
"loss": 1.4363,
|
|
"step": 28200
|
|
},
|
|
{
|
|
"epoch": 17.544694035631295,
|
|
"grad_norm": 3.0657026767730713,
|
|
"learning_rate": 1.23248605083695e-06,
|
|
"loss": 1.4519,
|
|
"step": 28300
|
|
},
|
|
{
|
|
"epoch": 17.606661502711077,
|
|
"grad_norm": 2.260270357131958,
|
|
"learning_rate": 1.2014879107253565e-06,
|
|
"loss": 1.4609,
|
|
"step": 28400
|
|
},
|
|
{
|
|
"epoch": 17.66862896979086,
|
|
"grad_norm": 3.2962841987609863,
|
|
"learning_rate": 1.1704897706137633e-06,
|
|
"loss": 1.4267,
|
|
"step": 28500
|
|
},
|
|
{
|
|
"epoch": 17.73059643687064,
|
|
"grad_norm": 2.8596572875976562,
|
|
"learning_rate": 1.13949163050217e-06,
|
|
"loss": 1.4485,
|
|
"step": 28600
|
|
},
|
|
{
|
|
"epoch": 17.792563903950427,
|
|
"grad_norm": 2.8049371242523193,
|
|
"learning_rate": 1.1084934903905766e-06,
|
|
"loss": 1.4272,
|
|
"step": 28700
|
|
},
|
|
{
|
|
"epoch": 17.85453137103021,
|
|
"grad_norm": 3.8641581535339355,
|
|
"learning_rate": 1.0774953502789834e-06,
|
|
"loss": 1.4426,
|
|
"step": 28800
|
|
},
|
|
{
|
|
"epoch": 17.91649883810999,
|
|
"grad_norm": 2.718754529953003,
|
|
"learning_rate": 1.0464972101673901e-06,
|
|
"loss": 1.4696,
|
|
"step": 28900
|
|
},
|
|
{
|
|
"epoch": 17.978466305189777,
|
|
"grad_norm": 2.8387906551361084,
|
|
"learning_rate": 1.0154990700557969e-06,
|
|
"loss": 1.437,
|
|
"step": 29000
|
|
},
|
|
{
|
|
"epoch": 17.999535243996903,
|
|
"eval_loss": 1.3650579452514648,
|
|
"eval_runtime": 71.088,
|
|
"eval_samples_per_second": 45.394,
|
|
"eval_steps_per_second": 22.704,
|
|
"step": 29034
|
|
},
|
|
{
|
|
"epoch": 18.040898528272656,
|
|
"grad_norm": 7.15321159362793,
|
|
"learning_rate": 9.845009299442034e-07,
|
|
"loss": 1.4728,
|
|
"step": 29100
|
|
},
|
|
{
|
|
"epoch": 18.10286599535244,
|
|
"grad_norm": 3.0295169353485107,
|
|
"learning_rate": 9.535027898326102e-07,
|
|
"loss": 1.444,
|
|
"step": 29200
|
|
},
|
|
{
|
|
"epoch": 18.164833462432224,
|
|
"grad_norm": 4.30358362197876,
|
|
"learning_rate": 9.225046497210168e-07,
|
|
"loss": 1.4373,
|
|
"step": 29300
|
|
},
|
|
{
|
|
"epoch": 18.226800929512006,
|
|
"grad_norm": 2.6451058387756348,
|
|
"learning_rate": 8.915065096094234e-07,
|
|
"loss": 1.4633,
|
|
"step": 29400
|
|
},
|
|
{
|
|
"epoch": 18.288768396591788,
|
|
"grad_norm": 2.5222158432006836,
|
|
"learning_rate": 8.605083694978301e-07,
|
|
"loss": 1.4361,
|
|
"step": 29500
|
|
},
|
|
{
|
|
"epoch": 18.350735863671574,
|
|
"grad_norm": 3.099586009979248,
|
|
"learning_rate": 8.295102293862369e-07,
|
|
"loss": 1.4451,
|
|
"step": 29600
|
|
},
|
|
{
|
|
"epoch": 18.412703330751356,
|
|
"grad_norm": 2.569441795349121,
|
|
"learning_rate": 7.985120892746435e-07,
|
|
"loss": 1.4167,
|
|
"step": 29700
|
|
},
|
|
{
|
|
"epoch": 18.474670797831138,
|
|
"grad_norm": 3.3484814167022705,
|
|
"learning_rate": 7.678239305641662e-07,
|
|
"loss": 1.4573,
|
|
"step": 29800
|
|
},
|
|
{
|
|
"epoch": 18.536638264910923,
|
|
"grad_norm": 3.530923843383789,
|
|
"learning_rate": 7.368257904525729e-07,
|
|
"loss": 1.4653,
|
|
"step": 29900
|
|
},
|
|
{
|
|
"epoch": 18.598605731990705,
|
|
"grad_norm": 2.9912829399108887,
|
|
"learning_rate": 7.058276503409796e-07,
|
|
"loss": 1.4376,
|
|
"step": 30000
|
|
},
|
|
{
|
|
"epoch": 18.660573199070488,
|
|
"grad_norm": 4.254590034484863,
|
|
"learning_rate": 6.748295102293862e-07,
|
|
"loss": 1.4159,
|
|
"step": 30100
|
|
},
|
|
{
|
|
"epoch": 18.72254066615027,
|
|
"grad_norm": 2.5701520442962646,
|
|
"learning_rate": 6.43831370117793e-07,
|
|
"loss": 1.4456,
|
|
"step": 30200
|
|
},
|
|
{
|
|
"epoch": 18.784508133230055,
|
|
"grad_norm": 2.5587334632873535,
|
|
"learning_rate": 6.131432114073156e-07,
|
|
"loss": 1.4398,
|
|
"step": 30300
|
|
},
|
|
{
|
|
"epoch": 18.846475600309837,
|
|
"grad_norm": 2.7531826496124268,
|
|
"learning_rate": 5.821450712957223e-07,
|
|
"loss": 1.4373,
|
|
"step": 30400
|
|
},
|
|
{
|
|
"epoch": 18.90844306738962,
|
|
"grad_norm": 2.606565237045288,
|
|
"learning_rate": 5.51146931184129e-07,
|
|
"loss": 1.4322,
|
|
"step": 30500
|
|
},
|
|
{
|
|
"epoch": 18.970410534469405,
|
|
"grad_norm": 2.6392128467559814,
|
|
"learning_rate": 5.201487910725357e-07,
|
|
"loss": 1.4442,
|
|
"step": 30600
|
|
},
|
|
{
|
|
"epoch": 18.999535243996903,
|
|
"eval_loss": 1.3643269538879395,
|
|
"eval_runtime": 71.2663,
|
|
"eval_samples_per_second": 45.281,
|
|
"eval_steps_per_second": 22.647,
|
|
"step": 30647
|
|
},
|
|
{
|
|
"epoch": 19.032842757552284,
|
|
"grad_norm": 2.5503151416778564,
|
|
"learning_rate": 4.891506509609424e-07,
|
|
"loss": 1.4546,
|
|
"step": 30700
|
|
},
|
|
{
|
|
"epoch": 19.094810224632067,
|
|
"grad_norm": 3.5481629371643066,
|
|
"learning_rate": 4.5815251084934906e-07,
|
|
"loss": 1.4389,
|
|
"step": 30800
|
|
},
|
|
{
|
|
"epoch": 19.156777691711852,
|
|
"grad_norm": 2.849660634994507,
|
|
"learning_rate": 4.271543707377557e-07,
|
|
"loss": 1.4311,
|
|
"step": 30900
|
|
},
|
|
{
|
|
"epoch": 19.218745158791634,
|
|
"grad_norm": 2.581972599029541,
|
|
"learning_rate": 3.9615623062616244e-07,
|
|
"loss": 1.4772,
|
|
"step": 31000
|
|
},
|
|
{
|
|
"epoch": 19.280712625871416,
|
|
"grad_norm": 4.163836479187012,
|
|
"learning_rate": 3.6515809051456916e-07,
|
|
"loss": 1.4237,
|
|
"step": 31100
|
|
},
|
|
{
|
|
"epoch": 19.342680092951202,
|
|
"grad_norm": 2.7274580001831055,
|
|
"learning_rate": 3.341599504029758e-07,
|
|
"loss": 1.4404,
|
|
"step": 31200
|
|
},
|
|
{
|
|
"epoch": 19.404647560030984,
|
|
"grad_norm": 3.983724355697632,
|
|
"learning_rate": 3.0316181029138253e-07,
|
|
"loss": 1.4473,
|
|
"step": 31300
|
|
},
|
|
{
|
|
"epoch": 19.466615027110766,
|
|
"grad_norm": 2.6508560180664062,
|
|
"learning_rate": 2.7216367017978925e-07,
|
|
"loss": 1.4563,
|
|
"step": 31400
|
|
},
|
|
{
|
|
"epoch": 19.52858249419055,
|
|
"grad_norm": 2.608508348464966,
|
|
"learning_rate": 2.4116553006819597e-07,
|
|
"loss": 1.4381,
|
|
"step": 31500
|
|
},
|
|
{
|
|
"epoch": 19.590549961270334,
|
|
"grad_norm": 3.2333600521087646,
|
|
"learning_rate": 2.101673899566026e-07,
|
|
"loss": 1.4418,
|
|
"step": 31600
|
|
},
|
|
{
|
|
"epoch": 19.652517428350116,
|
|
"grad_norm": 5.008152008056641,
|
|
"learning_rate": 1.7916924984500932e-07,
|
|
"loss": 1.447,
|
|
"step": 31700
|
|
},
|
|
{
|
|
"epoch": 19.714484895429898,
|
|
"grad_norm": 3.073624610900879,
|
|
"learning_rate": 1.48171109733416e-07,
|
|
"loss": 1.4241,
|
|
"step": 31800
|
|
},
|
|
{
|
|
"epoch": 19.776452362509684,
|
|
"grad_norm": 5.159261226654053,
|
|
"learning_rate": 1.171729696218227e-07,
|
|
"loss": 1.424,
|
|
"step": 31900
|
|
},
|
|
{
|
|
"epoch": 19.838419829589466,
|
|
"grad_norm": 3.225008249282837,
|
|
"learning_rate": 8.61748295102294e-08,
|
|
"loss": 1.4327,
|
|
"step": 32000
|
|
},
|
|
{
|
|
"epoch": 19.900387296669248,
|
|
"grad_norm": 3.629281520843506,
|
|
"learning_rate": 5.517668939863608e-08,
|
|
"loss": 1.437,
|
|
"step": 32100
|
|
},
|
|
{
|
|
"epoch": 19.962354763749033,
|
|
"grad_norm": 2.323373556137085,
|
|
"learning_rate": 2.417854928704278e-08,
|
|
"loss": 1.4345,
|
|
"step": 32200
|
|
},
|
|
{
|
|
"epoch": 19.999535243996903,
|
|
"eval_loss": 1.3642308712005615,
|
|
"eval_runtime": 70.2626,
|
|
"eval_samples_per_second": 45.928,
|
|
"eval_steps_per_second": 22.971,
|
|
"step": 32260
|
|
}
  ],
  "logging_steps": 100,
  "max_steps": 32260,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 20,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.2226940169519104e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}