{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9997970367363507, |
|
"eval_steps": 80, |
|
"global_step": 821, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.006088897909478384, |
|
"grad_norm": 3.566783556339775, |
|
"learning_rate": 5.5e-07, |
|
"loss": 1.1009, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.012177795818956769, |
|
"grad_norm": 3.472124035548172, |
|
"learning_rate": 1.1e-06, |
|
"loss": 1.0381, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.018266693728435153, |
|
"grad_norm": 2.9811017753128577, |
|
"learning_rate": 1.6499999999999999e-06, |
|
"loss": 0.9465, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.024355591637913537, |
|
"grad_norm": 2.922160623410553, |
|
"learning_rate": 2.2e-06, |
|
"loss": 1.2107, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.03044448954739192, |
|
"grad_norm": 2.4899147411553546, |
|
"learning_rate": 2.75e-06, |
|
"loss": 1.1834, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.036533387456870306, |
|
"grad_norm": 2.4171440435334826, |
|
"learning_rate": 3.2999999999999997e-06, |
|
"loss": 1.0345, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.04262228536634869, |
|
"grad_norm": 2.3631050864525105, |
|
"learning_rate": 3.8499999999999996e-06, |
|
"loss": 0.8937, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.048711183275827075, |
|
"grad_norm": 2.367215460791824, |
|
"learning_rate": 4.4e-06, |
|
"loss": 1.0384, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.05480008118530546, |
|
"grad_norm": 2.3735664390744695, |
|
"learning_rate": 4.95e-06, |
|
"loss": 0.8559, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.06088897909478384, |
|
"grad_norm": 2.1106128418644574, |
|
"learning_rate": 5.5e-06, |
|
"loss": 1.0974, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.06697787700426223, |
|
"grad_norm": 2.4447079098341042, |
|
"learning_rate": 5.416103509565232e-06, |
|
"loss": 1.0212, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.07306677491374061, |
|
"grad_norm": 2.4511103681763307, |
|
"learning_rate": 5.333024453086441e-06, |
|
"loss": 0.9416, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.079155672823219, |
|
"grad_norm": 2.2384105642036918, |
|
"learning_rate": 5.250760158320862e-06, |
|
"loss": 1.2031, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.08524457073269738, |
|
"grad_norm": 3.119602266469178, |
|
"learning_rate": 5.16930794423241e-06, |
|
"loss": 0.8486, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.09133346864217577, |
|
"grad_norm": 2.5881641097806862, |
|
"learning_rate": 5.088665120904279e-06, |
|
"loss": 1.1537, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.09742236655165415, |
|
"grad_norm": 2.2055002770457657, |
|
"learning_rate": 5.008828989450109e-06, |
|
"loss": 1.0313, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.09742236655165415, |
|
"eval_loss": 0.9289844036102295, |
|
"eval_runtime": 10.8703, |
|
"eval_samples_per_second": 18.399, |
|
"eval_steps_per_second": 4.6, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.10351126446113254, |
|
"grad_norm": 2.415544688492964, |
|
"learning_rate": 4.92979684192366e-06, |
|
"loss": 0.9282, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.10960016237061092, |
|
"grad_norm": 1.7793675433556284, |
|
"learning_rate": 4.851565961226947e-06, |
|
"loss": 0.8977, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.11568906028008931, |
|
"grad_norm": 2.8859944584710293, |
|
"learning_rate": 4.774133621016826e-06, |
|
"loss": 0.9419, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.12177795818956769, |
|
"grad_norm": 2.469055183117004, |
|
"learning_rate": 4.69749708560997e-06, |
|
"loss": 0.9848, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.12786685609904608, |
|
"grad_norm": 2.2447776512327544, |
|
"learning_rate": 4.621653609886213e-06, |
|
"loss": 1.215, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.13395575400852447, |
|
"grad_norm": 2.531579161423441, |
|
"learning_rate": 4.5466004391902066e-06, |
|
"loss": 1.1061, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.14004465191800283, |
|
"grad_norm": 1.9522000168289182, |
|
"learning_rate": 4.472334809231365e-06, |
|
"loss": 0.9953, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.14613354982748122, |
|
"grad_norm": 2.5590272228727136, |
|
"learning_rate": 4.398853945982053e-06, |
|
"loss": 0.6939, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.15222244773695962, |
|
"grad_norm": 2.801526994914379, |
|
"learning_rate": 4.326155065573944e-06, |
|
"loss": 0.9363, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.158311345646438, |
|
"grad_norm": 1.8373874255277394, |
|
"learning_rate": 4.254235374192553e-06, |
|
"loss": 1.195, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.16440024355591637, |
|
"grad_norm": 2.236882560839364, |
|
"learning_rate": 4.183092067969859e-06, |
|
"loss": 0.9793, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.17048914146539476, |
|
"grad_norm": 3.3065776583087643, |
|
"learning_rate": 4.112722332874986e-06, |
|
"loss": 0.9404, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.17657803937487315, |
|
"grad_norm": 2.5409656409615695, |
|
"learning_rate": 4.043123344602879e-06, |
|
"loss": 0.7751, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.18266693728435154, |
|
"grad_norm": 2.6275401841731814, |
|
"learning_rate": 3.974292268460949e-06, |
|
"loss": 0.874, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.1887558351938299, |
|
"grad_norm": 2.8046791101602007, |
|
"learning_rate": 3.906226259253599e-06, |
|
"loss": 0.9028, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.1948447331033083, |
|
"grad_norm": 2.1392572989093006, |
|
"learning_rate": 3.838922461164612e-06, |
|
"loss": 0.985, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1948447331033083, |
|
"eval_loss": 0.9194434881210327, |
|
"eval_runtime": 10.0404, |
|
"eval_samples_per_second": 19.919, |
|
"eval_steps_per_second": 4.98, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.2009336310127867, |
|
"grad_norm": 1.937352853882511, |
|
"learning_rate": 3.772378007637317e-06, |
|
"loss": 1.2669, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.20702252892226508, |
|
"grad_norm": 2.31447918391138, |
|
"learning_rate": 3.7065900212524865e-06, |
|
"loss": 0.8301, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.21311142683174344, |
|
"grad_norm": 1.9170456106220028, |
|
"learning_rate": 3.6415556136039017e-06, |
|
"loss": 0.9021, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.21920032474122184, |
|
"grad_norm": 2.2610666535764716, |
|
"learning_rate": 3.57727188517153e-06, |
|
"loss": 0.925, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.22528922265070023, |
|
"grad_norm": 2.4930543513110566, |
|
"learning_rate": 3.513735925192236e-06, |
|
"loss": 0.861, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.23137812056017862, |
|
"grad_norm": 2.1552816913331334, |
|
"learning_rate": 3.4509448115279674e-06, |
|
"loss": 1.1969, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.23746701846965698, |
|
"grad_norm": 1.8621642100291538, |
|
"learning_rate": 3.388895610531349e-06, |
|
"loss": 0.8704, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.24355591637913537, |
|
"grad_norm": 2.1209832615085285, |
|
"learning_rate": 3.3275853769086063e-06, |
|
"loss": 0.9503, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.24964481428861376, |
|
"grad_norm": 2.237393861143882, |
|
"learning_rate": 3.267011153579745e-06, |
|
"loss": 0.9347, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.25573371219809216, |
|
"grad_norm": 2.2028682955334964, |
|
"learning_rate": 3.2071699715359122e-06, |
|
"loss": 0.8733, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.2618226101075705, |
|
"grad_norm": 2.1266652440547475, |
|
"learning_rate": 3.148058849693859e-06, |
|
"loss": 1.065, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.26791150801704894, |
|
"grad_norm": 2.285707011170251, |
|
"learning_rate": 3.0896747947474105e-06, |
|
"loss": 0.9307, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.2740004059265273, |
|
"grad_norm": 2.5710181491838737, |
|
"learning_rate": 3.0320148010158774e-06, |
|
"loss": 1.0314, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.28008930383600567, |
|
"grad_norm": 2.292322109336443, |
|
"learning_rate": 2.975075850289298e-06, |
|
"loss": 0.9115, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.2861782017454841, |
|
"grad_norm": 2.1450080532027025, |
|
"learning_rate": 2.9188549116704386e-06, |
|
"loss": 1.0111, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.29226709965496245, |
|
"grad_norm": 2.1135370162342717, |
|
"learning_rate": 2.8633489414134293e-06, |
|
"loss": 1.091, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.29226709965496245, |
|
"eval_loss": 0.9108514189720154, |
|
"eval_runtime": 10.044, |
|
"eval_samples_per_second": 19.912, |
|
"eval_steps_per_second": 4.978, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.2983559975644408, |
|
"grad_norm": 1.8496992962121142, |
|
"learning_rate": 2.8085548827589632e-06, |
|
"loss": 0.9449, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.30444489547391923, |
|
"grad_norm": 2.2350891035464615, |
|
"learning_rate": 2.7544696657659385e-06, |
|
"loss": 1.0587, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.3105337933833976, |
|
"grad_norm": 2.6722037220078767, |
|
"learning_rate": 2.7010902071394348e-06, |
|
"loss": 0.7525, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.316622691292876, |
|
"grad_norm": 2.277080303923668, |
|
"learning_rate": 2.6484134100549204e-06, |
|
"loss": 0.8815, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.3227115892023544, |
|
"grad_norm": 2.0493281130802616, |
|
"learning_rate": 2.5964361639785647e-06, |
|
"loss": 1.1089, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.32880048711183274, |
|
"grad_norm": 2.042407938817224, |
|
"learning_rate": 2.5451553444835415e-06, |
|
"loss": 1.0962, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.33488938502131116, |
|
"grad_norm": 2.4634190870687287, |
|
"learning_rate": 2.4945678130621847e-06, |
|
"loss": 0.981, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.3409782829307895, |
|
"grad_norm": 2.260346564740007, |
|
"learning_rate": 2.4446704169338846e-06, |
|
"loss": 0.8348, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.3470671808402679, |
|
"grad_norm": 2.1046411435401806, |
|
"learning_rate": 2.3954599888485643e-06, |
|
"loss": 0.7611, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.3531560787497463, |
|
"grad_norm": 2.192608175141679, |
|
"learning_rate": 2.3469333468856106e-06, |
|
"loss": 1.1149, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.35924497665922467, |
|
"grad_norm": 1.9896811996512633, |
|
"learning_rate": 2.299087294248103e-06, |
|
"loss": 0.8705, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.3653338745687031, |
|
"grad_norm": 2.342371852699277, |
|
"learning_rate": 2.25191861905219e-06, |
|
"loss": 0.7842, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.37142277247818145, |
|
"grad_norm": 2.7056358054268945, |
|
"learning_rate": 2.205424094111439e-06, |
|
"loss": 0.9357, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.3775116703876598, |
|
"grad_norm": 1.827000174010884, |
|
"learning_rate": 2.1596004767160085e-06, |
|
"loss": 1.088, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.38360056829713823, |
|
"grad_norm": 2.11667666402572, |
|
"learning_rate": 2.1144445084064565e-06, |
|
"loss": 1.0323, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.3896894662066166, |
|
"grad_norm": 1.9194964413075437, |
|
"learning_rate": 2.069952914741992e-06, |
|
"loss": 0.9087, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.3896894662066166, |
|
"eval_loss": 0.9030415415763855, |
|
"eval_runtime": 10.0588, |
|
"eval_samples_per_second": 19.883, |
|
"eval_steps_per_second": 4.971, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.39577836411609496, |
|
"grad_norm": 2.379416328048356, |
|
"learning_rate": 2.026122405062999e-06, |
|
"loss": 0.9356, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.4018672620255734, |
|
"grad_norm": 2.3051629049862967, |
|
"learning_rate": 1.982949672247615e-06, |
|
"loss": 0.9179, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.40795615993505174, |
|
"grad_norm": 2.2745740533676724, |
|
"learning_rate": 1.940431392462155e-06, |
|
"loss": 0.9632, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.41404505784453016, |
|
"grad_norm": 2.2794095449818252, |
|
"learning_rate": 1.8985642249051713e-06, |
|
"loss": 0.8374, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.4201339557540085, |
|
"grad_norm": 1.9892966603109936, |
|
"learning_rate": 1.8573448115449084e-06, |
|
"loss": 0.8853, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.4262228536634869, |
|
"grad_norm": 1.9076114123449572, |
|
"learning_rate": 1.8167697768499194e-06, |
|
"loss": 1.006, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.4323117515729653, |
|
"grad_norm": 2.4400224099994183, |
|
"learning_rate": 1.7768357275125856e-06, |
|
"loss": 0.7937, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.43840064948244367, |
|
"grad_norm": 1.9229232682741382, |
|
"learning_rate": 1.737539252165281e-06, |
|
"loss": 0.937, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.44448954739192204, |
|
"grad_norm": 2.482169188918342, |
|
"learning_rate": 1.6988769210889033e-06, |
|
"loss": 0.8412, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.45057844530140045, |
|
"grad_norm": 2.0823727911627117, |
|
"learning_rate": 1.6608452859134694e-06, |
|
"loss": 1.0682, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.4566673432108788, |
|
"grad_norm": 2.1349739191332624, |
|
"learning_rate": 1.6234408793104964e-06, |
|
"loss": 0.9272, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.46275624112035724, |
|
"grad_norm": 2.30837580526881, |
|
"learning_rate": 1.5866602146768156e-06, |
|
"loss": 0.8386, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.4688451390298356, |
|
"grad_norm": 1.998746509174225, |
|
"learning_rate": 1.55049978580951e-06, |
|
"loss": 1.0616, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.47493403693931396, |
|
"grad_norm": 1.7126649948610602, |
|
"learning_rate": 1.5149560665716053e-06, |
|
"loss": 0.9667, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.4810229348487924, |
|
"grad_norm": 5.6311081714105935, |
|
"learning_rate": 1.4800255105481483e-06, |
|
"loss": 0.876, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.48711183275827075, |
|
"grad_norm": 2.3657992350084545, |
|
"learning_rate": 1.445704550692281e-06, |
|
"loss": 1.0107, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.48711183275827075, |
|
"eval_loss": 0.8974834680557251, |
|
"eval_runtime": 10.0412, |
|
"eval_samples_per_second": 19.918, |
|
"eval_steps_per_second": 4.979, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.4932007306677491, |
|
"grad_norm": 1.7698636674115102, |
|
"learning_rate": 1.4119895989608974e-06, |
|
"loss": 0.9153, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.49928962857722753, |
|
"grad_norm": 2.054779528570307, |
|
"learning_rate": 1.3788770459394496e-06, |
|
"loss": 0.8891, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.5053785264867059, |
|
"grad_norm": 1.9369644275849407, |
|
"learning_rate": 1.3463632604554425e-06, |
|
"loss": 0.9571, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.5114674243961843, |
|
"grad_norm": 1.8987765645217645, |
|
"learning_rate": 1.3144445891801363e-06, |
|
"loss": 0.963, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.5175563223056627, |
|
"grad_norm": 2.403051805328199, |
|
"learning_rate": 1.28311735621795e-06, |
|
"loss": 1.1635, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.523645220215141, |
|
"grad_norm": 2.109077522246386, |
|
"learning_rate": 1.252377862683016e-06, |
|
"loss": 1.0585, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.5297341181246195, |
|
"grad_norm": 1.874133422791628, |
|
"learning_rate": 1.222222386262332e-06, |
|
"loss": 0.9143, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.5358230160340979, |
|
"grad_norm": 2.1929101428736177, |
|
"learning_rate": 1.1926471807648967e-06, |
|
"loss": 0.9188, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.5419119139435762, |
|
"grad_norm": 1.6260373540934423, |
|
"learning_rate": 1.1636484756561917e-06, |
|
"loss": 0.8914, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.5480008118530546, |
|
"grad_norm": 1.9739709292895855, |
|
"learning_rate": 1.1352224755773445e-06, |
|
"loss": 1.0831, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.554089709762533, |
|
"grad_norm": 2.023310630964453, |
|
"learning_rate": 1.107365359848245e-06, |
|
"loss": 1.0402, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.5601786076720113, |
|
"grad_norm": 2.466428732846668, |
|
"learning_rate": 1.0800732819538616e-06, |
|
"loss": 0.9395, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.5662675055814897, |
|
"grad_norm": 1.9030259115900607, |
|
"learning_rate": 1.0533423690129516e-06, |
|
"loss": 0.8586, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.5723564034909682, |
|
"grad_norm": 2.1592861330197946, |
|
"learning_rate": 1.0271687212283061e-06, |
|
"loss": 0.9337, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.5784453014004465, |
|
"grad_norm": 2.356602817957484, |
|
"learning_rate": 1.0015484113176207e-06, |
|
"loss": 0.8562, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.5845341993099249, |
|
"grad_norm": 1.9750104030522446, |
|
"learning_rate": 9.764774839240228e-07, |
|
"loss": 0.9108, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.5845341993099249, |
|
"eval_loss": 0.8940006494522095, |
|
"eval_runtime": 10.0627, |
|
"eval_samples_per_second": 19.875, |
|
"eval_steps_per_second": 4.969, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.5906230972194033, |
|
"grad_norm": 2.294181751504768, |
|
"learning_rate": 9.519519550052165e-07, |
|
"loss": 0.9483, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.5967119951288816, |
|
"grad_norm": 2.390172284421488, |
|
"learning_rate": 9.27967811200154e-07, |
|
"loss": 0.9252, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.60280089303836, |
|
"grad_norm": 1.9195131453200192, |
|
"learning_rate": 9.045210091720488e-07, |
|
"loss": 1.045, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.6088897909478385, |
|
"grad_norm": 2.3087652972941957, |
|
"learning_rate": 8.816074749264782e-07, |
|
"loss": 0.8324, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.6149786888573169, |
|
"grad_norm": 1.887331233479458, |
|
"learning_rate": 8.592231031032369e-07, |
|
"loss": 0.9797, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.6210675867667952, |
|
"grad_norm": 2.546311216104545, |
|
"learning_rate": 8.37363756240493e-07, |
|
"loss": 0.9758, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.6271564846762736, |
|
"grad_norm": 2.052324183223674, |
|
"learning_rate": 8.160252640097203e-07, |
|
"loss": 0.9379, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.633245382585752, |
|
"grad_norm": 1.8162505508545923, |
|
"learning_rate": 7.952034224197429e-07, |
|
"loss": 1.0445, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.6393342804952303, |
|
"grad_norm": 1.9039113307487847, |
|
"learning_rate": 7.748939929881281e-07, |
|
"loss": 0.9693, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.6454231784047088, |
|
"grad_norm": 1.7791797404105452, |
|
"learning_rate": 7.5509270187801e-07, |
|
"loss": 1.1654, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.6515120763141872, |
|
"grad_norm": 2.2713665666241023, |
|
"learning_rate": 7.357952389983003e-07, |
|
"loss": 1.0812, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.6576009742236655, |
|
"grad_norm": 2.7771426272207926, |
|
"learning_rate": 7.169972570650695e-07, |
|
"loss": 1.0663, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.6636898721331439, |
|
"grad_norm": 2.1095605296190794, |
|
"learning_rate": 6.986943706217093e-07, |
|
"loss": 1.2464, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.6697787700426223, |
|
"grad_norm": 2.165590845905102, |
|
"learning_rate": 6.808821550153036e-07, |
|
"loss": 1.1615, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.6758676679521006, |
|
"grad_norm": 2.098201375795868, |
|
"learning_rate": 6.635561453264081e-07, |
|
"loss": 1.2256, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.681956565861579, |
|
"grad_norm": 1.597477657082927, |
|
"learning_rate": 6.467118352492235e-07, |
|
"loss": 1.1531, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.681956565861579, |
|
"eval_loss": 0.9038890600204468, |
|
"eval_runtime": 10.0714, |
|
"eval_samples_per_second": 19.858, |
|
"eval_steps_per_second": 4.965, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.6880454637710575, |
|
"grad_norm": 1.9610561566970353, |
|
"learning_rate": 6.303446759188759e-07, |
|
"loss": 1.1032, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.6941343616805358, |
|
"grad_norm": 1.8771579285205593, |
|
"learning_rate": 6.14450074682243e-07, |
|
"loss": 1.0247, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.7002232595900142, |
|
"grad_norm": 1.9428546209099045, |
|
"learning_rate": 5.990233938084431e-07, |
|
"loss": 0.9957, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.7063121574994926, |
|
"grad_norm": 2.0685369493893453, |
|
"learning_rate": 5.840599491347582e-07, |
|
"loss": 1.073, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.712401055408971, |
|
"grad_norm": 1.4232455246953637, |
|
"learning_rate": 5.695550086433726e-07, |
|
"loss": 1.17, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.7184899533184493, |
|
"grad_norm": 1.9953305653364002, |
|
"learning_rate": 5.555037909638744e-07, |
|
"loss": 1.0394, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.7245788512279278, |
|
"grad_norm": 2.234757544147466, |
|
"learning_rate": 5.419014637959808e-07, |
|
"loss": 1.1472, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.7306677491374062, |
|
"grad_norm": 2.2367049852297876, |
|
"learning_rate": 5.287431422464098e-07, |
|
"loss": 1.0496, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.7367566470468845, |
|
"grad_norm": 2.2212989204910367, |
|
"learning_rate": 5.160238870732056e-07, |
|
"loss": 0.9907, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.7428455449563629, |
|
"grad_norm": 1.8355911585139566, |
|
"learning_rate": 5.037387028301462e-07, |
|
"loss": 1.3347, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.7489344428658413, |
|
"grad_norm": 2.172477947251129, |
|
"learning_rate": 4.918825359030837e-07, |
|
"loss": 1.1386, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.7550233407753196, |
|
"grad_norm": 1.8047316252716201, |
|
"learning_rate": 4.804502724291952e-07, |
|
"loss": 1.1201, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.761112238684798, |
|
"grad_norm": 1.8666031715794023, |
|
"learning_rate": 4.6943673608912795e-07, |
|
"loss": 1.0354, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.7672011365942765, |
|
"grad_norm": 1.948049167249183, |
|
"learning_rate": 4.588366857608947e-07, |
|
"loss": 0.9314, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.7732900345037548, |
|
"grad_norm": 1.9360471601880767, |
|
"learning_rate": 4.4864481302308764e-07, |
|
"loss": 1.3016, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.7793789324132332, |
|
"grad_norm": 1.7603394486420607, |
|
"learning_rate": 4.388557394934996e-07, |
|
"loss": 1.2698, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.7793789324132332, |
|
"eval_loss": 0.9068670868873596, |
|
"eval_runtime": 10.0454, |
|
"eval_samples_per_second": 19.91, |
|
"eval_steps_per_second": 4.977, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.7854678303227116, |
|
"grad_norm": 1.8121869716312213, |
|
"learning_rate": 4.2946401398754887e-07, |
|
"loss": 1.0936, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.7915567282321899, |
|
"grad_norm": 2.2784724809217116, |
|
"learning_rate": 4.20464109478947e-07, |
|
"loss": 1.0688, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.7976456261416683, |
|
"grad_norm": 1.9511498628400707, |
|
"learning_rate": 4.118504198427839e-07, |
|
"loss": 1.2066, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.8037345240511468, |
|
"grad_norm": 1.6886651269860387, |
|
"learning_rate": 4.036172563585779e-07, |
|
"loss": 1.0209, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.8098234219606252, |
|
"grad_norm": 1.770877392703053, |
|
"learning_rate": 3.957588439477604e-07, |
|
"loss": 1.1583, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.8159123198701035, |
|
"grad_norm": 2.1758718521360385, |
|
"learning_rate": 3.8826931711647146e-07, |
|
"loss": 1.0352, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.8220012177795819, |
|
"grad_norm": 2.003054070991938, |
|
"learning_rate": 3.8114271557029493e-07, |
|
"loss": 1.1568, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.8280901156890603, |
|
"grad_norm": 2.167247263874176, |
|
"learning_rate": 3.74372979462545e-07, |
|
"loss": 1.0868, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.8341790135985386, |
|
"grad_norm": 1.8384381989648775, |
|
"learning_rate": 3.679539442317327e-07, |
|
"loss": 1.1053, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.840267911508017, |
|
"grad_norm": 2.1673707360409877, |
|
"learning_rate": 3.6187933497668937e-07, |
|
"loss": 1.1503, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.8463568094174955, |
|
"grad_norm": 1.9246135785489056, |
|
"learning_rate": 3.5614276030919706e-07, |
|
"loss": 1.0148, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.8524457073269738, |
|
"grad_norm": 1.675592035681543, |
|
"learning_rate": 3.507377056135281e-07, |
|
"loss": 1.0533, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.8585346052364522, |
|
"grad_norm": 1.58433949898804, |
|
"learning_rate": 3.456575256295274e-07, |
|
"loss": 0.9223, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.8646235031459306, |
|
"grad_norm": 1.5089882230199272, |
|
"learning_rate": 3.408954362601715e-07, |
|
"loss": 1.1612, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.8707124010554089, |
|
"grad_norm": 1.8812149702939354, |
|
"learning_rate": 3.364445054850564e-07, |
|
"loss": 1.1011, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.8768012989648873, |
|
"grad_norm": 1.8009898237225854, |
|
"learning_rate": 3.322976432368887e-07, |
|
"loss": 1.1988, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.8768012989648873, |
|
"eval_loss": 0.9062988758087158, |
|
"eval_runtime": 10.0522, |
|
"eval_samples_per_second": 19.896, |
|
"eval_steps_per_second": 4.974, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.8828901968743658, |
|
"grad_norm": 2.0516728550272494, |
|
"learning_rate": 3.284475900672534e-07, |
|
"loss": 1.217, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.8889790947838441, |
|
"grad_norm": 1.8636754853646536, |
|
"learning_rate": 3.2488690438860155e-07, |
|
"loss": 1.05, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.8950679926933225, |
|
"grad_norm": 2.0197164306265973, |
|
"learning_rate": 3.216079480286014e-07, |
|
"loss": 1.083, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.9011568906028009, |
|
"grad_norm": 1.7192487834075223, |
|
"learning_rate": 3.186028697665536e-07, |
|
"loss": 0.9577, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.9072457885122793, |
|
"grad_norm": 1.9547679875155324, |
|
"learning_rate": 3.1586358643344154e-07, |
|
"loss": 1.0405, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.9133346864217576, |
|
"grad_norm": 2.0062949300387847, |
|
"learning_rate": 3.1338176103847303e-07, |
|
"loss": 1.1779, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.919423584331236, |
|
"grad_norm": 1.5531676528239142, |
|
"learning_rate": 3.111487772222592e-07, |
|
"loss": 1.1676, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.9255124822407145, |
|
"grad_norm": 1.5372578345670411, |
|
"learning_rate": 3.0915570910938297e-07, |
|
"loss": 1.0719, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.9316013801501928, |
|
"grad_norm": 2.1454113233082706, |
|
"learning_rate": 3.07393285308207e-07, |
|
"loss": 1.0105, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.9376902780596712, |
|
"grad_norm": 1.5547170974986044, |
|
"learning_rate": 3.0585184532967033e-07, |
|
"loss": 1.0721, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.9437791759691496, |
|
"grad_norm": 1.852645012921805, |
|
"learning_rate": 3.045212859784411e-07, |
|
"loss": 1.0933, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.9498680738786279, |
|
"grad_norm": 1.8094221288522232, |
|
"learning_rate": 3.033909941480833e-07, |
|
"loss": 0.9671, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.9559569717881063, |
|
"grad_norm": 1.5561960733621902, |
|
"learning_rate": 3.024497606276943e-07, |
|
"loss": 1.1151, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.9620458696975848, |
|
"grad_norm": 1.84094988169291, |
|
"learning_rate": 3.016856664111943e-07, |
|
"loss": 0.9727, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.9681347676070631, |
|
"grad_norm": 1.8600686188407434, |
|
"learning_rate": 3.0108592734273526e-07, |
|
"loss": 1.1713, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.9742236655165415, |
|
"grad_norm": 1.9971464440071154, |
|
"learning_rate": 3.006366718310598e-07, |
|
"loss": 1.1649, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.9742236655165415, |
|
"eval_loss": 0.9046012759208679, |
|
"eval_runtime": 10.0656, |
|
"eval_samples_per_second": 19.87, |
|
"eval_steps_per_second": 4.967, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.9803125634260199, |
|
"grad_norm": 1.801943230059916, |
|
"learning_rate": 3.00322602222577e-07, |
|
"loss": 1.0695, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.9864014613354982, |
|
"grad_norm": 1.839016967735732, |
|
"learning_rate": 3.001264297011597e-07, |
|
"loss": 0.9731, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.9924903592449766, |
|
"grad_norm": 1.9692055921355986, |
|
"learning_rate": 3.00027780849675e-07, |
|
"loss": 0.9058, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.9985792571544551, |
|
"grad_norm": 2.8720134735995413, |
|
"learning_rate": 3.0000031504123295e-07, |
|
"loss": 1.2194, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.9997970367363507, |
|
"step": 821, |
|
"total_flos": 1.4841304991137792e+17, |
|
"train_loss": 1.0157385155576737, |
|
"train_runtime": 6062.254, |
|
"train_samples_per_second": 4.876, |
|
"train_steps_per_second": 0.135 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 821, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 821, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.4841304991137792e+17, |
|
"train_batch_size": 3, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |