{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 381,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.007874015748031496,
      "grad_norm": 97.86459103253922,
      "learning_rate": 5.128205128205128e-07,
      "loss": 10.8466,
      "step": 1
    },
    {
      "epoch": 0.03937007874015748,
      "grad_norm": 51.400005997600886,
      "learning_rate": 2.564102564102564e-06,
      "loss": 10.5913,
      "step": 5
    },
    {
      "epoch": 0.07874015748031496,
      "grad_norm": 18.973044788046785,
      "learning_rate": 5.128205128205128e-06,
      "loss": 9.5728,
      "step": 10
    },
    {
      "epoch": 0.11811023622047244,
      "grad_norm": 18.974754241662772,
      "learning_rate": 7.692307692307694e-06,
      "loss": 8.6008,
      "step": 15
    },
    {
      "epoch": 0.15748031496062992,
      "grad_norm": 16.264940537929053,
      "learning_rate": 1.0256410256410256e-05,
      "loss": 7.6501,
      "step": 20
    },
    {
      "epoch": 0.1968503937007874,
      "grad_norm": 13.83260251756727,
      "learning_rate": 1.2820512820512823e-05,
      "loss": 6.519,
      "step": 25
    },
    {
      "epoch": 0.23622047244094488,
      "grad_norm": 11.83126275501012,
      "learning_rate": 1.5384615384615387e-05,
      "loss": 5.9682,
      "step": 30
    },
    {
      "epoch": 0.2755905511811024,
      "grad_norm": 11.5143365026344,
      "learning_rate": 1.794871794871795e-05,
      "loss": 5.3936,
      "step": 35
    },
    {
      "epoch": 0.31496062992125984,
      "grad_norm": 12.072390547814743,
      "learning_rate": 1.9999578095183126e-05,
      "loss": 4.7938,
      "step": 40
    },
    {
      "epoch": 0.3543307086614173,
      "grad_norm": 11.243881904676005,
      "learning_rate": 1.9984815164333163e-05,
      "loss": 4.3308,
      "step": 45
    },
    {
      "epoch": 0.3937007874015748,
      "grad_norm": 11.150844255163953,
      "learning_rate": 1.9948992579873538e-05,
      "loss": 3.8431,
      "step": 50
    },
    {
      "epoch": 0.4330708661417323,
      "grad_norm": 9.63318982358096,
      "learning_rate": 1.989218589765658e-05,
      "loss": 3.5038,
      "step": 55
    },
    {
      "epoch": 0.47244094488188976,
      "grad_norm": 8.283345964506,
      "learning_rate": 1.981451493252418e-05,
      "loss": 3.2468,
      "step": 60
    },
    {
      "epoch": 0.5118110236220472,
      "grad_norm": 7.133949874571078,
      "learning_rate": 1.971614350559814e-05,
      "loss": 2.9607,
      "step": 65
    },
    {
      "epoch": 0.5511811023622047,
      "grad_norm": 12.013998534508621,
      "learning_rate": 1.9597279098753893e-05,
      "loss": 2.9499,
      "step": 70
    },
    {
      "epoch": 0.5905511811023622,
      "grad_norm": 14.00843961898459,
      "learning_rate": 1.9458172417006347e-05,
      "loss": 2.8118,
      "step": 75
    },
    {
      "epoch": 0.6299212598425197,
      "grad_norm": 4.942733823635868,
      "learning_rate": 1.929911685973088e-05,
      "loss": 2.7099,
      "step": 80
    },
    {
      "epoch": 0.6692913385826772,
      "grad_norm": 4.505148495160888,
      "learning_rate": 1.9120447901834708e-05,
      "loss": 2.6232,
      "step": 85
    },
    {
      "epoch": 0.7086614173228346,
      "grad_norm": 4.482679636836862,
      "learning_rate": 1.8922542386183942e-05,
      "loss": 2.5165,
      "step": 90
    },
    {
      "epoch": 0.7480314960629921,
      "grad_norm": 4.421296731369613,
      "learning_rate": 1.8705817728778626e-05,
      "loss": 2.5159,
      "step": 95
    },
    {
      "epoch": 0.7874015748031497,
      "grad_norm": 4.031128786638602,
      "learning_rate": 1.847073103835222e-05,
      "loss": 2.432,
      "step": 100
    },
    {
      "epoch": 0.8267716535433071,
      "grad_norm": 3.8222494747123665,
      "learning_rate": 1.821777815225245e-05,
      "loss": 2.322,
      "step": 105
    },
    {
      "epoch": 0.8661417322834646,
      "grad_norm": 3.0512683342688938,
      "learning_rate": 1.7947492590636998e-05,
      "loss": 2.3086,
      "step": 110
    },
    {
      "epoch": 0.905511811023622,
      "grad_norm": 3.487728646307278,
      "learning_rate": 1.766044443118978e-05,
      "loss": 2.2626,
      "step": 115
    },
    {
      "epoch": 0.9448818897637795,
      "grad_norm": 3.429390931093978,
      "learning_rate": 1.735723910673132e-05,
      "loss": 2.2276,
      "step": 120
    },
    {
      "epoch": 0.984251968503937,
      "grad_norm": 3.0101351501349285,
      "learning_rate": 1.7038516128259118e-05,
      "loss": 2.1477,
      "step": 125
    },
    {
      "epoch": 1.0,
      "eval_loss": 2.1499931812286377,
      "eval_runtime": 9.2946,
      "eval_samples_per_second": 26.682,
      "eval_steps_per_second": 3.335,
      "step": 127
    },
    {
      "epoch": 1.0236220472440944,
      "grad_norm": 2.951694006526968,
      "learning_rate": 1.670494773611149e-05,
      "loss": 1.975,
      "step": 130
    },
    {
      "epoch": 1.0629921259842519,
      "grad_norm": 4.048795878335224,
      "learning_rate": 1.6357237482099682e-05,
      "loss": 2.0001,
      "step": 135
    },
    {
      "epoch": 1.1023622047244095,
      "grad_norm": 3.297695818798186,
      "learning_rate": 1.5996118745598817e-05,
      "loss": 1.9234,
      "step": 140
    },
    {
      "epoch": 1.141732283464567,
      "grad_norm": 3.038860034781639,
      "learning_rate": 1.5622353186727542e-05,
      "loss": 1.9526,
      "step": 145
    },
    {
      "epoch": 1.1811023622047245,
      "grad_norm": 3.066625145980664,
      "learning_rate": 1.523672913987878e-05,
      "loss": 1.9514,
      "step": 150
    },
    {
      "epoch": 1.220472440944882,
      "grad_norm": 2.8503120369521238,
      "learning_rate": 1.4840059950989992e-05,
      "loss": 1.8463,
      "step": 155
    },
    {
      "epoch": 1.2598425196850394,
      "grad_norm": 2.879622274636194,
      "learning_rate": 1.4433182262059861e-05,
      "loss": 1.9393,
      "step": 160
    },
    {
      "epoch": 1.2992125984251968,
      "grad_norm": 2.865024409867327,
      "learning_rate": 1.4016954246529697e-05,
      "loss": 1.8229,
      "step": 165
    },
    {
      "epoch": 1.3385826771653544,
      "grad_norm": 3.0018089829005796,
      "learning_rate": 1.3592253799251377e-05,
      "loss": 1.9101,
      "step": 170
    },
    {
      "epoch": 1.3779527559055118,
      "grad_norm": 2.6200297090547564,
      "learning_rate": 1.3159976684859528e-05,
      "loss": 1.8719,
      "step": 175
    },
    {
      "epoch": 1.4173228346456692,
      "grad_norm": 3.2482197763587703,
      "learning_rate": 1.2721034648453353e-05,
      "loss": 1.8663,
      "step": 180
    },
    {
      "epoch": 1.4566929133858268,
      "grad_norm": 2.509202477142822,
      "learning_rate": 1.2276353492572937e-05,
      "loss": 1.8042,
      "step": 185
    },
    {
      "epoch": 1.4960629921259843,
      "grad_norm": 2.3022341616451265,
      "learning_rate": 1.1826871124526072e-05,
      "loss": 1.8121,
      "step": 190
    },
    {
      "epoch": 1.5354330708661417,
      "grad_norm": 3.1813641245330238,
      "learning_rate": 1.1373535578184083e-05,
      "loss": 1.8176,
      "step": 195
    },
    {
      "epoch": 1.574803149606299,
      "grad_norm": 2.6018779161783914,
      "learning_rate": 1.0917303014419036e-05,
      "loss": 1.8582,
      "step": 200
    },
    {
      "epoch": 1.6141732283464567,
      "grad_norm": 2.477600344836676,
      "learning_rate": 1.045913570439972e-05,
      "loss": 1.8783,
      "step": 205
    },
    {
      "epoch": 1.6535433070866141,
      "grad_norm": 2.4376336894838335,
      "learning_rate": 1e-05,
      "loss": 1.7568,
      "step": 210
    },
    {
      "epoch": 1.6929133858267718,
      "grad_norm": 2.5393277649924046,
      "learning_rate": 9.540864295600282e-06,
      "loss": 1.7657,
      "step": 215
    },
    {
      "epoch": 1.7322834645669292,
      "grad_norm": 2.558708403448478,
      "learning_rate": 9.082696985580964e-06,
      "loss": 1.7453,
      "step": 220
    },
    {
      "epoch": 1.7716535433070866,
      "grad_norm": 2.6628644142413984,
      "learning_rate": 8.626464421815919e-06,
      "loss": 1.8246,
      "step": 225
    },
    {
      "epoch": 1.811023622047244,
      "grad_norm": 2.4749331824893046,
      "learning_rate": 8.173128875473933e-06,
      "loss": 1.7386,
      "step": 230
    },
    {
      "epoch": 1.8503937007874016,
      "grad_norm": 2.4118889944549817,
      "learning_rate": 7.72364650742707e-06,
      "loss": 1.744,
      "step": 235
    },
    {
      "epoch": 1.889763779527559,
      "grad_norm": 2.1983857820686485,
      "learning_rate": 7.278965351546648e-06,
      "loss": 1.7167,
      "step": 240
    },
    {
      "epoch": 1.9291338582677167,
      "grad_norm": 2.3266205479938664,
      "learning_rate": 6.840023315140476e-06,
      "loss": 1.7168,
      "step": 245
    },
    {
      "epoch": 1.968503937007874,
      "grad_norm": 2.6053794749618344,
      "learning_rate": 6.407746200748628e-06,
      "loss": 1.7855,
      "step": 250
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.8472861051559448,
      "eval_runtime": 9.2247,
      "eval_samples_per_second": 26.884,
      "eval_steps_per_second": 3.361,
      "step": 254
    },
    {
      "epoch": 2.0078740157480315,
      "grad_norm": 4.344086351254557,
      "learning_rate": 5.983045753470308e-06,
      "loss": 1.6109,
      "step": 255
    },
    {
      "epoch": 2.047244094488189,
      "grad_norm": 4.783883169799299,
      "learning_rate": 5.566817737940142e-06,
      "loss": 1.3618,
      "step": 260
    },
    {
      "epoch": 2.0866141732283463,
      "grad_norm": 3.518842708372338,
      "learning_rate": 5.159940049010015e-06,
      "loss": 1.3088,
      "step": 265
    },
    {
      "epoch": 2.1259842519685037,
      "grad_norm": 3.1090620552580006,
      "learning_rate": 4.763270860121222e-06,
      "loss": 1.2353,
      "step": 270
    },
    {
      "epoch": 2.1653543307086616,
      "grad_norm": 3.11493213597681,
      "learning_rate": 4.3776468132724605e-06,
      "loss": 1.2021,
      "step": 275
    },
    {
      "epoch": 2.204724409448819,
      "grad_norm": 5.279030916691569,
      "learning_rate": 4.003881254401183e-06,
      "loss": 1.2297,
      "step": 280
    },
    {
      "epoch": 2.2440944881889764,
      "grad_norm": 3.199352297712251,
      "learning_rate": 3.6427625179003223e-06,
      "loss": 1.2056,
      "step": 285
    },
    {
      "epoch": 2.283464566929134,
      "grad_norm": 3.320605244796421,
      "learning_rate": 3.2950522638885106e-06,
      "loss": 1.2402,
      "step": 290
    },
    {
      "epoch": 2.322834645669291,
      "grad_norm": 3.376499709728643,
      "learning_rate": 2.9614838717408866e-06,
      "loss": 1.2743,
      "step": 295
    },
    {
      "epoch": 2.362204724409449,
      "grad_norm": 3.0517704287796397,
      "learning_rate": 2.642760893268684e-06,
      "loss": 1.206,
      "step": 300
    },
    {
      "epoch": 2.4015748031496065,
      "grad_norm": 3.0861115081810517,
      "learning_rate": 2.339555568810221e-06,
      "loss": 1.1712,
      "step": 305
    },
    {
      "epoch": 2.440944881889764,
      "grad_norm": 3.181141612142621,
      "learning_rate": 2.052507409363004e-06,
      "loss": 1.1674,
      "step": 310
    },
    {
      "epoch": 2.4803149606299213,
      "grad_norm": 3.296777400430187,
      "learning_rate": 1.7822218477475496e-06,
      "loss": 1.1969,
      "step": 315
    },
    {
      "epoch": 2.5196850393700787,
      "grad_norm": 3.0166358743310386,
      "learning_rate": 1.5292689616477808e-06,
      "loss": 1.1863,
      "step": 320
    },
    {
      "epoch": 2.559055118110236,
      "grad_norm": 3.1695663564175343,
      "learning_rate": 1.294182271221377e-06,
      "loss": 1.1468,
      "step": 325
    },
    {
      "epoch": 2.5984251968503935,
      "grad_norm": 3.1650310230949374,
      "learning_rate": 1.0774576138160596e-06,
      "loss": 1.2172,
      "step": 330
    },
    {
      "epoch": 2.637795275590551,
      "grad_norm": 3.130799463981766,
      "learning_rate": 8.79552098165296e-07,
      "loss": 1.2009,
      "step": 335
    },
    {
      "epoch": 2.677165354330709,
      "grad_norm": 3.3222405034526297,
      "learning_rate": 7.00883140269123e-07,
      "loss": 1.2493,
      "step": 340
    },
    {
      "epoch": 2.716535433070866,
      "grad_norm": 3.2043683266572915,
      "learning_rate": 5.418275829936537e-07,
      "loss": 1.1749,
      "step": 345
    },
    {
      "epoch": 2.7559055118110236,
      "grad_norm": 3.18928929645315,
      "learning_rate": 4.0272090124611086e-07,
      "loss": 1.1584,
      "step": 350
    },
    {
      "epoch": 2.795275590551181,
      "grad_norm": 3.169825614669709,
      "learning_rate": 2.838564944018618e-07,
      "loss": 1.1369,
      "step": 355
    },
    {
      "epoch": 2.8346456692913384,
      "grad_norm": 3.196801372855951,
      "learning_rate": 1.854850674758213e-07,
      "loss": 1.1763,
      "step": 360
    },
    {
      "epoch": 2.8740157480314963,
      "grad_norm": 3.1501989250825013,
      "learning_rate": 1.0781410234342093e-07,
      "loss": 1.1612,
      "step": 365
    },
    {
      "epoch": 2.9133858267716537,
      "grad_norm": 3.2335057860947454,
      "learning_rate": 5.10074201264632e-08,
      "loss": 1.2047,
      "step": 370
    },
    {
      "epoch": 2.952755905511811,
      "grad_norm": 3.139504284825472,
      "learning_rate": 1.518483566683826e-08,
      "loss": 1.2345,
      "step": 375
    },
    {
      "epoch": 2.9921259842519685,
      "grad_norm": 3.172440172340041,
      "learning_rate": 4.2190481687631736e-10,
      "loss": 1.1423,
      "step": 380
    },
    {
      "epoch": 3.0,
      "eval_loss": 1.9131228923797607,
      "eval_runtime": 9.2288,
      "eval_samples_per_second": 26.872,
      "eval_steps_per_second": 3.359,
      "step": 381
    },
    {
      "epoch": 3.0,
      "step": 381,
      "total_flos": 14587822080000.0,
      "train_loss": 2.4270567033547428,
      "train_runtime": 1375.9881,
      "train_samples_per_second": 4.426,
      "train_steps_per_second": 0.277
    }
  ],
  "logging_steps": 5,
  "max_steps": 381,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 14587822080000.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}