{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 375,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "grad_norm": 1.65970778465271,
      "learning_rate": 4.9985889393594345e-05,
      "loss": 1.3397,
      "step": 5
    },
    {
      "epoch": 0.03,
      "grad_norm": 1.8339558839797974,
      "learning_rate": 4.992859235371958e-05,
      "loss": 1.2413,
      "step": 10
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.599440336227417,
      "learning_rate": 4.9827327946335875e-05,
      "loss": 1.0141,
      "step": 15
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.4671368598937988,
      "learning_rate": 4.968227477476554e-05,
      "loss": 0.9878,
      "step": 20
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.9982190728187561,
      "learning_rate": 4.949368867399567e-05,
      "loss": 1.0212,
      "step": 25
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.0809102058410645,
      "learning_rate": 4.9261902259453614e-05,
      "loss": 0.7766,
      "step": 30
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.9444558620452881,
      "learning_rate": 4.898732434036244e-05,
      "loss": 0.8701,
      "step": 35
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.0244667530059814,
      "learning_rate": 4.867043919871076e-05,
      "loss": 0.6978,
      "step": 40
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.7848422527313232,
      "learning_rate": 4.8311805735108894e-05,
      "loss": 0.754,
      "step": 45
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.9545549154281616,
      "learning_rate": 4.791205648303775e-05,
      "loss": 0.6599,
      "step": 50
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.6984307765960693,
      "learning_rate": 4.747189649322894e-05,
      "loss": 0.6316,
      "step": 55
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.7565098404884338,
      "learning_rate": 4.699210209014394e-05,
      "loss": 0.7531,
      "step": 60
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.5942355990409851,
      "learning_rate": 4.6473519502745476e-05,
      "loss": 0.4528,
      "step": 65
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.7522299885749817,
      "learning_rate": 4.591706337197597e-05,
      "loss": 0.565,
      "step": 70
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.8787449598312378,
      "learning_rate": 4.532371513757564e-05,
      "loss": 0.6038,
      "step": 75
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.6031142473220825,
      "learning_rate": 4.469452130708543e-05,
      "loss": 0.6498,
      "step": 80
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.838216245174408,
      "learning_rate": 4.4030591610087624e-05,
      "loss": 0.5324,
      "step": 85
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.0710391998291016,
      "learning_rate": 4.3333097040939764e-05,
      "loss": 0.5354,
      "step": 90
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.9150385856628418,
      "learning_rate": 4.260326779345393e-05,
      "loss": 0.5865,
      "step": 95
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.8779798746109009,
      "learning_rate": 4.184239109116393e-05,
      "loss": 0.5894,
      "step": 100
    },
    {
      "epoch": 0.28,
      "grad_norm": 2.170987844467163,
      "learning_rate": 4.105180891700746e-05,
      "loss": 0.5148,
      "step": 105
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.8440865874290466,
      "learning_rate": 4.023291564642711e-05,
      "loss": 0.437,
      "step": 110
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.8862484693527222,
      "learning_rate": 3.938715558806525e-05,
      "loss": 0.5527,
      "step": 115
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.1455185413360596,
      "learning_rate": 3.851602043638994e-05,
      "loss": 0.5582,
      "step": 120
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.8740842938423157,
      "learning_rate": 3.7621046640744975e-05,
      "loss": 0.528,
      "step": 125
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.2668254375457764,
      "learning_rate": 3.670381269546429e-05,
      "loss": 0.4583,
      "step": 130
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.8675268292427063,
      "learning_rate": 3.5765936355830356e-05,
      "loss": 0.4325,
      "step": 135
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.9672185182571411,
      "learning_rate": 3.480907178478654e-05,
      "loss": 0.5655,
      "step": 140
    },
    {
      "epoch": 0.39,
      "grad_norm": 1.1447197198867798,
      "learning_rate": 3.383490663543635e-05,
      "loss": 0.5477,
      "step": 145
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.7843021750450134,
      "learning_rate": 3.2845159074474806e-05,
      "loss": 0.673,
      "step": 150
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.8366529941558838,
      "learning_rate": 3.1841574751802076e-05,
      "loss": 0.539,
      "step": 155
    },
    {
      "epoch": 0.43,
      "grad_norm": 1.1384434700012207,
      "learning_rate": 3.082592372166412e-05,
      "loss": 0.511,
      "step": 160
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.7654138803482056,
      "learning_rate": 2.9799997320750507e-05,
      "loss": 0.402,
      "step": 165
    },
    {
      "epoch": 0.45,
      "grad_norm": 1.1874901056289673,
      "learning_rate": 2.87656050087558e-05,
      "loss": 0.453,
      "step": 170
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.9217435717582703,
      "learning_rate": 2.7724571176976732e-05,
      "loss": 0.5608,
      "step": 175
    },
    {
      "epoch": 0.48,
      "grad_norm": 1.0420029163360596,
      "learning_rate": 2.667873193057407e-05,
      "loss": 0.4598,
      "step": 180
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.8808046579360962,
      "learning_rate": 2.562993185017431e-05,
      "loss": 0.4952,
      "step": 185
    },
    {
      "epoch": 0.51,
      "grad_norm": 1.2101999521255493,
      "learning_rate": 2.4580020738523e-05,
      "loss": 0.4087,
      "step": 190
    },
    {
      "epoch": 0.52,
      "grad_norm": 1.3266472816467285,
      "learning_rate": 2.353085035792756e-05,
      "loss": 0.5781,
      "step": 195
    },
    {
      "epoch": 0.53,
      "grad_norm": 1.5473909378051758,
      "learning_rate": 2.2484271164243996e-05,
      "loss": 0.4127,
      "step": 200
    },
    {
      "epoch": 0.55,
      "grad_norm": 1.1306591033935547,
      "learning_rate": 2.1442129043167874e-05,
      "loss": 0.5682,
      "step": 205
    },
    {
      "epoch": 0.56,
      "grad_norm": 1.0642080307006836,
      "learning_rate": 2.0406262054585738e-05,
      "loss": 0.6413,
      "step": 210
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.9475000500679016,
      "learning_rate": 1.937849719072931e-05,
      "loss": 0.4034,
      "step": 215
    },
    {
      "epoch": 0.59,
      "grad_norm": 1.3008495569229126,
      "learning_rate": 1.836064715384989e-05,
      "loss": 0.5331,
      "step": 220
    },
    {
      "epoch": 0.6,
      "grad_norm": 1.1625583171844482,
      "learning_rate": 1.7354507159096647e-05,
      "loss": 0.4308,
      "step": 225
    },
    {
      "epoch": 0.61,
      "grad_norm": 1.3643542528152466,
      "learning_rate": 1.6361851768237324e-05,
      "loss": 0.474,
      "step": 230
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.6814500093460083,
      "learning_rate": 1.5384431759806083e-05,
      "loss": 0.4306,
      "step": 235
    },
    {
      "epoch": 0.64,
      "grad_norm": 1.1910830736160278,
      "learning_rate": 1.4423971041198556e-05,
      "loss": 0.5352,
      "step": 240
    },
    {
      "epoch": 0.65,
      "grad_norm": 1.0804316997528076,
      "learning_rate": 1.3482163608160408e-05,
      "loss": 0.436,
      "step": 245
    },
    {
      "epoch": 0.67,
      "grad_norm": 1.1781166791915894,
      "learning_rate": 1.2560670557032108e-05,
      "loss": 0.4172,
      "step": 250
    },
    {
      "epoch": 0.68,
      "grad_norm": 1.1555372476577759,
      "learning_rate": 1.1661117155019293e-05,
      "loss": 0.3712,
      "step": 255
    },
    {
      "epoch": 0.69,
      "grad_norm": 1.105285882949829,
      "learning_rate": 1.0785089973656337e-05,
      "loss": 0.5507,
      "step": 260
    },
    {
      "epoch": 0.71,
      "grad_norm": 1.9189614057540894,
      "learning_rate": 9.934134090518593e-06,
      "loss": 0.5309,
      "step": 265
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.8669947981834412,
      "learning_rate": 9.109750364118924e-06,
      "loss": 0.4294,
      "step": 270
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.755587637424469,
      "learning_rate": 8.313392786794833e-06,
      "loss": 0.4236,
      "step": 275
    },
    {
      "epoch": 0.75,
      "grad_norm": 1.1405256986618042,
      "learning_rate": 7.546465920254974e-06,
      "loss": 0.555,
      "step": 280
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.6737154722213745,
      "learning_rate": 6.810322418308085e-06,
      "loss": 0.5034,
      "step": 285
    },
    {
      "epoch": 0.77,
      "grad_norm": 1.107371211051941,
      "learning_rate": 6.106260641143546e-06,
      "loss": 0.4616,
      "step": 290
    },
    {
      "epoch": 0.79,
      "grad_norm": 1.2634700536727905,
      "learning_rate": 5.435522365371376e-06,
      "loss": 0.456,
      "step": 295
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.8122715950012207,
      "learning_rate": 4.799290593860525e-06,
      "loss": 0.4582,
      "step": 300
    },
    {
      "epoch": 0.81,
      "grad_norm": 1.333025336265564,
      "learning_rate": 4.198687469238297e-06,
      "loss": 0.3826,
      "step": 305
    },
    {
      "epoch": 0.83,
      "grad_norm": 1.5998661518096924,
      "learning_rate": 3.6347722947309843e-06,
      "loss": 0.4933,
      "step": 310
    },
    {
      "epoch": 0.84,
      "grad_norm": 1.3631378412246704,
      "learning_rate": 3.108539665836388e-06,
      "loss": 0.5099,
      "step": 315
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.714860200881958,
      "learning_rate": 2.6209177161234445e-06,
      "loss": 0.57,
      "step": 320
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.5441424250602722,
      "learning_rate": 2.1727664802529216e-06,
      "loss": 0.4737,
      "step": 325
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.4750747680664062,
      "learning_rate": 1.7648763771063837e-06,
      "loss": 0.4315,
      "step": 330
    },
    {
      "epoch": 0.89,
      "grad_norm": 1.6141853332519531,
      "learning_rate": 1.3979668156987425e-06,
      "loss": 0.4254,
      "step": 335
    },
    {
      "epoch": 0.91,
      "grad_norm": 1.2416565418243408,
      "learning_rate": 1.0726849263332256e-06,
      "loss": 0.3288,
      "step": 340
    },
    {
      "epoch": 0.92,
      "grad_norm": 1.2494091987609863,
      "learning_rate": 7.896044192366586e-07,
      "loss": 0.2477,
      "step": 345
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.9337052702903748,
      "learning_rate": 5.492245726881201e-07,
      "loss": 0.453,
      "step": 350
    },
    {
      "epoch": 0.95,
      "grad_norm": 1.0654840469360352,
      "learning_rate": 3.51969352425624e-07,
      "loss": 0.4833,
      "step": 355
    },
    {
      "epoch": 0.96,
      "grad_norm": 1.7972078323364258,
      "learning_rate": 1.981866638839952e-07,
      "loss": 0.5507,
      "step": 360
    },
    {
      "epoch": 0.97,
      "grad_norm": 1.450646996498108,
      "learning_rate": 8.814773858275004e-08,
      "loss": 0.379,
      "step": 365
    },
    {
      "epoch": 0.99,
      "grad_norm": 1.490728497505188,
      "learning_rate": 2.2046655746280064e-08,
      "loss": 0.4705,
      "step": 370
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.9804399609565735,
      "learning_rate": 0.0,
      "loss": 0.4502,
      "step": 375
    },
    {
      "epoch": 1.0,
      "step": 375,
      "total_flos": 6.701975198539776e+16,
      "train_loss": 0.5547979205449423,
      "train_runtime": 23630.1849,
      "train_samples_per_second": 0.127,
      "train_steps_per_second": 0.016
    }
  ],
  "logging_steps": 5,
  "max_steps": 375,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 10,
  "total_flos": 6.701975198539776e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}