|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.136986301369863,
  "eval_steps": 500,
  "global_step": 750,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0684931506849315,
      "grad_norm": 6.335273265838623,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.954,
      "step": 10
    },
    {
      "epoch": 0.136986301369863,
      "grad_norm": 2.381432056427002,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 0.7291,
      "step": 20
    },
    {
      "epoch": 0.2054794520547945,
      "grad_norm": 2.1213607788085938,
      "learning_rate": 2.4e-05,
      "loss": 0.3867,
      "step": 30
    },
    {
      "epoch": 0.273972602739726,
      "grad_norm": 1.0599077939987183,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 0.2747,
      "step": 40
    },
    {
      "epoch": 0.3424657534246575,
      "grad_norm": 1.1717661619186401,
      "learning_rate": 4e-05,
      "loss": 0.2013,
      "step": 50
    },
    {
      "epoch": 0.410958904109589,
      "grad_norm": 1.1004457473754883,
      "learning_rate": 4.8e-05,
      "loss": 0.1569,
      "step": 60
    },
    {
      "epoch": 0.4794520547945205,
      "grad_norm": 1.0647892951965332,
      "learning_rate": 5.6000000000000006e-05,
      "loss": 0.1391,
      "step": 70
    },
    {
      "epoch": 0.547945205479452,
      "grad_norm": 0.9715467095375061,
      "learning_rate": 6.400000000000001e-05,
      "loss": 0.1181,
      "step": 80
    },
    {
      "epoch": 0.6164383561643836,
      "grad_norm": 0.8772512674331665,
      "learning_rate": 7.2e-05,
      "loss": 0.1051,
      "step": 90
    },
    {
      "epoch": 0.684931506849315,
      "grad_norm": 1.2750014066696167,
      "learning_rate": 8e-05,
      "loss": 0.0947,
      "step": 100
    },
    {
      "epoch": 0.7534246575342466,
      "grad_norm": 0.6222656965255737,
      "learning_rate": 8.800000000000001e-05,
      "loss": 0.0978,
      "step": 110
    },
    {
      "epoch": 0.821917808219178,
      "grad_norm": 0.6259370446205139,
      "learning_rate": 9.6e-05,
      "loss": 0.0781,
      "step": 120
    },
    {
      "epoch": 0.8904109589041096,
      "grad_norm": 0.6297749876976013,
      "learning_rate": 9.999890641901125e-05,
      "loss": 0.0738,
      "step": 130
    },
    {
      "epoch": 0.958904109589041,
      "grad_norm": 0.4628015160560608,
      "learning_rate": 9.999015805811965e-05,
      "loss": 0.0748,
      "step": 140
    },
    {
      "epoch": 1.0273972602739727,
      "grad_norm": 0.6147599816322327,
      "learning_rate": 9.997266286704631e-05,
      "loss": 0.0641,
      "step": 150
    },
    {
      "epoch": 1.095890410958904,
      "grad_norm": 0.5399206280708313,
      "learning_rate": 9.994642390694308e-05,
      "loss": 0.0662,
      "step": 160
    },
    {
      "epoch": 1.1643835616438356,
      "grad_norm": 0.44403907656669617,
      "learning_rate": 9.991144576886823e-05,
      "loss": 0.0592,
      "step": 170
    },
    {
      "epoch": 1.2328767123287672,
      "grad_norm": 0.571832537651062,
      "learning_rate": 9.986773457298311e-05,
      "loss": 0.0556,
      "step": 180
    },
    {
      "epoch": 1.3013698630136985,
      "grad_norm": 0.561576247215271,
      "learning_rate": 9.981529796748134e-05,
      "loss": 0.0596,
      "step": 190
    },
    {
      "epoch": 1.36986301369863,
      "grad_norm": 0.4030166268348694,
      "learning_rate": 9.975414512725057e-05,
      "loss": 0.0539,
      "step": 200
    },
    {
      "epoch": 1.4383561643835616,
      "grad_norm": 0.34468358755111694,
      "learning_rate": 9.968428675226714e-05,
      "loss": 0.0577,
      "step": 210
    },
    {
      "epoch": 1.5068493150684932,
      "grad_norm": 0.483900785446167,
      "learning_rate": 9.96057350657239e-05,
      "loss": 0.052,
      "step": 220
    },
    {
      "epoch": 1.5753424657534247,
      "grad_norm": 0.4898487627506256,
      "learning_rate": 9.95185038118915e-05,
      "loss": 0.0477,
      "step": 230
    },
    {
      "epoch": 1.643835616438356,
      "grad_norm": 0.5045291781425476,
      "learning_rate": 9.942260825371358e-05,
      "loss": 0.0528,
      "step": 240
    },
    {
      "epoch": 1.7123287671232876,
      "grad_norm": 0.5695160031318665,
      "learning_rate": 9.931806517013612e-05,
      "loss": 0.0439,
      "step": 250
    },
    {
      "epoch": 1.7808219178082192,
      "grad_norm": 0.33678483963012695,
      "learning_rate": 9.92048928531717e-05,
      "loss": 0.0488,
      "step": 260
    },
    {
      "epoch": 1.8493150684931505,
      "grad_norm": 0.3467772305011749,
      "learning_rate": 9.90831111046988e-05,
      "loss": 0.0448,
      "step": 270
    },
    {
      "epoch": 1.9178082191780823,
      "grad_norm": 0.4071213901042938,
      "learning_rate": 9.895274123299723e-05,
      "loss": 0.0431,
      "step": 280
    },
    {
      "epoch": 1.9863013698630136,
      "grad_norm": 0.30543234944343567,
      "learning_rate": 9.881380604901964e-05,
      "loss": 0.0429,
      "step": 290
    },
    {
      "epoch": 2.0547945205479454,
      "grad_norm": 0.6378459334373474,
      "learning_rate": 9.86663298624003e-05,
      "loss": 0.0464,
      "step": 300
    },
    {
      "epoch": 2.1232876712328768,
      "grad_norm": 0.4035351276397705,
      "learning_rate": 9.851033847720166e-05,
      "loss": 0.0488,
      "step": 310
    },
    {
      "epoch": 2.191780821917808,
      "grad_norm": 0.3094054162502289,
      "learning_rate": 9.834585918739936e-05,
      "loss": 0.0364,
      "step": 320
    },
    {
      "epoch": 2.26027397260274,
      "grad_norm": 0.45768025517463684,
      "learning_rate": 9.817292077210659e-05,
      "loss": 0.0373,
      "step": 330
    },
    {
      "epoch": 2.328767123287671,
      "grad_norm": 0.504475474357605,
      "learning_rate": 9.799155349053851e-05,
      "loss": 0.047,
      "step": 340
    },
    {
      "epoch": 2.3972602739726026,
      "grad_norm": 0.36838439106941223,
      "learning_rate": 9.780178907671789e-05,
      "loss": 0.0396,
      "step": 350
    },
    {
      "epoch": 2.4657534246575343,
      "grad_norm": 0.3892686665058136,
      "learning_rate": 9.760366073392246e-05,
      "loss": 0.04,
      "step": 360
    },
    {
      "epoch": 2.5342465753424657,
      "grad_norm": 0.376288503408432,
      "learning_rate": 9.739720312887535e-05,
      "loss": 0.0352,
      "step": 370
    },
    {
      "epoch": 2.602739726027397,
      "grad_norm": 0.33039844036102295,
      "learning_rate": 9.718245238567939e-05,
      "loss": 0.0364,
      "step": 380
    },
    {
      "epoch": 2.671232876712329,
      "grad_norm": 0.38675811886787415,
      "learning_rate": 9.695944607949649e-05,
      "loss": 0.0367,
      "step": 390
    },
    {
      "epoch": 2.73972602739726,
      "grad_norm": 0.3453963100910187,
      "learning_rate": 9.672822322997305e-05,
      "loss": 0.0337,
      "step": 400
    },
    {
      "epoch": 2.808219178082192,
      "grad_norm": 0.5285444259643555,
      "learning_rate": 9.648882429441257e-05,
      "loss": 0.0414,
      "step": 410
    },
    {
      "epoch": 2.8767123287671232,
      "grad_norm": 0.38015297055244446,
      "learning_rate": 9.624129116069694e-05,
      "loss": 0.0348,
      "step": 420
    },
    {
      "epoch": 2.9452054794520546,
      "grad_norm": 0.44161203503608704,
      "learning_rate": 9.598566713995718e-05,
      "loss": 0.0377,
      "step": 430
    },
    {
      "epoch": 3.0136986301369864,
      "grad_norm": 0.4227602779865265,
      "learning_rate": 9.572199695899522e-05,
      "loss": 0.0323,
      "step": 440
    },
    {
      "epoch": 3.0821917808219177,
      "grad_norm": 0.2722010314464569,
      "learning_rate": 9.545032675245813e-05,
      "loss": 0.0345,
      "step": 450
    },
    {
      "epoch": 3.1506849315068495,
      "grad_norm": 0.4375353753566742,
      "learning_rate": 9.517070405476575e-05,
      "loss": 0.0315,
      "step": 460
    },
    {
      "epoch": 3.219178082191781,
      "grad_norm": 0.4301266372203827,
      "learning_rate": 9.488317779179361e-05,
      "loss": 0.0348,
      "step": 470
    },
    {
      "epoch": 3.287671232876712,
      "grad_norm": 0.458469957113266,
      "learning_rate": 9.458779827231237e-05,
      "loss": 0.0348,
      "step": 480
    },
    {
      "epoch": 3.356164383561644,
      "grad_norm": 0.3810347616672516,
      "learning_rate": 9.428461717918511e-05,
      "loss": 0.0304,
      "step": 490
    },
    {
      "epoch": 3.4246575342465753,
      "grad_norm": 0.3728407919406891,
      "learning_rate": 9.397368756032445e-05,
      "loss": 0.0332,
      "step": 500
    },
    {
      "epoch": 3.493150684931507,
      "grad_norm": 0.3293081223964691,
      "learning_rate": 9.365506381941066e-05,
      "loss": 0.0305,
      "step": 510
    },
    {
      "epoch": 3.5616438356164384,
      "grad_norm": 0.35907241702079773,
      "learning_rate": 9.332880170637252e-05,
      "loss": 0.0319,
      "step": 520
    },
    {
      "epoch": 3.6301369863013697,
      "grad_norm": 0.3396986126899719,
      "learning_rate": 9.299495830763286e-05,
      "loss": 0.0319,
      "step": 530
    },
    {
      "epoch": 3.6986301369863015,
      "grad_norm": 0.22632896900177002,
      "learning_rate": 9.265359203611987e-05,
      "loss": 0.0318,
      "step": 540
    },
    {
      "epoch": 3.767123287671233,
      "grad_norm": 0.2579493522644043,
      "learning_rate": 9.230476262104677e-05,
      "loss": 0.0295,
      "step": 550
    },
    {
      "epoch": 3.8356164383561646,
      "grad_norm": 0.2979259788990021,
      "learning_rate": 9.194853109746074e-05,
      "loss": 0.0317,
      "step": 560
    },
    {
      "epoch": 3.904109589041096,
      "grad_norm": 0.28193944692611694,
      "learning_rate": 9.158495979556358e-05,
      "loss": 0.0324,
      "step": 570
    },
    {
      "epoch": 3.9726027397260273,
      "grad_norm": 0.30196675658226013,
      "learning_rate": 9.121411232980588e-05,
      "loss": 0.0282,
      "step": 580
    },
    {
      "epoch": 4.041095890410959,
      "grad_norm": 0.2790427505970001,
      "learning_rate": 9.083605358775612e-05,
      "loss": 0.0322,
      "step": 590
    },
    {
      "epoch": 4.109589041095891,
      "grad_norm": 0.3180605173110962,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.0311,
      "step": 600
    },
    {
      "epoch": 4.178082191780822,
      "grad_norm": 0.23422259092330933,
      "learning_rate": 9.005856812230304e-05,
      "loss": 0.0313,
      "step": 610
    },
    {
      "epoch": 4.2465753424657535,
      "grad_norm": 0.1887388378381729,
      "learning_rate": 8.965927743634391e-05,
      "loss": 0.0297,
      "step": 620
    },
    {
      "epoch": 4.315068493150685,
      "grad_norm": 0.21844805777072906,
      "learning_rate": 8.92530475251784e-05,
      "loss": 0.0306,
      "step": 630
    },
    {
      "epoch": 4.383561643835616,
      "grad_norm": 0.45273661613464355,
      "learning_rate": 8.883994946727849e-05,
      "loss": 0.0292,
      "step": 640
    },
    {
      "epoch": 4.4520547945205475,
      "grad_norm": 0.39670127630233765,
      "learning_rate": 8.842005554284296e-05,
      "loss": 0.0331,
      "step": 650
    },
    {
      "epoch": 4.52054794520548,
      "grad_norm": 0.25612273812294006,
      "learning_rate": 8.799343922115044e-05,
      "loss": 0.0305,
      "step": 660
    },
    {
      "epoch": 4.589041095890411,
      "grad_norm": 0.22720636427402496,
      "learning_rate": 8.756017514770443e-05,
      "loss": 0.0284,
      "step": 670
    },
    {
      "epoch": 4.657534246575342,
      "grad_norm": 0.39644864201545715,
      "learning_rate": 8.71203391311725e-05,
      "loss": 0.0286,
      "step": 680
    },
    {
      "epoch": 4.726027397260274,
      "grad_norm": 0.4030825197696686,
      "learning_rate": 8.6674008130122e-05,
      "loss": 0.0268,
      "step": 690
    },
    {
      "epoch": 4.794520547945205,
      "grad_norm": 0.33811622858047485,
      "learning_rate": 8.622126023955446e-05,
      "loss": 0.0274,
      "step": 700
    },
    {
      "epoch": 4.863013698630137,
      "grad_norm": 0.27626654505729675,
      "learning_rate": 8.576217467724128e-05,
      "loss": 0.0267,
      "step": 710
    },
    {
      "epoch": 4.931506849315069,
      "grad_norm": 0.3249574899673462,
      "learning_rate": 8.529683176986295e-05,
      "loss": 0.0297,
      "step": 720
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.6223666071891785,
      "learning_rate": 8.482531293895412e-05,
      "loss": 0.0289,
      "step": 730
    },
    {
      "epoch": 5.068493150684931,
      "grad_norm": 0.4200395941734314,
      "learning_rate": 8.434770068665723e-05,
      "loss": 0.0293,
      "step": 740
    },
    {
      "epoch": 5.136986301369863,
      "grad_norm": 0.33773040771484375,
      "learning_rate": 8.386407858128706e-05,
      "loss": 0.0243,
      "step": 750
    }
  ],
  "logging_steps": 10,
  "max_steps": 2500,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 18,
  "save_steps": 125,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.0243918948403232e+17,
  "train_batch_size": 64,
  "trial_name": null,
  "trial_params": null
}
|
|