{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.4,
  "eval_steps": 500,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.004, "grad_norm": 172.0, "learning_rate": 7.2e-07, "loss": 15.7507, "step": 10 },
    { "epoch": 0.008, "grad_norm": 159.0, "learning_rate": 1.52e-06, "loss": 15.2909, "step": 20 },
    { "epoch": 0.012, "grad_norm": 101.5, "learning_rate": 2.3200000000000002e-06, "loss": 16.5977, "step": 30 },
    { "epoch": 0.016, "grad_norm": 67.0, "learning_rate": 3.12e-06, "loss": 14.8406, "step": 40 },
    { "epoch": 0.02, "grad_norm": 75.0, "learning_rate": 3.920000000000001e-06, "loss": 14.4386, "step": 50 },
    { "epoch": 0.024, "grad_norm": 148.0, "learning_rate": 4.7200000000000005e-06, "loss": 15.411, "step": 60 },
    { "epoch": 0.028, "grad_norm": 151.0, "learning_rate": 5.5200000000000005e-06, "loss": 14.8833, "step": 70 },
    { "epoch": 0.032, "grad_norm": 115.5, "learning_rate": 6.3200000000000005e-06, "loss": 15.34, "step": 80 },
    { "epoch": 0.036, "grad_norm": 96.5, "learning_rate": 7.1200000000000004e-06, "loss": 13.5816, "step": 90 },
    { "epoch": 0.04, "grad_norm": 160.0, "learning_rate": 7.92e-06, "loss": 14.3131, "step": 100 },
    { "epoch": 0.044, "grad_norm": 146.0, "learning_rate": 8.720000000000001e-06, "loss": 14.2578, "step": 110 },
    { "epoch": 0.048, "grad_norm": 194.0, "learning_rate": 9.52e-06, "loss": 14.2652, "step": 120 },
    { "epoch": 0.052, "grad_norm": 117.0, "learning_rate": 1.0320000000000001e-05, "loss": 13.4558, "step": 130 },
    { "epoch": 0.056, "grad_norm": 247.0, "learning_rate": 1.1120000000000002e-05, "loss": 15.4046, "step": 140 },
    { "epoch": 0.06, "grad_norm": 186.0, "learning_rate": 1.1920000000000001e-05, "loss": 15.9784, "step": 150 },
    { "epoch": 0.064, "grad_norm": 112.0, "learning_rate": 1.2720000000000002e-05, "loss": 16.3667, "step": 160 },
    { "epoch": 0.068, "grad_norm": 179.0, "learning_rate": 1.3520000000000003e-05, "loss": 15.2704, "step": 170 },
    { "epoch": 0.072, "grad_norm": 173.0, "learning_rate": 1.432e-05, "loss": 15.4664, "step": 180 },
    { "epoch": 0.076, "grad_norm": 211.0, "learning_rate": 1.5120000000000001e-05, "loss": 17.3243, "step": 190 },
    { "epoch": 0.08, "grad_norm": 223.0, "learning_rate": 1.5920000000000003e-05, "loss": 18.1649, "step": 200 },
    { "epoch": 0.084, "grad_norm": 116.0, "learning_rate": 1.672e-05, "loss": 16.1036, "step": 210 },
    { "epoch": 0.088, "grad_norm": 173.0, "learning_rate": 1.752e-05, "loss": 18.9891, "step": 220 },
    { "epoch": 0.092, "grad_norm": 196.0, "learning_rate": 1.832e-05, "loss": 17.4949, "step": 230 },
    { "epoch": 0.096, "grad_norm": 204.0, "learning_rate": 1.912e-05, "loss": 18.0245, "step": 240 },
    { "epoch": 0.1, "grad_norm": 264.0, "learning_rate": 1.9920000000000002e-05, "loss": 18.3813, "step": 250 },
    { "epoch": 0.104, "grad_norm": 274.0, "learning_rate": 1.9999210442038164e-05, "loss": 20.4446, "step": 260 },
    { "epoch": 0.108, "grad_norm": 224.0, "learning_rate": 1.9996481265944146e-05, "loss": 19.6429, "step": 270 },
    { "epoch": 0.112, "grad_norm": 153.0, "learning_rate": 1.9991803256020393e-05, "loss": 17.5789, "step": 280 },
    { "epoch": 0.116, "grad_norm": 187.0, "learning_rate": 1.99851773242542e-05, "loss": 17.5557, "step": 290 },
    { "epoch": 0.12, "grad_norm": 117.5, "learning_rate": 1.99766047623841e-05, "loss": 18.0131, "step": 300 },
    { "epoch": 0.124, "grad_norm": 203.0, "learning_rate": 1.996608724164801e-05, "loss": 18.125, "step": 310 },
    { "epoch": 0.128, "grad_norm": 334.0, "learning_rate": 1.995362681245744e-05, "loss": 18.0407, "step": 320 },
    { "epoch": 0.132, "grad_norm": 143.0, "learning_rate": 1.9939225903997748e-05, "loss": 17.6067, "step": 330 },
    { "epoch": 0.136, "grad_norm": 154.0, "learning_rate": 1.992288732375458e-05, "loss": 17.8174, "step": 340 },
    { "epoch": 0.14, "grad_norm": 179.0, "learning_rate": 1.9904614256966514e-05, "loss": 17.8999, "step": 350 },
    { "epoch": 0.144, "grad_norm": 162.0, "learning_rate": 1.9884410266004134e-05, "loss": 16.7203, "step": 360 },
    { "epoch": 0.148, "grad_norm": 208.0, "learning_rate": 1.986227928967551e-05, "loss": 16.5527, "step": 370 },
    { "epoch": 0.152, "grad_norm": 152.0, "learning_rate": 1.983822564245833e-05, "loss": 16.7008, "step": 380 },
    { "epoch": 0.156, "grad_norm": 180.0, "learning_rate": 1.981225401365877e-05, "loss": 16.7611, "step": 390 },
    { "epoch": 0.16, "grad_norm": 149.0, "learning_rate": 1.9784369466497333e-05, "loss": 17.153, "step": 400 },
    { "epoch": 0.164, "grad_norm": 191.0, "learning_rate": 1.9754577437121733e-05, "loss": 16.9309, "step": 410 },
    { "epoch": 0.168, "grad_norm": 106.5, "learning_rate": 1.9722883733547128e-05, "loss": 16.3782, "step": 420 },
    { "epoch": 0.172, "grad_norm": 106.0, "learning_rate": 1.968929453452383e-05, "loss": 16.4967, "step": 430 },
    { "epoch": 0.176, "grad_norm": 114.0, "learning_rate": 1.965381638833274e-05, "loss": 16.1854, "step": 440 },
    { "epoch": 0.18, "grad_norm": 129.0, "learning_rate": 1.9616456211508756e-05, "loss": 15.8639, "step": 450 },
    { "epoch": 0.184, "grad_norm": 151.0, "learning_rate": 1.9577221287492368e-05, "loss": 16.1214, "step": 460 },
    { "epoch": 0.188, "grad_norm": 127.5, "learning_rate": 1.9536119265209763e-05, "loss": 15.9259, "step": 470 },
    { "epoch": 0.192, "grad_norm": 135.0, "learning_rate": 1.9493158157581617e-05, "loss": 16.1412, "step": 480 },
    { "epoch": 0.196, "grad_norm": 161.0, "learning_rate": 1.9448346339960984e-05, "loss": 16.5188, "step": 490 },
    { "epoch": 0.2, "grad_norm": 127.0, "learning_rate": 1.9401692548500504e-05, "loss": 16.8208, "step": 500 },
    { "epoch": 0.204, "grad_norm": 160.0, "learning_rate": 1.935320587844926e-05, "loss": 16.2693, "step": 510 },
    { "epoch": 0.208, "grad_norm": 174.0, "learning_rate": 1.9302895782379648e-05, "loss": 17.073, "step": 520 },
    { "epoch": 0.212, "grad_norm": 151.0, "learning_rate": 1.925077206834458e-05, "loss": 16.9992, "step": 530 },
    { "epoch": 0.216, "grad_norm": 153.0, "learning_rate": 1.9196844897965393e-05, "loss": 16.4262, "step": 540 },
    { "epoch": 0.22, "grad_norm": 127.5, "learning_rate": 1.914112478445079e-05, "loss": 16.4771, "step": 550 },
    { "epoch": 0.224, "grad_norm": 139.0, "learning_rate": 1.9083622590547313e-05, "loss": 16.1604, "step": 560 },
    { "epoch": 0.228, "grad_norm": 330.0, "learning_rate": 1.9024349526421596e-05, "loss": 15.6431, "step": 570 },
    { "epoch": 0.232, "grad_norm": 143.0, "learning_rate": 1.896331714747493e-05, "loss": 15.7878, "step": 580 },
    { "epoch": 0.236, "grad_norm": 136.0, "learning_rate": 1.8900537352090523e-05, "loss": 16.6053, "step": 590 },
    { "epoch": 0.24, "grad_norm": 105.5, "learning_rate": 1.8836022379313884e-05, "loss": 16.7074, "step": 600 },
    { "epoch": 0.244, "grad_norm": 133.0, "learning_rate": 1.8769784806466768e-05, "loss": 15.4223, "step": 610 },
    { "epoch": 0.248, "grad_norm": 158.0, "learning_rate": 1.870183754669526e-05, "loss": 14.7611, "step": 620 },
    { "epoch": 0.252, "grad_norm": 114.5, "learning_rate": 1.863219384645227e-05, "loss": 15.5539, "step": 630 },
    { "epoch": 0.256, "grad_norm": 286.0, "learning_rate": 1.8560867282915164e-05, "loss": 15.8155, "step": 640 },
    { "epoch": 0.26, "grad_norm": 180.0, "learning_rate": 1.848787176133882e-05, "loss": 15.113, "step": 650 },
    { "epoch": 0.264, "grad_norm": 126.0, "learning_rate": 1.8413221512344805e-05, "loss": 15.6207, "step": 660 },
    { "epoch": 0.268, "grad_norm": 145.0, "learning_rate": 1.8336931089147076e-05, "loss": 15.9571, "step": 670 },
    { "epoch": 0.272, "grad_norm": 103.0, "learning_rate": 1.8259015364714786e-05, "loss": 15.5162, "step": 680 },
    { "epoch": 0.276, "grad_norm": 143.0, "learning_rate": 1.8179489528872808e-05, "loss": 15.087, "step": 690 },
    { "epoch": 0.28, "grad_norm": 94.0, "learning_rate": 1.80983690853404e-05, "loss": 15.12, "step": 700 },
    { "epoch": 0.284, "grad_norm": 176.0, "learning_rate": 1.8015669848708768e-05, "loss": 14.7516, "step": 710 },
    { "epoch": 0.288, "grad_norm": 114.5, "learning_rate": 1.793140794135795e-05, "loss": 14.9079, "step": 720 },
    { "epoch": 0.292, "grad_norm": 115.0, "learning_rate": 1.7845599790313735e-05, "loss": 16.3664, "step": 730 },
    { "epoch": 0.296, "grad_norm": 104.0, "learning_rate": 1.7758262124045195e-05, "loss": 15.3708, "step": 740 },
    { "epoch": 0.3, "grad_norm": 115.0, "learning_rate": 1.7669411969203417e-05, "loss": 15.4655, "step": 750 },
    { "epoch": 0.304, "grad_norm": 107.5, "learning_rate": 1.7579066647302134e-05, "loss": 15.8288, "step": 760 },
    { "epoch": 0.308, "grad_norm": 234.0, "learning_rate": 1.7487243771340862e-05, "loss": 14.6044, "step": 770 },
    { "epoch": 0.312, "grad_norm": 156.0, "learning_rate": 1.7393961242371203e-05, "loss": 16.0751, "step": 780 },
    { "epoch": 0.316, "grad_norm": 90.0, "learning_rate": 1.7299237246007018e-05, "loss": 14.7736, "step": 790 },
    { "epoch": 0.32, "grad_norm": 134.0, "learning_rate": 1.720309024887907e-05, "loss": 15.4201, "step": 800 },
    { "epoch": 0.324, "grad_norm": 106.0, "learning_rate": 1.710553899503496e-05, "loss": 15.3491, "step": 810 },
    { "epoch": 0.328, "grad_norm": 92.5, "learning_rate": 1.700660250228492e-05, "loss": 14.3211, "step": 820 },
    { "epoch": 0.332, "grad_norm": 121.0, "learning_rate": 1.690630005849423e-05, "loss": 15.5088, "step": 830 },
    { "epoch": 0.336, "grad_norm": 360.0, "learning_rate": 1.6804651217823055e-05, "loss": 14.8252, "step": 840 },
    { "epoch": 0.34, "grad_norm": 125.5, "learning_rate": 1.6701675796914284e-05, "loss": 14.4015, "step": 850 },
    { "epoch": 0.344, "grad_norm": 103.5, "learning_rate": 1.6597393871030264e-05, "loss": 14.2896, "step": 860 },
    { "epoch": 0.348, "grad_norm": 119.0, "learning_rate": 1.649182577013906e-05, "loss": 14.2633, "step": 870 },
    { "epoch": 0.352, "grad_norm": 128.0, "learning_rate": 1.6384992074951124e-05, "loss": 14.7707, "step": 880 },
    { "epoch": 0.356, "grad_norm": 151.0, "learning_rate": 1.6276913612907005e-05, "loss": 14.7049, "step": 890 },
    { "epoch": 0.36, "grad_norm": 127.0, "learning_rate": 1.6167611454117027e-05, "loss": 14.4943, "step": 900 },
    { "epoch": 0.364, "grad_norm": 109.0, "learning_rate": 1.6057106907253617e-05, "loss": 16.8878, "step": 910 },
    { "epoch": 0.368, "grad_norm": 121.5, "learning_rate": 1.5945421515397135e-05, "loss": 13.8002, "step": 920 },
    { "epoch": 0.372, "grad_norm": 96.5, "learning_rate": 1.5832577051836016e-05, "loss": 13.8812, "step": 930 },
    { "epoch": 0.376, "grad_norm": 88.5, "learning_rate": 1.5718595515822027e-05, "loss": 14.1607, "step": 940 },
    { "epoch": 0.38, "grad_norm": 179.0, "learning_rate": 1.5603499128281447e-05, "loss": 14.1383, "step": 950 },
    { "epoch": 0.384, "grad_norm": 96.5, "learning_rate": 1.5487310327483087e-05, "loss": 13.425, "step": 960 },
    { "epoch": 0.388, "grad_norm": 89.0, "learning_rate": 1.5370051764663872e-05, "loss": 13.8526, "step": 970 },
    { "epoch": 0.392, "grad_norm": 94.5, "learning_rate": 1.5251746299612959e-05, "loss": 13.1624, "step": 980 },
    { "epoch": 0.396, "grad_norm": 122.5, "learning_rate": 1.5132416996215171e-05, "loss": 12.7877, "step": 990 },
    { "epoch": 0.4, "grad_norm": 104.0, "learning_rate": 1.5012087117954643e-05, "loss": 13.0672, "step": 1000 }
  ],
  "logging_steps": 10,
  "max_steps": 2500,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 7.356026995081216e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}