dmWM-mistralai-Ministral-8B-Instruct-2410-LucieFr-Al4-OWT-d4-a0.1-v2/checkpoint-2000/trainer_state.json
{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.8,
  "eval_steps": 500,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
"log_history": [ | |
{ | |
"epoch": 0.004, | |
"grad_norm": 192.0, | |
"learning_rate": 7.2e-07, | |
"loss": 17.8929, | |
"step": 10 | |
}, | |
{ | |
"epoch": 0.008, | |
"grad_norm": 116.5, | |
"learning_rate": 1.52e-06, | |
"loss": 17.1894, | |
"step": 20 | |
}, | |
{ | |
"epoch": 0.012, | |
"grad_norm": 93.5, | |
"learning_rate": 2.3200000000000002e-06, | |
"loss": 17.1662, | |
"step": 30 | |
}, | |
{ | |
"epoch": 0.016, | |
"grad_norm": 59.0, | |
"learning_rate": 3.12e-06, | |
"loss": 15.5562, | |
"step": 40 | |
}, | |
{ | |
"epoch": 0.02, | |
"grad_norm": 93.5, | |
"learning_rate": 3.920000000000001e-06, | |
"loss": 14.6218, | |
"step": 50 | |
}, | |
{ | |
"epoch": 0.024, | |
"grad_norm": 122.5, | |
"learning_rate": 4.7200000000000005e-06, | |
"loss": 14.0262, | |
"step": 60 | |
}, | |
{ | |
"epoch": 0.028, | |
"grad_norm": 135.0, | |
"learning_rate": 5.5200000000000005e-06, | |
"loss": 14.516, | |
"step": 70 | |
}, | |
{ | |
"epoch": 0.032, | |
"grad_norm": 211.0, | |
"learning_rate": 6.3200000000000005e-06, | |
"loss": 13.598, | |
"step": 80 | |
}, | |
{ | |
"epoch": 0.036, | |
"grad_norm": 172.0, | |
"learning_rate": 7.1200000000000004e-06, | |
"loss": 14.4849, | |
"step": 90 | |
}, | |
{ | |
"epoch": 0.04, | |
"grad_norm": 183.0, | |
"learning_rate": 7.92e-06, | |
"loss": 13.3844, | |
"step": 100 | |
}, | |
{ | |
"epoch": 0.044, | |
"grad_norm": 144.0, | |
"learning_rate": 8.720000000000001e-06, | |
"loss": 13.5488, | |
"step": 110 | |
}, | |
{ | |
"epoch": 0.048, | |
"grad_norm": 153.0, | |
"learning_rate": 9.52e-06, | |
"loss": 14.8995, | |
"step": 120 | |
}, | |
{ | |
"epoch": 0.052, | |
"grad_norm": 156.0, | |
"learning_rate": 1.0320000000000001e-05, | |
"loss": 14.0248, | |
"step": 130 | |
}, | |
{ | |
"epoch": 0.056, | |
"grad_norm": 276.0, | |
"learning_rate": 1.1120000000000002e-05, | |
"loss": 14.2637, | |
"step": 140 | |
}, | |
{ | |
"epoch": 0.06, | |
"grad_norm": 176.0, | |
"learning_rate": 1.1920000000000001e-05, | |
"loss": 15.7517, | |
"step": 150 | |
}, | |
{ | |
"epoch": 0.064, | |
"grad_norm": 178.0, | |
"learning_rate": 1.2720000000000002e-05, | |
"loss": 15.2519, | |
"step": 160 | |
}, | |
{ | |
"epoch": 0.068, | |
"grad_norm": 5408.0, | |
"learning_rate": 1.3520000000000003e-05, | |
"loss": 51.245, | |
"step": 170 | |
}, | |
{ | |
"epoch": 0.072, | |
"grad_norm": 576.0, | |
"learning_rate": 1.432e-05, | |
"loss": 23.1949, | |
"step": 180 | |
}, | |
{ | |
"epoch": 0.076, | |
"grad_norm": 402.0, | |
"learning_rate": 1.5120000000000001e-05, | |
"loss": 18.3584, | |
"step": 190 | |
}, | |
{ | |
"epoch": 0.08, | |
"grad_norm": 178.0, | |
"learning_rate": 1.5920000000000003e-05, | |
"loss": 17.5383, | |
"step": 200 | |
}, | |
{ | |
"epoch": 0.084, | |
"grad_norm": 2064.0, | |
"learning_rate": 1.672e-05, | |
"loss": 22.9987, | |
"step": 210 | |
}, | |
{ | |
"epoch": 0.088, | |
"grad_norm": 190.0, | |
"learning_rate": 1.752e-05, | |
"loss": 19.4033, | |
"step": 220 | |
}, | |
{ | |
"epoch": 0.092, | |
"grad_norm": 150.0, | |
"learning_rate": 1.832e-05, | |
"loss": 15.7816, | |
"step": 230 | |
}, | |
{ | |
"epoch": 0.096, | |
"grad_norm": 182.0, | |
"learning_rate": 1.912e-05, | |
"loss": 20.0571, | |
"step": 240 | |
}, | |
{ | |
"epoch": 0.1, | |
"grad_norm": 302.0, | |
"learning_rate": 1.9920000000000002e-05, | |
"loss": 19.3505, | |
"step": 250 | |
}, | |
{ | |
"epoch": 0.104, | |
"grad_norm": 246.0, | |
"learning_rate": 1.9999210442038164e-05, | |
"loss": 19.3353, | |
"step": 260 | |
}, | |
{ | |
"epoch": 0.108, | |
"grad_norm": 139.0, | |
"learning_rate": 1.9996481265944146e-05, | |
"loss": 17.5568, | |
"step": 270 | |
}, | |
{ | |
"epoch": 0.112, | |
"grad_norm": 314.0, | |
"learning_rate": 1.9991803256020393e-05, | |
"loss": 24.114, | |
"step": 280 | |
}, | |
{ | |
"epoch": 0.116, | |
"grad_norm": 168.0, | |
"learning_rate": 1.99851773242542e-05, | |
"loss": 24.3222, | |
"step": 290 | |
}, | |
{ | |
"epoch": 0.12, | |
"grad_norm": 157.0, | |
"learning_rate": 1.99766047623841e-05, | |
"loss": 17.1357, | |
"step": 300 | |
}, | |
{ | |
"epoch": 0.124, | |
"grad_norm": 243.0, | |
"learning_rate": 1.996608724164801e-05, | |
"loss": 17.5944, | |
"step": 310 | |
}, | |
{ | |
"epoch": 0.128, | |
"grad_norm": 168.0, | |
"learning_rate": 1.995362681245744e-05, | |
"loss": 17.9973, | |
"step": 320 | |
}, | |
{ | |
"epoch": 0.132, | |
"grad_norm": 129.0, | |
"learning_rate": 1.9939225903997748e-05, | |
"loss": 16.3556, | |
"step": 330 | |
}, | |
{ | |
"epoch": 0.136, | |
"grad_norm": 109.0, | |
"learning_rate": 1.992288732375458e-05, | |
"loss": 15.275, | |
"step": 340 | |
}, | |
{ | |
"epoch": 0.14, | |
"grad_norm": 148.0, | |
"learning_rate": 1.9904614256966514e-05, | |
"loss": 16.484, | |
"step": 350 | |
}, | |
{ | |
"epoch": 0.144, | |
"grad_norm": 139.0, | |
"learning_rate": 1.9884410266004134e-05, | |
"loss": 17.0713, | |
"step": 360 | |
}, | |
{ | |
"epoch": 0.148, | |
"grad_norm": 252.0, | |
"learning_rate": 1.986227928967551e-05, | |
"loss": 17.0777, | |
"step": 370 | |
}, | |
{ | |
"epoch": 0.152, | |
"grad_norm": 160.0, | |
"learning_rate": 1.983822564245833e-05, | |
"loss": 16.9359, | |
"step": 380 | |
}, | |
{ | |
"epoch": 0.156, | |
"grad_norm": 128.0, | |
"learning_rate": 1.981225401365877e-05, | |
"loss": 33.823, | |
"step": 390 | |
}, | |
{ | |
"epoch": 0.16, | |
"grad_norm": 142.0, | |
"learning_rate": 1.9784369466497333e-05, | |
"loss": 17.8366, | |
"step": 400 | |
}, | |
{ | |
"epoch": 0.164, | |
"grad_norm": 604.0, | |
"learning_rate": 1.9754577437121733e-05, | |
"loss": 17.0926, | |
"step": 410 | |
}, | |
{ | |
"epoch": 0.168, | |
"grad_norm": 138.0, | |
"learning_rate": 1.9722883733547128e-05, | |
"loss": 25.0166, | |
"step": 420 | |
}, | |
{ | |
"epoch": 0.172, | |
"grad_norm": 140.0, | |
"learning_rate": 1.968929453452383e-05, | |
"loss": 16.1607, | |
"step": 430 | |
}, | |
{ | |
"epoch": 0.176, | |
"grad_norm": 100.0, | |
"learning_rate": 1.965381638833274e-05, | |
"loss": 15.8693, | |
"step": 440 | |
}, | |
{ | |
"epoch": 0.18, | |
"grad_norm": 127.5, | |
"learning_rate": 1.9616456211508756e-05, | |
"loss": 16.6326, | |
"step": 450 | |
}, | |
{ | |
"epoch": 0.184, | |
"grad_norm": 136.0, | |
"learning_rate": 1.9577221287492368e-05, | |
"loss": 16.4813, | |
"step": 460 | |
}, | |
{ | |
"epoch": 0.188, | |
"grad_norm": 175.0, | |
"learning_rate": 1.9536119265209763e-05, | |
"loss": 16.9464, | |
"step": 470 | |
}, | |
{ | |
"epoch": 0.192, | |
"grad_norm": 128.0, | |
"learning_rate": 1.9493158157581617e-05, | |
"loss": 16.8985, | |
"step": 480 | |
}, | |
{ | |
"epoch": 0.196, | |
"grad_norm": 141.0, | |
"learning_rate": 1.9448346339960984e-05, | |
"loss": 16.1715, | |
"step": 490 | |
}, | |
{ | |
"epoch": 0.2, | |
"grad_norm": 112.0, | |
"learning_rate": 1.9401692548500504e-05, | |
"loss": 15.8461, | |
"step": 500 | |
}, | |
{ | |
"epoch": 0.204, | |
"grad_norm": 145.0, | |
"learning_rate": 1.935320587844926e-05, | |
"loss": 16.108, | |
"step": 510 | |
}, | |
{ | |
"epoch": 0.208, | |
"grad_norm": 136.0, | |
"learning_rate": 1.9302895782379648e-05, | |
"loss": 15.7665, | |
"step": 520 | |
}, | |
{ | |
"epoch": 0.212, | |
"grad_norm": 112.0, | |
"learning_rate": 1.925077206834458e-05, | |
"loss": 14.6068, | |
"step": 530 | |
}, | |
{ | |
"epoch": 0.216, | |
"grad_norm": 93.0, | |
"learning_rate": 1.9196844897965393e-05, | |
"loss": 15.5989, | |
"step": 540 | |
}, | |
{ | |
"epoch": 0.22, | |
"grad_norm": 121.0, | |
"learning_rate": 1.914112478445079e-05, | |
"loss": 15.9623, | |
"step": 550 | |
}, | |
{ | |
"epoch": 0.224, | |
"grad_norm": 95.0, | |
"learning_rate": 1.9083622590547313e-05, | |
"loss": 15.6861, | |
"step": 560 | |
}, | |
{ | |
"epoch": 0.228, | |
"grad_norm": 114.5, | |
"learning_rate": 1.9024349526421596e-05, | |
"loss": 15.4145, | |
"step": 570 | |
}, | |
{ | |
"epoch": 0.232, | |
"grad_norm": 114.5, | |
"learning_rate": 1.896331714747493e-05, | |
"loss": 14.4382, | |
"step": 580 | |
}, | |
{ | |
"epoch": 0.236, | |
"grad_norm": 111.0, | |
"learning_rate": 1.8900537352090523e-05, | |
"loss": 14.8372, | |
"step": 590 | |
}, | |
{ | |
"epoch": 0.24, | |
"grad_norm": 129.0, | |
"learning_rate": 1.8836022379313884e-05, | |
"loss": 16.0596, | |
"step": 600 | |
}, | |
{ | |
"epoch": 0.244, | |
"grad_norm": 97.0, | |
"learning_rate": 1.8769784806466768e-05, | |
"loss": 14.8813, | |
"step": 610 | |
}, | |
{ | |
"epoch": 0.248, | |
"grad_norm": 97.0, | |
"learning_rate": 1.870183754669526e-05, | |
"loss": 13.9936, | |
"step": 620 | |
}, | |
{ | |
"epoch": 0.252, | |
"grad_norm": 111.5, | |
"learning_rate": 1.863219384645227e-05, | |
"loss": 14.9816, | |
"step": 630 | |
}, | |
{ | |
"epoch": 0.256, | |
"grad_norm": 108.5, | |
"learning_rate": 1.8560867282915164e-05, | |
"loss": 14.2031, | |
"step": 640 | |
}, | |
{ | |
"epoch": 0.26, | |
"grad_norm": 104.0, | |
"learning_rate": 1.848787176133882e-05, | |
"loss": 14.7739, | |
"step": 650 | |
}, | |
{ | |
"epoch": 0.264, | |
"grad_norm": 89.0, | |
"learning_rate": 1.8413221512344805e-05, | |
"loss": 14.7629, | |
"step": 660 | |
}, | |
{ | |
"epoch": 0.268, | |
"grad_norm": 113.5, | |
"learning_rate": 1.8336931089147076e-05, | |
"loss": 14.6109, | |
"step": 670 | |
}, | |
{ | |
"epoch": 0.272, | |
"grad_norm": 97.5, | |
"learning_rate": 1.8259015364714786e-05, | |
"loss": 15.1143, | |
"step": 680 | |
}, | |
{ | |
"epoch": 0.276, | |
"grad_norm": 108.0, | |
"learning_rate": 1.8179489528872808e-05, | |
"loss": 14.3949, | |
"step": 690 | |
}, | |
{ | |
"epoch": 0.28, | |
"grad_norm": 111.5, | |
"learning_rate": 1.80983690853404e-05, | |
"loss": 14.4273, | |
"step": 700 | |
}, | |
{ | |
"epoch": 0.284, | |
"grad_norm": 112.5, | |
"learning_rate": 1.8015669848708768e-05, | |
"loss": 13.8492, | |
"step": 710 | |
}, | |
{ | |
"epoch": 0.288, | |
"grad_norm": 119.5, | |
"learning_rate": 1.793140794135795e-05, | |
"loss": 14.5132, | |
"step": 720 | |
}, | |
{ | |
"epoch": 0.292, | |
"grad_norm": 100.0, | |
"learning_rate": 1.7845599790313735e-05, | |
"loss": 13.9506, | |
"step": 730 | |
}, | |
{ | |
"epoch": 0.296, | |
"grad_norm": 133.0, | |
"learning_rate": 1.7758262124045195e-05, | |
"loss": 13.6922, | |
"step": 740 | |
}, | |
{ | |
"epoch": 0.3, | |
"grad_norm": 100.5, | |
"learning_rate": 1.7669411969203417e-05, | |
"loss": 14.3881, | |
"step": 750 | |
}, | |
{ | |
"epoch": 0.304, | |
"grad_norm": 128.0, | |
"learning_rate": 1.7579066647302134e-05, | |
"loss": 14.2717, | |
"step": 760 | |
}, | |
{ | |
"epoch": 0.308, | |
"grad_norm": 98.0, | |
"learning_rate": 1.7487243771340862e-05, | |
"loss": 13.3169, | |
"step": 770 | |
}, | |
{ | |
"epoch": 0.312, | |
"grad_norm": 95.0, | |
"learning_rate": 1.7393961242371203e-05, | |
"loss": 13.9998, | |
"step": 780 | |
}, | |
{ | |
"epoch": 0.316, | |
"grad_norm": 118.5, | |
"learning_rate": 1.7299237246007018e-05, | |
"loss": 13.8477, | |
"step": 790 | |
}, | |
{ | |
"epoch": 0.32, | |
"grad_norm": 83.0, | |
"learning_rate": 1.720309024887907e-05, | |
"loss": 13.4694, | |
"step": 800 | |
}, | |
{ | |
"epoch": 0.324, | |
"grad_norm": 119.0, | |
"learning_rate": 1.710553899503496e-05, | |
"loss": 14.0974, | |
"step": 810 | |
}, | |
{ | |
"epoch": 0.328, | |
"grad_norm": 93.0, | |
"learning_rate": 1.700660250228492e-05, | |
"loss": 13.7642, | |
"step": 820 | |
}, | |
{ | |
"epoch": 0.332, | |
"grad_norm": 113.0, | |
"learning_rate": 1.690630005849423e-05, | |
"loss": 13.5297, | |
"step": 830 | |
}, | |
{ | |
"epoch": 0.336, | |
"grad_norm": 98.0, | |
"learning_rate": 1.6804651217823055e-05, | |
"loss": 12.6107, | |
"step": 840 | |
}, | |
{ | |
"epoch": 0.34, | |
"grad_norm": 90.5, | |
"learning_rate": 1.6701675796914284e-05, | |
"loss": 13.7018, | |
"step": 850 | |
}, | |
{ | |
"epoch": 0.344, | |
"grad_norm": 84.5, | |
"learning_rate": 1.6597393871030264e-05, | |
"loss": 13.5795, | |
"step": 860 | |
}, | |
{ | |
"epoch": 0.348, | |
"grad_norm": 88.0, | |
"learning_rate": 1.649182577013906e-05, | |
"loss": 13.3089, | |
"step": 870 | |
}, | |
{ | |
"epoch": 0.352, | |
"grad_norm": 108.5, | |
"learning_rate": 1.6384992074951124e-05, | |
"loss": 14.8595, | |
"step": 880 | |
}, | |
{ | |
"epoch": 0.356, | |
"grad_norm": 109.5, | |
"learning_rate": 1.6276913612907005e-05, | |
"loss": 13.6668, | |
"step": 890 | |
}, | |
{ | |
"epoch": 0.36, | |
"grad_norm": 82.0, | |
"learning_rate": 1.6167611454117027e-05, | |
"loss": 14.3564, | |
"step": 900 | |
}, | |
{ | |
"epoch": 0.364, | |
"grad_norm": 88.5, | |
"learning_rate": 1.6057106907253617e-05, | |
"loss": 13.4427, | |
"step": 910 | |
}, | |
{ | |
"epoch": 0.368, | |
"grad_norm": 106.5, | |
"learning_rate": 1.5945421515397135e-05, | |
"loss": 12.7742, | |
"step": 920 | |
}, | |
{ | |
"epoch": 0.372, | |
"grad_norm": 98.0, | |
"learning_rate": 1.5832577051836016e-05, | |
"loss": 12.5311, | |
"step": 930 | |
}, | |
{ | |
"epoch": 0.376, | |
"grad_norm": 105.5, | |
"learning_rate": 1.5718595515822027e-05, | |
"loss": 13.2552, | |
"step": 940 | |
}, | |
{ | |
"epoch": 0.38, | |
"grad_norm": 104.0, | |
"learning_rate": 1.5603499128281447e-05, | |
"loss": 12.7764, | |
"step": 950 | |
}, | |
{ | |
"epoch": 0.384, | |
"grad_norm": 88.0, | |
"learning_rate": 1.5487310327483087e-05, | |
"loss": 12.5753, | |
"step": 960 | |
}, | |
{ | |
"epoch": 0.388, | |
"grad_norm": 88.0, | |
"learning_rate": 1.5370051764663872e-05, | |
"loss": 11.839, | |
"step": 970 | |
}, | |
{ | |
"epoch": 0.392, | |
"grad_norm": 79.0, | |
"learning_rate": 1.5251746299612959e-05, | |
"loss": 12.3413, | |
"step": 980 | |
}, | |
{ | |
"epoch": 0.396, | |
"grad_norm": 88.5, | |
"learning_rate": 1.5132416996215171e-05, | |
"loss": 11.7489, | |
"step": 990 | |
}, | |
{ | |
"epoch": 0.4, | |
"grad_norm": 79.0, | |
"learning_rate": 1.5012087117954643e-05, | |
"loss": 12.265, | |
"step": 1000 | |
}, | |
{ | |
"epoch": 0.404, | |
"grad_norm": 84.5, | |
"learning_rate": 1.4890780123379565e-05, | |
"loss": 12.1652, | |
"step": 1010 | |
}, | |
{ | |
"epoch": 0.408, | |
"grad_norm": 83.0, | |
"learning_rate": 1.4768519661528879e-05, | |
"loss": 11.4174, | |
"step": 1020 | |
}, | |
{ | |
"epoch": 0.412, | |
"grad_norm": 94.5, | |
"learning_rate": 1.464532956732188e-05, | |
"loss": 12.5315, | |
"step": 1030 | |
}, | |
{ | |
"epoch": 0.416, | |
"grad_norm": 87.0, | |
"learning_rate": 1.4521233856911507e-05, | |
"loss": 12.0051, | |
"step": 1040 | |
}, | |
{ | |
"epoch": 0.42, | |
"grad_norm": 85.5, | |
"learning_rate": 1.43962567230024e-05, | |
"loss": 12.177, | |
"step": 1050 | |
}, | |
{ | |
"epoch": 0.424, | |
"grad_norm": 91.0, | |
"learning_rate": 1.4270422530134433e-05, | |
"loss": 12.3925, | |
"step": 1060 | |
}, | |
{ | |
"epoch": 0.428, | |
"grad_norm": 96.5, | |
"learning_rate": 1.4143755809932843e-05, | |
"loss": 12.4198, | |
"step": 1070 | |
}, | |
{ | |
"epoch": 0.432, | |
"grad_norm": 82.0, | |
"learning_rate": 1.4016281256325702e-05, | |
"loss": 12.5425, | |
"step": 1080 | |
}, | |
{ | |
"epoch": 0.436, | |
"grad_norm": 73.5, | |
"learning_rate": 1.388802372072981e-05, | |
"loss": 12.0934, | |
"step": 1090 | |
}, | |
{ | |
"epoch": 0.44, | |
"grad_norm": 73.0, | |
"learning_rate": 1.3759008207205869e-05, | |
"loss": 11.4355, | |
"step": 1100 | |
}, | |
{ | |
"epoch": 0.444, | |
"grad_norm": 78.5, | |
"learning_rate": 1.3629259867583864e-05, | |
"loss": 11.8871, | |
"step": 1110 | |
}, | |
{ | |
"epoch": 0.448, | |
"grad_norm": 76.5, | |
"learning_rate": 1.349880399655969e-05, | |
"loss": 11.2964, | |
"step": 1120 | |
}, | |
{ | |
"epoch": 0.452, | |
"grad_norm": 86.0, | |
"learning_rate": 1.3367666026763884e-05, | |
"loss": 11.2128, | |
"step": 1130 | |
}, | |
{ | |
"epoch": 0.456, | |
"grad_norm": 78.5, | |
"learning_rate": 1.3235871523803496e-05, | |
"loss": 11.5252, | |
"step": 1140 | |
}, | |
{ | |
"epoch": 0.46, | |
"grad_norm": 76.5, | |
"learning_rate": 1.3103446181278015e-05, | |
"loss": 11.3193, | |
"step": 1150 | |
}, | |
{ | |
"epoch": 0.464, | |
"grad_norm": 78.0, | |
"learning_rate": 1.297041581577035e-05, | |
"loss": 11.2879, | |
"step": 1160 | |
}, | |
{ | |
"epoch": 0.468, | |
"grad_norm": 81.5, | |
"learning_rate": 1.2836806361813846e-05, | |
"loss": 11.1236, | |
"step": 1170 | |
}, | |
{ | |
"epoch": 0.472, | |
"grad_norm": 72.0, | |
"learning_rate": 1.270264386683628e-05, | |
"loss": 10.7719, | |
"step": 1180 | |
}, | |
{ | |
"epoch": 0.476, | |
"grad_norm": 73.0, | |
"learning_rate": 1.256795448608188e-05, | |
"loss": 10.945, | |
"step": 1190 | |
}, | |
{ | |
"epoch": 0.48, | |
"grad_norm": 72.5, | |
"learning_rate": 1.2432764477512294e-05, | |
"loss": 11.3054, | |
"step": 1200 | |
}, | |
{ | |
"epoch": 0.484, | |
"grad_norm": 69.0, | |
"learning_rate": 1.2297100196687557e-05, | |
"loss": 11.0717, | |
"step": 1210 | |
}, | |
{ | |
"epoch": 0.488, | |
"grad_norm": 79.0, | |
"learning_rate": 1.2160988091628023e-05, | |
"loss": 11.1283, | |
"step": 1220 | |
}, | |
{ | |
"epoch": 0.492, | |
"grad_norm": 69.5, | |
"learning_rate": 1.202445469765826e-05, | |
"loss": 10.7246, | |
"step": 1230 | |
}, | |
{ | |
"epoch": 0.496, | |
"grad_norm": 64.5, | |
"learning_rate": 1.1887526632233954e-05, | |
"loss": 11.1319, | |
"step": 1240 | |
}, | |
{ | |
"epoch": 0.5, | |
"grad_norm": 74.0, | |
"learning_rate": 1.1750230589752763e-05, | |
"loss": 11.0599, | |
"step": 1250 | |
}, | |
{ | |
"epoch": 0.504, | |
"grad_norm": 90.5, | |
"learning_rate": 1.1612593336350209e-05, | |
"loss": 10.9624, | |
"step": 1260 | |
}, | |
{ | |
"epoch": 0.508, | |
"grad_norm": 74.5, | |
"learning_rate": 1.1474641704681551e-05, | |
"loss": 11.6825, | |
"step": 1270 | |
}, | |
{ | |
"epoch": 0.512, | |
"grad_norm": 83.5, | |
"learning_rate": 1.1336402588690727e-05, | |
"loss": 10.4348, | |
"step": 1280 | |
}, | |
{ | |
"epoch": 0.516, | |
"grad_norm": 60.25, | |
"learning_rate": 1.1197902938367297e-05, | |
"loss": 10.192, | |
"step": 1290 | |
}, | |
{ | |
"epoch": 0.52, | |
"grad_norm": 70.5, | |
"learning_rate": 1.105916975449252e-05, | |
"loss": 10.9921, | |
"step": 1300 | |
}, | |
{ | |
"epoch": 0.524, | |
"grad_norm": 68.5, | |
"learning_rate": 1.0920230083375474e-05, | |
"loss": 10.9344, | |
"step": 1310 | |
}, | |
{ | |
"epoch": 0.528, | |
"grad_norm": 94.5, | |
"learning_rate": 1.0781111011580336e-05, | |
"loss": 10.7657, | |
"step": 1320 | |
}, | |
{ | |
"epoch": 0.532, | |
"grad_norm": 66.0, | |
"learning_rate": 1.0641839660645806e-05, | |
"loss": 10.2728, | |
"step": 1330 | |
}, | |
{ | |
"epoch": 0.536, | |
"grad_norm": 86.0, | |
"learning_rate": 1.0502443181797696e-05, | |
"loss": 10.3206, | |
"step": 1340 | |
}, | |
{ | |
"epoch": 0.54, | |
"grad_norm": 71.0, | |
"learning_rate": 1.036294875065576e-05, | |
"loss": 10.4514, | |
"step": 1350 | |
}, | |
{ | |
"epoch": 0.544, | |
"grad_norm": 63.0, | |
"learning_rate": 1.0223383561935738e-05, | |
"loss": 10.148, | |
"step": 1360 | |
}, | |
{ | |
"epoch": 0.548, | |
"grad_norm": 69.0, | |
"learning_rate": 1.0083774824147707e-05, | |
"loss": 10.213, | |
"step": 1370 | |
}, | |
{ | |
"epoch": 0.552, | |
"grad_norm": 75.0, | |
"learning_rate": 9.944149754291719e-06, | |
"loss": 10.3198, | |
"step": 1380 | |
}, | |
{ | |
"epoch": 0.556, | |
"grad_norm": 75.0, | |
"learning_rate": 9.80453557255179e-06, | |
"loss": 9.7986, | |
"step": 1390 | |
}, | |
{ | |
"epoch": 0.56, | |
"grad_norm": 69.0, | |
"learning_rate": 9.664959496989286e-06, | |
"loss": 10.2986, | |
"step": 1400 | |
}, | |
{ | |
"epoch": 0.564, | |
"grad_norm": 74.0, | |
"learning_rate": 9.525448738236691e-06, | |
"loss": 10.4321, | |
"step": 1410 | |
}, | |
{ | |
"epoch": 0.568, | |
"grad_norm": 74.5, | |
"learning_rate": 9.386030494192847e-06, | |
"loss": 9.978, | |
"step": 1420 | |
}, | |
{ | |
"epoch": 0.572, | |
"grad_norm": 70.0, | |
"learning_rate": 9.246731944720675e-06, | |
"loss": 9.8457, | |
"step": 1430 | |
}, | |
{ | |
"epoch": 0.576, | |
"grad_norm": 69.0, | |
"learning_rate": 9.107580246348395e-06, | |
"loss": 9.8384, | |
"step": 1440 | |
}, | |
{ | |
"epoch": 0.58, | |
"grad_norm": 71.5, | |
"learning_rate": 8.968602526975329e-06, | |
"loss": 9.4468, | |
"step": 1450 | |
}, | |
{ | |
"epoch": 0.584, | |
"grad_norm": 152.0, | |
"learning_rate": 8.829825880583228e-06, | |
"loss": 9.6898, | |
"step": 1460 | |
}, | |
{ | |
"epoch": 0.588, | |
"grad_norm": 59.0, | |
"learning_rate": 8.69127736195428e-06, | |
"loss": 9.3791, | |
"step": 1470 | |
}, | |
{ | |
"epoch": 0.592, | |
"grad_norm": 70.0, | |
"learning_rate": 8.552983981396709e-06, | |
"loss": 9.6053, | |
"step": 1480 | |
}, | |
{ | |
"epoch": 0.596, | |
"grad_norm": 63.0, | |
"learning_rate": 8.414972699479076e-06, | |
"loss": 9.5834, | |
"step": 1490 | |
}, | |
{ | |
"epoch": 0.6, | |
"grad_norm": 77.5, | |
"learning_rate": 8.277270421774234e-06, | |
"loss": 9.4602, | |
"step": 1500 | |
}, | |
{ | |
"epoch": 0.604, | |
"grad_norm": 64.0, | |
"learning_rate": 8.139903993614069e-06, | |
"loss": 9.2274, | |
"step": 1510 | |
}, | |
{ | |
"epoch": 0.608, | |
"grad_norm": 60.0, | |
"learning_rate": 8.00290019485593e-06, | |
"loss": 9.2331, | |
"step": 1520 | |
}, | |
{ | |
"epoch": 0.612, | |
"grad_norm": 386.0, | |
"learning_rate": 7.866285734661842e-06, | |
"loss": 9.2324, | |
"step": 1530 | |
}, | |
{ | |
"epoch": 0.616, | |
"grad_norm": 60.0, | |
"learning_rate": 7.730087246291503e-06, | |
"loss": 9.2647, | |
"step": 1540 | |
}, | |
{ | |
"epoch": 0.62, | |
"grad_norm": 58.25, | |
"learning_rate": 7.594331281910082e-06, | |
"loss": 9.117, | |
"step": 1550 | |
}, | |
{ | |
"epoch": 0.624, | |
"grad_norm": 72.0, | |
"learning_rate": 7.4590443074118325e-06, | |
"loss": 8.8587, | |
"step": 1560 | |
}, | |
{ | |
"epoch": 0.628, | |
"grad_norm": 66.5, | |
"learning_rate": 7.324252697260475e-06, | |
"loss": 9.1659, | |
"step": 1570 | |
}, | |
{ | |
"epoch": 0.632, | |
"grad_norm": 65.5, | |
"learning_rate": 7.189982729347491e-06, | |
"loss": 8.8773, | |
"step": 1580 | |
}, | |
{ | |
"epoch": 0.636, | |
"grad_norm": 63.5, | |
"learning_rate": 7.056260579869165e-06, | |
"loss": 8.9672, | |
"step": 1590 | |
}, | |
{ | |
"epoch": 0.64, | |
"grad_norm": 62.75, | |
"learning_rate": 6.923112318223497e-06, | |
"loss": 9.1023, | |
"step": 1600 | |
}, | |
{ | |
"epoch": 0.644, | |
"grad_norm": 63.25, | |
"learning_rate": 6.790563901927907e-06, | |
"loss": 9.4185, | |
"step": 1610 | |
}, | |
{ | |
"epoch": 0.648, | |
"grad_norm": 66.5, | |
"learning_rate": 6.658641171558785e-06, | |
"loss": 9.3091, | |
"step": 1620 | |
}, | |
{ | |
"epoch": 0.652, | |
"grad_norm": 62.5, | |
"learning_rate": 6.52736984571381e-06, | |
"loss": 8.9926, | |
"step": 1630 | |
}, | |
{ | |
"epoch": 0.656, | |
"grad_norm": 76.5, | |
"learning_rate": 6.396775515998055e-06, | |
"loss": 8.8942, | |
"step": 1640 | |
}, | |
{ | |
"epoch": 0.66, | |
"grad_norm": 85.0, | |
"learning_rate": 6.2668836420348535e-06, | |
"loss": 9.101, | |
"step": 1650 | |
}, | |
{ | |
"epoch": 0.664, | |
"grad_norm": 66.0, | |
"learning_rate": 6.137719546502401e-06, | |
"loss": 8.3126, | |
"step": 1660 | |
}, | |
{ | |
"epoch": 0.668, | |
"grad_norm": 58.25, | |
"learning_rate": 6.009308410197048e-06, | |
"loss": 8.6663, | |
"step": 1670 | |
}, | |
{ | |
"epoch": 0.672, | |
"grad_norm": 68.5, | |
"learning_rate": 5.881675267124254e-06, | |
"loss": 8.749, | |
"step": 1680 | |
}, | |
{ | |
"epoch": 0.676, | |
"grad_norm": 55.25, | |
"learning_rate": 5.754844999618144e-06, | |
"loss": 8.449, | |
"step": 1690 | |
}, | |
{ | |
"epoch": 0.68, | |
"grad_norm": 58.5, | |
"learning_rate": 5.628842333490674e-06, | |
"loss": 8.9409, | |
"step": 1700 | |
}, | |
{ | |
"epoch": 0.684, | |
"grad_norm": 58.25, | |
"learning_rate": 5.50369183321126e-06, | |
"loss": 8.4775, | |
"step": 1710 | |
}, | |
{ | |
"epoch": 0.688, | |
"grad_norm": 89.5, | |
"learning_rate": 5.379417897117917e-06, | |
"loss": 8.355, | |
"step": 1720 | |
}, | |
{ | |
"epoch": 0.692, | |
"grad_norm": 57.75, | |
"learning_rate": 5.256044752660709e-06, | |
"loss": 8.4883, | |
"step": 1730 | |
}, | |
{ | |
"epoch": 0.696, | |
"grad_norm": 60.25, | |
"learning_rate": 5.133596451678603e-06, | |
"loss": 8.255, | |
"step": 1740 | |
}, | |
{ | |
"epoch": 0.7, | |
"grad_norm": 61.0, | |
"learning_rate": 5.012096865710494e-06, | |
"loss": 8.4214, | |
"step": 1750 | |
}, | |
{ | |
"epoch": 0.704, | |
"grad_norm": 69.0, | |
"learning_rate": 4.891569681341403e-06, | |
"loss": 8.3666, | |
"step": 1760 | |
}, | |
{ | |
"epoch": 0.708, | |
"grad_norm": 91.5, | |
"learning_rate": 4.772038395584735e-06, | |
"loss": 8.8221, | |
"step": 1770 | |
}, | |
{ | |
"epoch": 0.712, | |
"grad_norm": 60.5, | |
"learning_rate": 4.6535263113014885e-06, | |
"loss": 8.5474, | |
"step": 1780 | |
}, | |
{ | |
"epoch": 0.716, | |
"grad_norm": 69.5, | |
"learning_rate": 4.53605653265731e-06, | |
"loss": 8.0251, | |
"step": 1790 | |
}, | |
{ | |
"epoch": 0.72, | |
"grad_norm": 68.0, | |
"learning_rate": 4.419651960618302e-06, | |
"loss": 7.8395, | |
"step": 1800 | |
}, | |
{ | |
"epoch": 0.724, | |
"grad_norm": 55.75, | |
"learning_rate": 4.304335288486426e-06, | |
"loss": 8.4034, | |
"step": 1810 | |
}, | |
{ | |
"epoch": 0.728, | |
"grad_norm": 93.0, | |
"learning_rate": 4.190128997475402e-06, | |
"loss": 8.3004, | |
"step": 1820 | |
}, | |
{ | |
"epoch": 0.732, | |
"grad_norm": 69.0, | |
"learning_rate": 4.0770553523279535e-06, | |
"loss": 7.9588, | |
"step": 1830 | |
}, | |
{ | |
"epoch": 0.736, | |
"grad_norm": 52.5, | |
"learning_rate": 3.965136396975235e-06, | |
"loss": 7.7649, | |
"step": 1840 | |
}, | |
{ | |
"epoch": 0.74, | |
"grad_norm": 63.5, | |
"learning_rate": 3.854393950239356e-06, | |
"loss": 8.6746, | |
"step": 1850 | |
}, | |
{ | |
"epoch": 0.744, | |
"grad_norm": 58.0, | |
"learning_rate": 3.7448496015797296e-06, | |
"loss": 8.4542, | |
"step": 1860 | |
}, | |
{ | |
"epoch": 0.748, | |
"grad_norm": 66.5, | |
"learning_rate": 3.636524706884181e-06, | |
"loss": 7.9523, | |
"step": 1870 | |
}, | |
{ | |
"epoch": 0.752, | |
"grad_norm": 67.5, | |
"learning_rate": 3.5294403843055604e-06, | |
"loss": 8.3026, | |
"step": 1880 | |
}, | |
{ | |
"epoch": 0.756, | |
"grad_norm": 54.0, | |
"learning_rate": 3.4236175101447265e-06, | |
"loss": 8.2066, | |
"step": 1890 | |
}, | |
{ | |
"epoch": 0.76, | |
"grad_norm": 59.0, | |
"learning_rate": 3.3190767147806825e-06, | |
"loss": 8.4277, | |
"step": 1900 | |
}, | |
{ | |
"epoch": 0.764, | |
"grad_norm": 57.75, | |
"learning_rate": 3.2158383786486204e-06, | |
"loss": 8.2385, | |
"step": 1910 | |
}, | |
{ | |
"epoch": 0.768, | |
"grad_norm": 56.75, | |
"learning_rate": 3.113922628266718e-06, | |
"loss": 8.0253, | |
"step": 1920 | |
}, | |
{ | |
"epoch": 0.772, | |
"grad_norm": 75.5, | |
"learning_rate": 3.013349332312451e-06, | |
"loss": 8.4382, | |
"step": 1930 | |
}, | |
{ | |
"epoch": 0.776, | |
"grad_norm": 81.5, | |
"learning_rate": 2.9141380977491373e-06, | |
"loss": 8.2415, | |
"step": 1940 | |
}, | |
{ | |
"epoch": 0.78, | |
"grad_norm": 61.75, | |
"learning_rate": 2.816308266003541e-06, | |
"loss": 8.0811, | |
"step": 1950 | |
}, | |
{ | |
"epoch": 0.784, | |
"grad_norm": 65.5, | |
"learning_rate": 2.7198789091951903e-06, | |
"loss": 8.1608, | |
"step": 1960 | |
}, | |
{ | |
"epoch": 0.788, | |
"grad_norm": 55.0, | |
"learning_rate": 2.624868826418262e-06, | |
"loss": 8.1292, | |
"step": 1970 | |
}, | |
{ | |
"epoch": 0.792, | |
"grad_norm": 58.75, | |
"learning_rate": 2.5312965400766475e-06, | |
"loss": 7.9587, | |
"step": 1980 | |
}, | |
{ | |
"epoch": 0.796, | |
"grad_norm": 60.5, | |
"learning_rate": 2.4391802922729703e-06, | |
"loss": 7.6301, | |
"step": 1990 | |
}, | |
{ | |
"epoch": 0.8, | |
"grad_norm": 59.0, | |
"learning_rate": 2.3485380412522586e-06, | |
"loss": 8.6694, | |
"step": 2000 | |
} | |
], | |
"logging_steps": 10, | |
"max_steps": 2500, | |
"num_input_tokens_seen": 0, | |
"num_train_epochs": 9223372036854775807, | |
"save_steps": 500, | |
"stateful_callbacks": { | |
"TrainerControl": { | |
"args": { | |
"should_epoch_stop": false, | |
"should_evaluate": false, | |
"should_log": false, | |
"should_save": true, | |
"should_training_stop": false | |
}, | |
"attributes": {} | |
} | |
}, | |
"total_flos": 1.4712053990162432e+18, | |
"train_batch_size": 1, | |
"trial_name": null, | |
"trial_params": null | |
} | |
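
For reference, a minimal Python sketch of how a trainer_state.json like the one above can be inspected offline. The file path and the grad_norm cutoff are illustrative assumptions, not part of the checkpoint; only the standard library is used.

import json

# Load the trainer state saved alongside the checkpoint
# (assumed path; adjust to wherever the checkpoint directory lives).
with open("checkpoint-2000/trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry is written every `logging_steps` (here 10)
# optimizer steps and carries epoch, grad_norm, learning_rate, loss, step.
history = state["log_history"]
steps = [e["step"] for e in history if "loss" in e]
losses = [e["loss"] for e in history if "loss" in e]
print(f"loss: {losses[0]} at step {steps[0]} -> {losses[-1]} at step {steps[-1]}")

# Surface the gradient-norm spikes (e.g. steps 170 and 210 above) that
# coincide with early loss jumps; the 1000 threshold is arbitrary.
for e in history:
    if e.get("grad_norm", 0.0) > 1000:
        print(f"step {e['step']}: grad_norm={e['grad_norm']}, loss={e['loss']}")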