{
  "best_metric": 0.9252543449401855,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.46323103647944414,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0023161551823972205, "grad_norm": 1.2503989934921265, "learning_rate": 7e-06, "loss": 2.2545, "step": 1 },
    { "epoch": 0.0023161551823972205, "eval_loss": 3.0283942222595215, "eval_runtime": 55.3246, "eval_samples_per_second": 13.141, "eval_steps_per_second": 3.29, "step": 1 },
    { "epoch": 0.004632310364794441, "grad_norm": 1.3826779127120972, "learning_rate": 1.4e-05, "loss": 2.3452, "step": 2 },
    { "epoch": 0.006948465547191662, "grad_norm": 1.4480987787246704, "learning_rate": 2.1e-05, "loss": 2.5906, "step": 3 },
    { "epoch": 0.009264620729588882, "grad_norm": 1.3076990842819214, "learning_rate": 2.8e-05, "loss": 2.624, "step": 4 },
    { "epoch": 0.011580775911986103, "grad_norm": 1.1654059886932373, "learning_rate": 3.5e-05, "loss": 2.4437, "step": 5 },
    { "epoch": 0.013896931094383324, "grad_norm": 1.1110953092575073, "learning_rate": 4.2e-05, "loss": 2.6337, "step": 6 },
    { "epoch": 0.016213086276780544, "grad_norm": 1.1773900985717773, "learning_rate": 4.899999999999999e-05, "loss": 2.7596, "step": 7 },
    { "epoch": 0.018529241459177764, "grad_norm": 1.8836665153503418, "learning_rate": 5.6e-05, "loss": 2.6959, "step": 8 },
    { "epoch": 0.020845396641574986, "grad_norm": 1.7037103176116943, "learning_rate": 6.3e-05, "loss": 2.3329, "step": 9 },
    { "epoch": 0.023161551823972205, "grad_norm": 1.4448015689849854, "learning_rate": 7e-05, "loss": 2.3956, "step": 10 },
    { "epoch": 0.025477707006369428, "grad_norm": 1.1605085134506226, "learning_rate": 6.999521567473641e-05, "loss": 2.3339, "step": 11 },
    { "epoch": 0.027793862188766647, "grad_norm": 1.33013117313385, "learning_rate": 6.998086400693241e-05, "loss": 2.2781, "step": 12 },
    { "epoch": 0.030110017371163866, "grad_norm": 1.4077861309051514, "learning_rate": 6.995694892019065e-05, "loss": 2.3166, "step": 13 },
    { "epoch": 0.03242617255356109, "grad_norm": 1.2181549072265625, "learning_rate": 6.99234769526571e-05, "loss": 2.2675, "step": 14 },
    { "epoch": 0.03474232773595831, "grad_norm": 1.1249672174453735, "learning_rate": 6.988045725523343e-05, "loss": 2.1643, "step": 15 },
    { "epoch": 0.03705848291835553, "grad_norm": 1.5914721488952637, "learning_rate": 6.982790158907539e-05, "loss": 2.2916, "step": 16 },
    { "epoch": 0.03937463810075275, "grad_norm": 1.4551798105239868, "learning_rate": 6.976582432237733e-05, "loss": 2.1199, "step": 17 },
    { "epoch": 0.04169079328314997, "grad_norm": 1.2567614316940308, "learning_rate": 6.969424242644413e-05, "loss": 2.0909, "step": 18 },
    { "epoch": 0.04400694846554719, "grad_norm": 1.2276580333709717, "learning_rate": 6.961317547105138e-05, "loss": 1.8407, "step": 19 },
    { "epoch": 0.04632310364794441, "grad_norm": 1.128091812133789, "learning_rate": 6.952264561909527e-05, "loss": 2.0004, "step": 20 },
    { "epoch": 0.04863925883034163, "grad_norm": 1.1621633768081665, "learning_rate": 6.942267762053337e-05, "loss": 2.0205, "step": 21 },
    { "epoch": 0.050955414012738856, "grad_norm": 1.2083147764205933, "learning_rate": 6.931329880561832e-05, "loss": 1.8645, "step": 22 },
    { "epoch": 0.05327156919513607, "grad_norm": 1.1491119861602783, "learning_rate": 6.919453907742597e-05, "loss": 2.0392, "step": 23 },
    { "epoch": 0.055587724377533294, "grad_norm": 1.2552692890167236, "learning_rate": 6.90664309036802e-05, "loss": 1.9381, "step": 24 },
    { "epoch": 0.05790387955993052, "grad_norm": 1.2837193012237549, "learning_rate": 6.892900930787656e-05, "loss": 1.8941, "step": 25 },
    { "epoch": 0.06022003474232773, "grad_norm": 1.2813526391983032, "learning_rate": 6.87823118597072e-05, "loss": 1.9548, "step": 26 },
    { "epoch": 0.06253618992472496, "grad_norm": 1.272463083267212, "learning_rate": 6.862637866478969e-05, "loss": 1.9528, "step": 27 },
    { "epoch": 0.06485234510712218, "grad_norm": 1.2786381244659424, "learning_rate": 6.846125235370252e-05, "loss": 1.7617, "step": 28 },
    { "epoch": 0.0671685002895194, "grad_norm": 1.199897289276123, "learning_rate": 6.828697807033038e-05, "loss": 1.7425, "step": 29 },
    { "epoch": 0.06948465547191662, "grad_norm": 1.1612555980682373, "learning_rate": 6.81036034595222e-05, "loss": 1.6898, "step": 30 },
    { "epoch": 0.07180081065431385, "grad_norm": 1.4991174936294556, "learning_rate": 6.791117865406564e-05, "loss": 1.4727, "step": 31 },
    { "epoch": 0.07411696583671105, "grad_norm": 1.406917929649353, "learning_rate": 6.770975626098112e-05, "loss": 1.5182, "step": 32 },
    { "epoch": 0.07643312101910828, "grad_norm": 1.5695204734802246, "learning_rate": 6.749939134713974e-05, "loss": 1.6764, "step": 33 },
    { "epoch": 0.0787492762015055, "grad_norm": 1.4783178567886353, "learning_rate": 6.728014142420846e-05, "loss": 1.4336, "step": 34 },
    { "epoch": 0.08106543138390272, "grad_norm": 1.5821142196655273, "learning_rate": 6.7052066432927e-05, "loss": 1.7416, "step": 35 },
    { "epoch": 0.08338158656629995, "grad_norm": 1.3772616386413574, "learning_rate": 6.681522872672069e-05, "loss": 1.5644, "step": 36 },
    { "epoch": 0.08569774174869717, "grad_norm": 1.5498988628387451, "learning_rate": 6.656969305465356e-05, "loss": 1.5495, "step": 37 },
    { "epoch": 0.08801389693109438, "grad_norm": 1.627152681350708, "learning_rate": 6.631552654372672e-05, "loss": 1.6281, "step": 38 },
    { "epoch": 0.0903300521134916, "grad_norm": 1.6475415229797363, "learning_rate": 6.60527986805264e-05, "loss": 1.3677, "step": 39 },
    { "epoch": 0.09264620729588882, "grad_norm": 1.7694090604782104, "learning_rate": 6.578158129222711e-05, "loss": 1.4586, "step": 40 },
    { "epoch": 0.09496236247828604, "grad_norm": 1.7969664335250854, "learning_rate": 6.550194852695469e-05, "loss": 1.3842, "step": 41 },
    { "epoch": 0.09727851766068327, "grad_norm": 1.6679091453552246, "learning_rate": 6.521397683351509e-05, "loss": 1.3623, "step": 42 },
    { "epoch": 0.09959467284308049, "grad_norm": 1.8218728303909302, "learning_rate": 6.491774494049386e-05, "loss": 1.4426, "step": 43 },
    { "epoch": 0.10191082802547771, "grad_norm": 1.8028229475021362, "learning_rate": 6.461333383473272e-05, "loss": 1.5289, "step": 44 },
    { "epoch": 0.10422698320787492, "grad_norm": 1.7978172302246094, "learning_rate": 6.430082673918849e-05, "loss": 1.365, "step": 45 },
    { "epoch": 0.10654313839027214, "grad_norm": 2.145550489425659, "learning_rate": 6.398030909018069e-05, "loss": 1.3449, "step": 46 },
    { "epoch": 0.10885929357266937, "grad_norm": 2.116974115371704, "learning_rate": 6.365186851403423e-05, "loss": 1.3279, "step": 47 },
    { "epoch": 0.11117544875506659, "grad_norm": 2.0331828594207764, "learning_rate": 6.331559480312315e-05, "loss": 1.2943, "step": 48 },
    { "epoch": 0.11349160393746381, "grad_norm": 2.129798650741577, "learning_rate": 6.297157989132236e-05, "loss": 1.0358, "step": 49 },
    { "epoch": 0.11580775911986103, "grad_norm": 2.835911750793457, "learning_rate": 6.261991782887377e-05, "loss": 1.2203, "step": 50 },
    { "epoch": 0.11580775911986103, "eval_loss": 1.6897388696670532, "eval_runtime": 56.0898, "eval_samples_per_second": 12.961, "eval_steps_per_second": 3.245, "step": 50 },
    { "epoch": 0.11812391430225826, "grad_norm": 2.849514961242676, "learning_rate": 6.226070475667393e-05, "loss": 2.0836, "step": 51 },
    { "epoch": 0.12044006948465547, "grad_norm": 2.162397623062134, "learning_rate": 6.189403887999006e-05, "loss": 2.0693, "step": 52 },
    { "epoch": 0.12275622466705269, "grad_norm": 1.8029311895370483, "learning_rate": 6.152002044161171e-05, "loss": 2.0471, "step": 53 },
    { "epoch": 0.1250723798494499, "grad_norm": 1.3188810348510742, "learning_rate": 6.113875169444539e-05, "loss": 2.0245, "step": 54 },
    { "epoch": 0.12738853503184713, "grad_norm": 1.1737282276153564, "learning_rate": 6.0750336873559605e-05, "loss": 1.8553, "step": 55 },
    { "epoch": 0.12970469021424436, "grad_norm": 1.0292160511016846, "learning_rate": 6.035488216768811e-05, "loss": 1.868, "step": 56 },
    { "epoch": 0.13202084539664158, "grad_norm": 0.96671462059021, "learning_rate": 5.9952495690198894e-05, "loss": 1.913, "step": 57 },
    { "epoch": 0.1343370005790388, "grad_norm": 1.1832921504974365, "learning_rate": 5.954328744953709e-05, "loss": 1.7736, "step": 58 },
    { "epoch": 0.13665315576143602, "grad_norm": 1.0528168678283691, "learning_rate": 5.91273693191498e-05, "loss": 1.9491, "step": 59 },
    { "epoch": 0.13896931094383325, "grad_norm": 1.0293359756469727, "learning_rate": 5.870485500690094e-05, "loss": 1.7518, "step": 60 },
    { "epoch": 0.14128546612623047, "grad_norm": 1.0944894552230835, "learning_rate": 5.827586002398468e-05, "loss": 1.7148, "step": 61 },
    { "epoch": 0.1436016213086277, "grad_norm": 1.0049891471862793, "learning_rate": 5.784050165334589e-05, "loss": 1.6303, "step": 62 },
    { "epoch": 0.1459177764910249, "grad_norm": 1.0505611896514893, "learning_rate": 5.739889891761608e-05, "loss": 1.6231, "step": 63 },
    { "epoch": 0.1482339316734221, "grad_norm": 1.2406843900680542, "learning_rate": 5.6951172546573794e-05, "loss": 1.8787, "step": 64 },
    { "epoch": 0.15055008685581933, "grad_norm": 1.1927564144134521, "learning_rate": 5.6497444944138376e-05, "loss": 1.7534, "step": 65 },
    { "epoch": 0.15286624203821655, "grad_norm": 1.2264256477355957, "learning_rate": 5.603784015490587e-05, "loss": 1.6633, "step": 66 },
    { "epoch": 0.15518239722061378, "grad_norm": 1.3675661087036133, "learning_rate": 5.557248383023655e-05, "loss": 1.5166, "step": 67 },
    { "epoch": 0.157498552403011, "grad_norm": 1.3764818906784058, "learning_rate": 5.510150319390302e-05, "loss": 1.4764, "step": 68 },
    { "epoch": 0.15981470758540822, "grad_norm": 1.3945223093032837, "learning_rate": 5.4625027007308546e-05, "loss": 1.5581, "step": 69 },
    { "epoch": 0.16213086276780544, "grad_norm": 1.2444453239440918, "learning_rate": 5.414318553428494e-05, "loss": 1.4439, "step": 70 },
    { "epoch": 0.16444701795020267, "grad_norm": 1.3003394603729248, "learning_rate": 5.3656110505479776e-05, "loss": 1.6489, "step": 71 },
    { "epoch": 0.1667631731325999, "grad_norm": 1.247867226600647, "learning_rate": 5.316393508234253e-05, "loss": 1.4745, "step": 72 },
    { "epoch": 0.1690793283149971, "grad_norm": 1.249312162399292, "learning_rate": 5.266679382071953e-05, "loss": 1.5594, "step": 73 },
    { "epoch": 0.17139548349739434, "grad_norm": 1.2661000490188599, "learning_rate": 5.216482263406778e-05, "loss": 1.4676, "step": 74 },
    { "epoch": 0.17371163867979156, "grad_norm": 1.3261014223098755, "learning_rate": 5.1658158756297576e-05, "loss": 1.4847, "step": 75 },
    { "epoch": 0.17602779386218875, "grad_norm": 1.2597907781600952, "learning_rate": 5.114694070425407e-05, "loss": 1.3025, "step": 76 },
    { "epoch": 0.17834394904458598, "grad_norm": 1.215097427368164, "learning_rate": 5.063130823984823e-05, "loss": 1.2496, "step": 77 },
    { "epoch": 0.1806601042269832, "grad_norm": 1.2596161365509033, "learning_rate": 5.011140233184724e-05, "loss": 1.2445, "step": 78 },
    { "epoch": 0.18297625940938042, "grad_norm": 1.4308902025222778, "learning_rate": 4.958736511733516e-05, "loss": 1.2937, "step": 79 },
    { "epoch": 0.18529241459177764, "grad_norm": 1.5676943063735962, "learning_rate": 4.905933986285393e-05, "loss": 1.1867, "step": 80 },
    { "epoch": 0.18760856977417487, "grad_norm": 1.5051430463790894, "learning_rate": 4.8527470925235824e-05, "loss": 1.1516, "step": 81 },
    { "epoch": 0.1899247249565721, "grad_norm": 1.4236093759536743, "learning_rate": 4.799190371213772e-05, "loss": 0.9429, "step": 82 },
    { "epoch": 0.1922408801389693, "grad_norm": 1.6276626586914062, "learning_rate": 4.745278464228808e-05, "loss": 1.1588, "step": 83 },
    { "epoch": 0.19455703532136653, "grad_norm": 1.5213537216186523, "learning_rate": 4.69102611054575e-05, "loss": 1.1604, "step": 84 },
    { "epoch": 0.19687319050376376, "grad_norm": 1.7984035015106201, "learning_rate": 4.6364481422163926e-05, "loss": 1.1797, "step": 85 },
    { "epoch": 0.19918934568616098, "grad_norm": 1.4924752712249756, "learning_rate": 4.581559480312316e-05, "loss": 1.0718, "step": 86 },
    { "epoch": 0.2015055008685582, "grad_norm": 1.7816964387893677, "learning_rate": 4.526375130845627e-05, "loss": 1.1378, "step": 87 },
    { "epoch": 0.20382165605095542, "grad_norm": 1.7848283052444458, "learning_rate": 4.4709101806664554e-05, "loss": 0.9752, "step": 88 },
    { "epoch": 0.20613781123335265, "grad_norm": 1.836548089981079, "learning_rate": 4.4151797933383685e-05, "loss": 0.9158, "step": 89 },
    { "epoch": 0.20845396641574984, "grad_norm": 1.678355097770691, "learning_rate": 4.359199204992797e-05, "loss": 0.9598, "step": 90 },
    { "epoch": 0.21077012159814706, "grad_norm": 1.9656362533569336, "learning_rate": 4.30298372016363e-05, "loss": 1.1572, "step": 91 },
    { "epoch": 0.2130862767805443, "grad_norm": 2.5709915161132812, "learning_rate": 4.246548707603114e-05, "loss": 0.8351, "step": 92 },
    { "epoch": 0.2154024319629415, "grad_norm": 1.8190523386001587, "learning_rate": 4.1899095960801805e-05, "loss": 0.9647, "step": 93 },
    { "epoch": 0.21771858714533873, "grad_norm": 2.0618791580200195, "learning_rate": 4.133081870162385e-05, "loss": 1.0824, "step": 94 },
    { "epoch": 0.22003474232773595, "grad_norm": 2.0086538791656494, "learning_rate": 4.076081065982569e-05, "loss": 1.093, "step": 95 },
    { "epoch": 0.22235089751013318, "grad_norm": 2.2027931213378906, "learning_rate": 4.018922766991447e-05, "loss": 1.1038, "step": 96 },
    { "epoch": 0.2246670526925304, "grad_norm": 1.8367047309875488, "learning_rate": 3.961622599697241e-05, "loss": 0.9804, "step": 97 },
    { "epoch": 0.22698320787492762, "grad_norm": 2.6135077476501465, "learning_rate": 3.9041962293935516e-05, "loss": 0.9048, "step": 98 },
    { "epoch": 0.22929936305732485, "grad_norm": 2.061969518661499, "learning_rate": 3.84665935587662e-05, "loss": 0.8975, "step": 99 },
    { "epoch": 0.23161551823972207, "grad_norm": 2.850947141647339, "learning_rate": 3.7890277091531636e-05, "loss": 0.8124, "step": 100 },
    { "epoch": 0.23161551823972207, "eval_loss": 1.2259724140167236, "eval_runtime": 56.0663, "eval_samples_per_second": 12.967, "eval_steps_per_second": 3.246, "step": 100 },
    { "epoch": 0.2339316734221193, "grad_norm": 1.7626456022262573, "learning_rate": 3.7313170451399475e-05, "loss": 1.9503, "step": 101 },
    { "epoch": 0.2362478286045165, "grad_norm": 1.505061388015747, "learning_rate": 3.673543141356278e-05, "loss": 1.8245, "step": 102 },
    { "epoch": 0.23856398378691374, "grad_norm": 1.3618402481079102, "learning_rate": 3.6157217926105783e-05, "loss": 1.7775, "step": 103 },
    { "epoch": 0.24088013896931093, "grad_norm": 1.3016786575317383, "learning_rate": 3.557868806682255e-05, "loss": 1.6679, "step": 104 },
    { "epoch": 0.24319629415170815, "grad_norm": 1.2957885265350342, "learning_rate": 3.5e-05, "loss": 1.6692, "step": 105 },
    { "epoch": 0.24551244933410538, "grad_norm": 1.004072666168213, "learning_rate": 3.442131193317745e-05, "loss": 1.521, "step": 106 },
    { "epoch": 0.2478286045165026, "grad_norm": 1.0314702987670898, "learning_rate": 3.384278207389421e-05, "loss": 1.6253, "step": 107 },
    { "epoch": 0.2501447596988998, "grad_norm": 1.1805181503295898, "learning_rate": 3.3264568586437216e-05, "loss": 1.4228, "step": 108 },
    { "epoch": 0.25246091488129707, "grad_norm": 1.028948187828064, "learning_rate": 3.268682954860052e-05, "loss": 1.4977, "step": 109 },
    { "epoch": 0.25477707006369427, "grad_norm": 1.0726194381713867, "learning_rate": 3.210972290846837e-05, "loss": 1.3065, "step": 110 },
    { "epoch": 0.25709322524609146, "grad_norm": 1.1146329641342163, "learning_rate": 3.15334064412338e-05, "loss": 1.0397, "step": 111 },
    { "epoch": 0.2594093804284887, "grad_norm": 1.1305572986602783, "learning_rate": 3.0958037706064485e-05, "loss": 1.2692, "step": 112 },
    { "epoch": 0.2617255356108859, "grad_norm": 1.2434415817260742, "learning_rate": 3.038377400302758e-05, "loss": 1.3937, "step": 113 },
    { "epoch": 0.26404169079328316, "grad_norm": 1.2750946283340454, "learning_rate": 2.9810772330085524e-05, "loss": 1.3832, "step": 114 },
    { "epoch": 0.26635784597568035, "grad_norm": 1.4428255558013916, "learning_rate": 2.9239189340174306e-05, "loss": 1.134, "step": 115 },
    { "epoch": 0.2686740011580776, "grad_norm": 1.4373363256454468, "learning_rate": 2.8669181298376163e-05, "loss": 1.2468, "step": 116 },
    { "epoch": 0.2709901563404748, "grad_norm": 1.548967719078064, "learning_rate": 2.8100904039198193e-05, "loss": 1.0152, "step": 117 },
    { "epoch": 0.27330631152287205, "grad_norm": 1.930369257926941, "learning_rate": 2.7534512923968863e-05, "loss": 1.2061, "step": 118 },
    { "epoch": 0.27562246670526924, "grad_norm": 1.7386815547943115, "learning_rate": 2.6970162798363695e-05, "loss": 1.1742, "step": 119 },
    { "epoch": 0.2779386218876665, "grad_norm": 1.7060924768447876, "learning_rate": 2.640800795007203e-05, "loss": 1.2716, "step": 120 },
    { "epoch": 0.2802547770700637, "grad_norm": 1.5493820905685425, "learning_rate": 2.5848202066616305e-05, "loss": 1.2847, "step": 121 },
    { "epoch": 0.28257093225246094, "grad_norm": 1.513087272644043, "learning_rate": 2.5290898193335446e-05, "loss": 1.2291, "step": 122 },
    { "epoch": 0.28488708743485813, "grad_norm": 1.405207633972168, "learning_rate": 2.4736248691543736e-05, "loss": 1.0475, "step": 123 },
    { "epoch": 0.2872032426172554, "grad_norm": 1.4663727283477783, "learning_rate": 2.4184405196876842e-05, "loss": 0.9763, "step": 124 },
    { "epoch": 0.2895193977996526, "grad_norm": 1.33937406539917, "learning_rate": 2.363551857783608e-05, "loss": 0.981, "step": 125 },
    { "epoch": 0.2918355529820498, "grad_norm": 1.4397436380386353, "learning_rate": 2.308973889454249e-05, "loss": 1.1096, "step": 126 },
    { "epoch": 0.294151708164447, "grad_norm": 1.3392612934112549, "learning_rate": 2.2547215357711918e-05, "loss": 0.9199, "step": 127 },
    { "epoch": 0.2964678633468442, "grad_norm": 1.3941303491592407, "learning_rate": 2.2008096287862266e-05, "loss": 0.885, "step": 128 },
    { "epoch": 0.29878401852924147, "grad_norm": 1.5315736532211304, "learning_rate": 2.1472529074764177e-05, "loss": 0.9312, "step": 129 },
    { "epoch": 0.30110017371163866, "grad_norm": 1.461169719696045, "learning_rate": 2.0940660137146074e-05, "loss": 0.8541, "step": 130 },
    { "epoch": 0.3034163288940359, "grad_norm": 1.5014770030975342, "learning_rate": 2.041263488266484e-05, "loss": 0.8172, "step": 131 },
    { "epoch": 0.3057324840764331, "grad_norm": 1.8396813869476318, "learning_rate": 1.988859766815275e-05, "loss": 0.8973, "step": 132 },
    { "epoch": 0.30804863925883036, "grad_norm": 1.7794278860092163, "learning_rate": 1.9368691760151773e-05, "loss": 0.9055, "step": 133 },
    { "epoch": 0.31036479444122755, "grad_norm": 1.544495701789856, "learning_rate": 1.885305929574593e-05, "loss": 0.8416, "step": 134 },
    { "epoch": 0.3126809496236248, "grad_norm": 1.71176278591156, "learning_rate": 1.8341841243702424e-05, "loss": 0.8015, "step": 135 },
    { "epoch": 0.314997104806022, "grad_norm": 1.837409257888794, "learning_rate": 1.7835177365932225e-05, "loss": 0.7487, "step": 136 },
    { "epoch": 0.31731325998841925, "grad_norm": 1.8642661571502686, "learning_rate": 1.7333206179280478e-05, "loss": 0.8194, "step": 137 },
    { "epoch": 0.31962941517081644, "grad_norm": 1.9652878046035767, "learning_rate": 1.6836064917657478e-05, "loss": 0.7471, "step": 138 },
    { "epoch": 0.32194557035321364, "grad_norm": 1.8793476819992065, "learning_rate": 1.6343889494520224e-05, "loss": 0.5699, "step": 139 },
    { "epoch": 0.3242617255356109, "grad_norm": 1.8087778091430664, "learning_rate": 1.5856814465715064e-05, "loss": 0.702, "step": 140 },
    { "epoch": 0.3265778807180081, "grad_norm": 1.814977765083313, "learning_rate": 1.5374972992691458e-05, "loss": 0.6915, "step": 141 },
    { "epoch": 0.32889403590040533, "grad_norm": 1.9736248254776, "learning_rate": 1.4898496806096974e-05, "loss": 0.6077, "step": 142 },
    { "epoch": 0.33121019108280253, "grad_norm": 2.0833942890167236, "learning_rate": 1.4427516169763444e-05, "loss": 0.6608, "step": 143 },
    { "epoch": 0.3335263462651998, "grad_norm": 2.1390953063964844, "learning_rate": 1.396215984509412e-05, "loss": 0.614, "step": 144 },
    { "epoch": 0.335842501447597, "grad_norm": 2.002488851547241, "learning_rate": 1.3502555055861625e-05, "loss": 0.6135, "step": 145 },
    { "epoch": 0.3381586566299942, "grad_norm": 2.446239471435547, "learning_rate": 1.3048827453426203e-05, "loss": 0.7834, "step": 146 },
    { "epoch": 0.3404748118123914, "grad_norm": 2.466515302658081, "learning_rate": 1.2601101082383917e-05, "loss": 0.7394, "step": 147 },
    { "epoch": 0.34279096699478867, "grad_norm": 2.9753072261810303, "learning_rate": 1.2159498346654094e-05, "loss": 0.6323, "step": 148 },
    { "epoch": 0.34510712217718587, "grad_norm": 2.352726459503174, "learning_rate": 1.1724139976015306e-05, "loss": 0.5433, "step": 149 },
    { "epoch": 0.3474232773595831, "grad_norm": 2.822594404220581, "learning_rate": 1.1295144993099068e-05, "loss": 0.6062, "step": 150 },
    { "epoch": 0.3474232773595831, "eval_loss": 0.9819753766059875, "eval_runtime": 56.0681, "eval_samples_per_second": 12.966, "eval_steps_per_second": 3.246, "step": 150 },
    { "epoch": 0.3497394325419803, "grad_norm": 1.6045421361923218, "learning_rate": 1.0872630680850196e-05, "loss": 1.6882, "step": 151 },
    { "epoch": 0.3520555877243775, "grad_norm": 1.5667160749435425, "learning_rate": 1.0456712550462898e-05, "loss": 1.4824, "step": 152 },
    { "epoch": 0.35437174290677476, "grad_norm": 1.938839316368103, "learning_rate": 1.0047504309801104e-05, "loss": 1.4432, "step": 153 },
    { "epoch": 0.35668789808917195, "grad_norm": 1.8820574283599854, "learning_rate": 9.645117832311886e-06, "loss": 1.3595, "step": 154 },
    { "epoch": 0.3590040532715692, "grad_norm": 1.5141949653625488, "learning_rate": 9.249663126440394e-06, "loss": 1.442, "step": 155 },
    { "epoch": 0.3613202084539664, "grad_norm": 1.4562164545059204, "learning_rate": 8.861248305554624e-06, "loss": 1.4735, "step": 156 },
    { "epoch": 0.36363636363636365, "grad_norm": 1.6026912927627563, "learning_rate": 8.47997955838829e-06, "loss": 1.2689, "step": 157 },
    { "epoch": 0.36595251881876084, "grad_norm": 1.5428178310394287, "learning_rate": 8.10596112000994e-06, "loss": 1.3727, "step": 158 },
    { "epoch": 0.3682686740011581, "grad_norm": 1.2998900413513184, "learning_rate": 7.739295243326067e-06, "loss": 1.1046, "step": 159 },
    { "epoch": 0.3705848291835553, "grad_norm": 1.3930641412734985, "learning_rate": 7.380082171126228e-06, "loss": 1.2705, "step": 160 },
    { "epoch": 0.37290098436595254, "grad_norm": 1.318785309791565, "learning_rate": 7.028420108677635e-06, "loss": 1.2158, "step": 161 },
    { "epoch": 0.37521713954834973, "grad_norm": 1.42453932762146, "learning_rate": 6.684405196876842e-06, "loss": 1.3409, "step": 162 },
    { "epoch": 0.377533294730747, "grad_norm": 1.6251355409622192, "learning_rate": 6.3481314859657675e-06, "loss": 1.1197, "step": 163 },
    { "epoch": 0.3798494499131442, "grad_norm": 1.3056257963180542, "learning_rate": 6.019690909819298e-06, "loss": 1.1391, "step": 164 },
    { "epoch": 0.3821656050955414, "grad_norm": 1.4243314266204834, "learning_rate": 5.6991732608115e-06, "loss": 1.3049, "step": 165 },
    { "epoch": 0.3844817602779386, "grad_norm": 1.621582269668579, "learning_rate": 5.386666165267256e-06, "loss": 0.9413, "step": 166 },
    { "epoch": 0.3867979154603358, "grad_norm": 1.5309045314788818, "learning_rate": 5.08225505950613e-06, "loss": 1.024, "step": 167 },
    { "epoch": 0.38911407064273307, "grad_norm": 1.4823757410049438, "learning_rate": 4.786023166484913e-06, "loss": 0.9542, "step": 168 },
    { "epoch": 0.39143022582513026, "grad_norm": 1.5925859212875366, "learning_rate": 4.498051473045291e-06, "loss": 1.0656, "step": 169 },
    { "epoch": 0.3937463810075275, "grad_norm": 1.4511696100234985, "learning_rate": 4.218418707772886e-06, "loss": 1.1376, "step": 170 },
    { "epoch": 0.3960625361899247, "grad_norm": 1.4989674091339111, "learning_rate": 3.947201319473587e-06, "loss": 1.1977, "step": 171 },
    { "epoch": 0.39837869137232196, "grad_norm": 1.7597720623016357, "learning_rate": 3.684473456273278e-06, "loss": 1.1539, "step": 172 },
    { "epoch": 0.40069484655471915, "grad_norm": 1.5664502382278442, "learning_rate": 3.4303069453464383e-06, "loss": 1.035, "step": 173 },
    { "epoch": 0.4030110017371164, "grad_norm": 1.5649906396865845, "learning_rate": 3.184771273279312e-06, "loss": 0.8856, "step": 174 },
    { "epoch": 0.4053271569195136, "grad_norm": 1.6556047201156616, "learning_rate": 2.947933567072987e-06, "loss": 0.972, "step": 175 },
    { "epoch": 0.40764331210191085, "grad_norm": 1.6981557607650757, "learning_rate": 2.719858575791534e-06, "loss": 0.7624, "step": 176 },
    { "epoch": 0.40995946728430804, "grad_norm": 1.6652178764343262, "learning_rate": 2.500608652860256e-06, "loss": 0.9003, "step": 177 },
    { "epoch": 0.4122756224667053, "grad_norm": 1.6681879758834839, "learning_rate": 2.2902437390188737e-06, "loss": 0.6842, "step": 178 },
    { "epoch": 0.4145917776491025, "grad_norm": 1.7632416486740112, "learning_rate": 2.0888213459343587e-06, "loss": 0.7585, "step": 179 },
    { "epoch": 0.4169079328314997, "grad_norm": 1.8788822889328003, "learning_rate": 1.8963965404777875e-06, "loss": 0.7509, "step": 180 },
    { "epoch": 0.41922408801389693, "grad_norm": 1.7253082990646362, "learning_rate": 1.7130219296696263e-06, "loss": 0.8523, "step": 181 },
    { "epoch": 0.42154024319629413, "grad_norm": 1.6087950468063354, "learning_rate": 1.5387476462974824e-06, "loss": 0.7647, "step": 182 },
    { "epoch": 0.4238563983786914, "grad_norm": 1.601576328277588, "learning_rate": 1.3736213352103147e-06, "loss": 0.6134, "step": 183 },
    { "epoch": 0.4261725535610886, "grad_norm": 1.6278523206710815, "learning_rate": 1.2176881402928002e-06, "loss": 0.5783, "step": 184 },
    { "epoch": 0.4284887087434858, "grad_norm": 1.9031941890716553, "learning_rate": 1.0709906921234367e-06, "loss": 0.6664, "step": 185 },
    { "epoch": 0.430804863925883, "grad_norm": 1.792665719985962, "learning_rate": 9.33569096319799e-07, "loss": 0.5366, "step": 186 },
    { "epoch": 0.43312101910828027, "grad_norm": 2.003882884979248, "learning_rate": 8.054609225740255e-07, "loss": 0.7031, "step": 187 },
    { "epoch": 0.43543717429067746, "grad_norm": 1.9855848550796509, "learning_rate": 6.867011943816724e-07, "loss": 0.5054, "step": 188 },
    { "epoch": 0.4377533294730747, "grad_norm": 1.944218635559082, "learning_rate": 5.77322379466617e-07, "loss": 0.5625, "step": 189 },
    { "epoch": 0.4400694846554719, "grad_norm": 1.8458359241485596, "learning_rate": 4.773543809047186e-07, "loss": 0.7162, "step": 190 },
    { "epoch": 0.44238563983786916, "grad_norm": 1.8968647718429565, "learning_rate": 3.868245289486027e-07, "loss": 0.5324, "step": 191 },
    { "epoch": 0.44470179502026635, "grad_norm": 1.8599846363067627, "learning_rate": 3.0575757355586817e-07, "loss": 0.6424, "step": 192 },
    { "epoch": 0.4470179502026636, "grad_norm": 2.0066614151000977, "learning_rate": 2.3417567762266497e-07, "loss": 0.6599, "step": 193 },
    { "epoch": 0.4493341053850608, "grad_norm": 2.488804340362549, "learning_rate": 1.7209841092460043e-07, "loss": 0.748, "step": 194 },
    { "epoch": 0.451650260567458, "grad_norm": 2.2025880813598633, "learning_rate": 1.1954274476655534e-07, "loss": 0.6648, "step": 195 },
    { "epoch": 0.45396641574985525, "grad_norm": 2.2725353240966797, "learning_rate": 7.652304734289127e-08, "loss": 0.6651, "step": 196 },
    { "epoch": 0.45628257093225244, "grad_norm": 2.63472318649292, "learning_rate": 4.30510798093342e-08, "loss": 0.734, "step": 197 },
    { "epoch": 0.4585987261146497, "grad_norm": 2.484386444091797, "learning_rate": 1.9135993067588284e-08, "loss": 0.6181, "step": 198 },
    { "epoch": 0.4609148812970469, "grad_norm": 2.705622911453247, "learning_rate": 4.784325263584854e-09, "loss": 0.4998, "step": 199 },
    { "epoch": 0.46323103647944414, "grad_norm": 3.950650215148926, "learning_rate": 0.0, "loss": 0.5755, "step": 200 },
    { "epoch": 0.46323103647944414, "eval_loss": 0.9252543449401855, "eval_runtime": 56.0402, "eval_samples_per_second": 12.973, "eval_steps_per_second": 3.248, "step": 200 }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 4,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.017027157491712e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}