{
  "best_metric": 1.3642938137054443,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 2.150537634408602,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.010752688172043012,
      "grad_norm": 4.702356815338135,
      "learning_rate": 1e-05,
      "loss": 2.5608,
      "step": 1
    },
    {
      "epoch": 0.010752688172043012,
      "eval_loss": 2.5321671962738037,
      "eval_runtime": 4.7204,
      "eval_samples_per_second": 33.26,
      "eval_steps_per_second": 8.474,
      "step": 1
    },
    {
      "epoch": 0.021505376344086023,
      "grad_norm": 4.766476631164551,
      "learning_rate": 2e-05,
      "loss": 2.6608,
      "step": 2
    },
    {
      "epoch": 0.03225806451612903,
      "grad_norm": 4.727513313293457,
      "learning_rate": 3e-05,
      "loss": 2.1508,
      "step": 3
    },
    {
      "epoch": 0.043010752688172046,
      "grad_norm": 4.70632266998291,
      "learning_rate": 4e-05,
      "loss": 2.221,
      "step": 4
    },
    {
      "epoch": 0.053763440860215055,
      "grad_norm": 3.304126501083374,
      "learning_rate": 5e-05,
      "loss": 2.2863,
      "step": 5
    },
    {
      "epoch": 0.06451612903225806,
      "grad_norm": 3.3436529636383057,
      "learning_rate": 6e-05,
      "loss": 2.0891,
      "step": 6
    },
    {
      "epoch": 0.07526881720430108,
      "grad_norm": 2.757631540298462,
      "learning_rate": 7e-05,
      "loss": 2.0318,
      "step": 7
    },
    {
      "epoch": 0.08602150537634409,
      "grad_norm": 2.5397322177886963,
      "learning_rate": 8e-05,
      "loss": 1.8757,
      "step": 8
    },
    {
      "epoch": 0.0967741935483871,
      "grad_norm": 2.240288496017456,
      "learning_rate": 9e-05,
      "loss": 1.6397,
      "step": 9
    },
    {
      "epoch": 0.10752688172043011,
      "grad_norm": 2.0536859035491943,
      "learning_rate": 0.0001,
      "loss": 1.6601,
      "step": 10
    },
    {
      "epoch": 0.11827956989247312,
      "grad_norm": 1.8525017499923706,
      "learning_rate": 9.999316524962345e-05,
      "loss": 1.5248,
      "step": 11
    },
    {
      "epoch": 0.12903225806451613,
      "grad_norm": 1.9969803094863892,
      "learning_rate": 9.997266286704631e-05,
      "loss": 1.4807,
      "step": 12
    },
    {
      "epoch": 0.13978494623655913,
      "grad_norm": 2.01229190826416,
      "learning_rate": 9.993849845741524e-05,
      "loss": 1.4901,
      "step": 13
    },
    {
      "epoch": 0.15053763440860216,
      "grad_norm": 1.7899178266525269,
      "learning_rate": 9.989068136093873e-05,
      "loss": 1.5082,
      "step": 14
    },
    {
      "epoch": 0.16129032258064516,
      "grad_norm": 2.1389129161834717,
      "learning_rate": 9.98292246503335e-05,
      "loss": 1.5735,
      "step": 15
    },
    {
      "epoch": 0.17204301075268819,
      "grad_norm": 2.066209554672241,
      "learning_rate": 9.975414512725057e-05,
      "loss": 1.7002,
      "step": 16
    },
    {
      "epoch": 0.1827956989247312,
      "grad_norm": 2.373577117919922,
      "learning_rate": 9.966546331768191e-05,
      "loss": 1.4914,
      "step": 17
    },
    {
      "epoch": 0.1935483870967742,
      "grad_norm": 2.016629695892334,
      "learning_rate": 9.956320346634876e-05,
      "loss": 1.3667,
      "step": 18
    },
    {
      "epoch": 0.20430107526881722,
      "grad_norm": 2.0558924674987793,
      "learning_rate": 9.944739353007344e-05,
      "loss": 1.6329,
      "step": 19
    },
    {
      "epoch": 0.21505376344086022,
      "grad_norm": 2.0576963424682617,
      "learning_rate": 9.931806517013612e-05,
      "loss": 1.7884,
      "step": 20
    },
    {
      "epoch": 0.22580645161290322,
      "grad_norm": 2.1554651260375977,
      "learning_rate": 9.917525374361912e-05,
      "loss": 1.606,
      "step": 21
    },
    {
      "epoch": 0.23655913978494625,
      "grad_norm": 1.869352102279663,
      "learning_rate": 9.901899829374047e-05,
      "loss": 1.5538,
      "step": 22
    },
    {
      "epoch": 0.24731182795698925,
      "grad_norm": 1.7703973054885864,
      "learning_rate": 9.884934153917997e-05,
      "loss": 1.5749,
      "step": 23
    },
    {
      "epoch": 0.25806451612903225,
      "grad_norm": 1.570709228515625,
      "learning_rate": 9.86663298624003e-05,
      "loss": 1.4434,
      "step": 24
    },
    {
      "epoch": 0.26881720430107525,
      "grad_norm": 1.6146509647369385,
      "learning_rate": 9.847001329696653e-05,
      "loss": 1.4106,
      "step": 25
    },
    {
      "epoch": 0.27956989247311825,
      "grad_norm": 1.6668318510055542,
      "learning_rate": 9.826044551386744e-05,
      "loss": 1.5185,
      "step": 26
    },
    {
      "epoch": 0.2903225806451613,
      "grad_norm": 1.7098801136016846,
      "learning_rate": 9.803768380684242e-05,
      "loss": 1.4251,
      "step": 27
    },
    {
      "epoch": 0.3010752688172043,
      "grad_norm": 1.5546528100967407,
      "learning_rate": 9.780178907671789e-05,
      "loss": 1.4933,
      "step": 28
    },
    {
      "epoch": 0.3118279569892473,
      "grad_norm": 1.6766256093978882,
      "learning_rate": 9.755282581475769e-05,
      "loss": 1.5284,
      "step": 29
    },
    {
      "epoch": 0.3225806451612903,
      "grad_norm": 1.4459998607635498,
      "learning_rate": 9.729086208503174e-05,
      "loss": 1.3004,
      "step": 30
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 1.567784309387207,
      "learning_rate": 9.701596950580806e-05,
      "loss": 1.4096,
      "step": 31
    },
    {
      "epoch": 0.34408602150537637,
      "grad_norm": 1.7961149215698242,
      "learning_rate": 9.672822322997305e-05,
      "loss": 1.5655,
      "step": 32
    },
    {
      "epoch": 0.3548387096774194,
      "grad_norm": 1.873795509338379,
      "learning_rate": 9.642770192448536e-05,
      "loss": 1.4933,
      "step": 33
    },
    {
      "epoch": 0.3655913978494624,
      "grad_norm": 1.6129283905029297,
      "learning_rate": 9.611448774886924e-05,
      "loss": 1.5071,
      "step": 34
    },
    {
      "epoch": 0.3763440860215054,
      "grad_norm": 1.649135708808899,
      "learning_rate": 9.578866633275288e-05,
      "loss": 1.3613,
      "step": 35
    },
    {
      "epoch": 0.3870967741935484,
      "grad_norm": 1.5218567848205566,
      "learning_rate": 9.545032675245813e-05,
      "loss": 1.584,
      "step": 36
    },
    {
      "epoch": 0.3978494623655914,
      "grad_norm": 2.0004472732543945,
      "learning_rate": 9.509956150664796e-05,
      "loss": 1.481,
      "step": 37
    },
    {
      "epoch": 0.40860215053763443,
      "grad_norm": 1.8755414485931396,
      "learning_rate": 9.473646649103818e-05,
      "loss": 1.5232,
      "step": 38
    },
    {
      "epoch": 0.41935483870967744,
      "grad_norm": 1.5692195892333984,
      "learning_rate": 9.43611409721806e-05,
      "loss": 1.2805,
      "step": 39
    },
    {
      "epoch": 0.43010752688172044,
      "grad_norm": 1.687671184539795,
      "learning_rate": 9.397368756032445e-05,
      "loss": 1.5781,
      "step": 40
    },
    {
      "epoch": 0.44086021505376344,
      "grad_norm": 1.6162201166152954,
      "learning_rate": 9.357421218136386e-05,
      "loss": 1.6896,
      "step": 41
    },
    {
      "epoch": 0.45161290322580644,
      "grad_norm": 1.6264986991882324,
      "learning_rate": 9.316282404787871e-05,
      "loss": 1.5587,
      "step": 42
    },
    {
      "epoch": 0.46236559139784944,
      "grad_norm": 1.5933151245117188,
      "learning_rate": 9.273963562927695e-05,
      "loss": 1.3579,
      "step": 43
    },
    {
      "epoch": 0.4731182795698925,
      "grad_norm": 1.5279178619384766,
      "learning_rate": 9.230476262104677e-05,
      "loss": 1.426,
      "step": 44
    },
    {
      "epoch": 0.4838709677419355,
      "grad_norm": 1.5955722332000732,
      "learning_rate": 9.185832391312644e-05,
      "loss": 1.6286,
      "step": 45
    },
    {
      "epoch": 0.4946236559139785,
      "grad_norm": 1.3342571258544922,
      "learning_rate": 9.140044155740101e-05,
      "loss": 1.3073,
      "step": 46
    },
    {
      "epoch": 0.5053763440860215,
      "grad_norm": 1.3443530797958374,
      "learning_rate": 9.093124073433463e-05,
      "loss": 1.4065,
      "step": 47
    },
    {
      "epoch": 0.5161290322580645,
      "grad_norm": 1.672761082649231,
      "learning_rate": 9.045084971874738e-05,
      "loss": 1.6361,
      "step": 48
    },
    {
      "epoch": 0.5268817204301075,
      "grad_norm": 1.5579395294189453,
      "learning_rate": 8.995939984474624e-05,
      "loss": 1.4569,
      "step": 49
    },
    {
      "epoch": 0.5376344086021505,
      "grad_norm": 1.3008219003677368,
      "learning_rate": 8.945702546981969e-05,
      "loss": 1.3278,
      "step": 50
    },
    {
      "epoch": 0.5376344086021505,
      "eval_loss": 1.3941900730133057,
      "eval_runtime": 4.7378,
      "eval_samples_per_second": 33.138,
      "eval_steps_per_second": 8.443,
      "step": 50
    },
    {
      "epoch": 0.5483870967741935,
      "grad_norm": 1.3945226669311523,
      "learning_rate": 8.894386393810563e-05,
      "loss": 1.2764,
      "step": 51
    },
    {
      "epoch": 0.5591397849462365,
      "grad_norm": 1.3870302438735962,
      "learning_rate": 8.842005554284296e-05,
      "loss": 1.4109,
      "step": 52
    },
    {
      "epoch": 0.5698924731182796,
      "grad_norm": 1.3363792896270752,
      "learning_rate": 8.788574348801675e-05,
      "loss": 1.3709,
      "step": 53
    },
    {
      "epoch": 0.5806451612903226,
      "grad_norm": 1.4388351440429688,
      "learning_rate": 8.73410738492077e-05,
      "loss": 1.3211,
      "step": 54
    },
    {
      "epoch": 0.5913978494623656,
      "grad_norm": 1.3716356754302979,
      "learning_rate": 8.678619553365659e-05,
      "loss": 1.3606,
      "step": 55
    },
    {
      "epoch": 0.6021505376344086,
      "grad_norm": 1.4401875734329224,
      "learning_rate": 8.622126023955446e-05,
      "loss": 1.4787,
      "step": 56
    },
    {
      "epoch": 0.6129032258064516,
      "grad_norm": 1.4469499588012695,
      "learning_rate": 8.564642241456986e-05,
      "loss": 1.4999,
      "step": 57
    },
    {
      "epoch": 0.6236559139784946,
      "grad_norm": 1.427000641822815,
      "learning_rate": 8.506183921362443e-05,
      "loss": 1.3769,
      "step": 58
    },
    {
      "epoch": 0.6344086021505376,
      "grad_norm": 1.6219151020050049,
      "learning_rate": 8.44676704559283e-05,
      "loss": 1.4022,
      "step": 59
    },
    {
      "epoch": 0.6451612903225806,
      "grad_norm": 1.4375038146972656,
      "learning_rate": 8.386407858128706e-05,
      "loss": 1.3532,
      "step": 60
    },
    {
      "epoch": 0.6559139784946236,
      "grad_norm": 1.4883887767791748,
      "learning_rate": 8.32512286056924e-05,
      "loss": 1.518,
      "step": 61
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 1.5331352949142456,
      "learning_rate": 8.262928807620843e-05,
      "loss": 1.2898,
      "step": 62
    },
    {
      "epoch": 0.6774193548387096,
      "grad_norm": 1.4444113969802856,
      "learning_rate": 8.199842702516583e-05,
      "loss": 1.4499,
      "step": 63
    },
    {
      "epoch": 0.6881720430107527,
      "grad_norm": 1.4317717552185059,
      "learning_rate": 8.135881792367686e-05,
      "loss": 1.6092,
      "step": 64
    },
    {
      "epoch": 0.6989247311827957,
      "grad_norm": 1.9583007097244263,
      "learning_rate": 8.07106356344834e-05,
      "loss": 1.3679,
      "step": 65
    },
    {
      "epoch": 0.7096774193548387,
      "grad_norm": 1.5948585271835327,
      "learning_rate": 8.005405736415126e-05,
      "loss": 1.5299,
      "step": 66
    },
    {
      "epoch": 0.7204301075268817,
      "grad_norm": 1.474169135093689,
      "learning_rate": 7.938926261462366e-05,
      "loss": 1.4273,
      "step": 67
    },
    {
      "epoch": 0.7311827956989247,
      "grad_norm": 1.5605497360229492,
      "learning_rate": 7.871643313414718e-05,
      "loss": 1.4787,
      "step": 68
    },
    {
      "epoch": 0.7419354838709677,
      "grad_norm": 1.6736353635787964,
      "learning_rate": 7.803575286758364e-05,
      "loss": 1.5762,
      "step": 69
    },
    {
      "epoch": 0.7526881720430108,
      "grad_norm": 1.4765825271606445,
      "learning_rate": 7.734740790612136e-05,
      "loss": 1.4682,
      "step": 70
    },
    {
      "epoch": 0.7634408602150538,
      "grad_norm": 1.3116446733474731,
      "learning_rate": 7.66515864363997e-05,
      "loss": 1.3492,
      "step": 71
    },
    {
      "epoch": 0.7741935483870968,
      "grad_norm": 1.3846219778060913,
      "learning_rate": 7.594847868906076e-05,
      "loss": 1.3566,
      "step": 72
    },
    {
      "epoch": 0.7849462365591398,
      "grad_norm": 1.4503177404403687,
      "learning_rate": 7.52382768867422e-05,
      "loss": 1.4169,
      "step": 73
    },
    {
      "epoch": 0.7956989247311828,
      "grad_norm": 1.5589603185653687,
      "learning_rate": 7.452117519152542e-05,
      "loss": 1.3105,
      "step": 74
    },
    {
      "epoch": 0.8064516129032258,
      "grad_norm": 1.4112577438354492,
      "learning_rate": 7.379736965185368e-05,
      "loss": 1.4348,
      "step": 75
    },
    {
      "epoch": 0.8172043010752689,
      "grad_norm": 1.478387713432312,
      "learning_rate": 7.30670581489344e-05,
      "loss": 1.4845,
      "step": 76
    },
    {
      "epoch": 0.8279569892473119,
      "grad_norm": 1.4269083738327026,
      "learning_rate": 7.233044034264034e-05,
      "loss": 1.42,
      "step": 77
    },
    {
      "epoch": 0.8387096774193549,
      "grad_norm": 1.2202898263931274,
      "learning_rate": 7.158771761692464e-05,
      "loss": 1.4413,
      "step": 78
    },
    {
      "epoch": 0.8494623655913979,
      "grad_norm": 1.497951865196228,
      "learning_rate": 7.083909302476453e-05,
      "loss": 1.5208,
      "step": 79
    },
    {
      "epoch": 0.8602150537634409,
      "grad_norm": 1.2094298601150513,
      "learning_rate": 7.008477123264848e-05,
      "loss": 1.3276,
      "step": 80
    },
    {
      "epoch": 0.8709677419354839,
      "grad_norm": 1.1779359579086304,
      "learning_rate": 6.932495846462261e-05,
      "loss": 1.183,
      "step": 81
    },
    {
      "epoch": 0.8817204301075269,
      "grad_norm": 1.2660930156707764,
      "learning_rate": 6.855986244591104e-05,
      "loss": 1.3094,
      "step": 82
    },
    {
      "epoch": 0.8924731182795699,
      "grad_norm": 1.3192017078399658,
      "learning_rate": 6.778969234612584e-05,
      "loss": 1.1841,
      "step": 83
    },
    {
      "epoch": 0.9032258064516129,
      "grad_norm": 1.2833420038223267,
      "learning_rate": 6.701465872208216e-05,
      "loss": 1.4709,
      "step": 84
    },
    {
      "epoch": 0.9139784946236559,
      "grad_norm": 1.2637368440628052,
      "learning_rate": 6.623497346023418e-05,
      "loss": 1.2868,
      "step": 85
    },
    {
      "epoch": 0.9247311827956989,
      "grad_norm": 1.4691823720932007,
      "learning_rate": 6.545084971874738e-05,
      "loss": 1.5011,
      "step": 86
    },
    {
      "epoch": 0.9354838709677419,
      "grad_norm": 1.3752435445785522,
      "learning_rate": 6.466250186922325e-05,
      "loss": 1.5095,
      "step": 87
    },
    {
      "epoch": 0.946236559139785,
      "grad_norm": 1.3410090208053589,
      "learning_rate": 6.387014543809223e-05,
      "loss": 1.4567,
      "step": 88
    },
    {
      "epoch": 0.956989247311828,
      "grad_norm": 1.488720178604126,
      "learning_rate": 6.307399704769099e-05,
      "loss": 1.3362,
      "step": 89
    },
    {
      "epoch": 0.967741935483871,
      "grad_norm": 1.4510327577590942,
      "learning_rate": 6.227427435703997e-05,
      "loss": 1.5497,
      "step": 90
    },
    {
      "epoch": 0.978494623655914,
      "grad_norm": 1.362952470779419,
      "learning_rate": 6.147119600233758e-05,
      "loss": 1.3794,
      "step": 91
    },
    {
      "epoch": 0.989247311827957,
      "grad_norm": 1.3994568586349487,
      "learning_rate": 6.066498153718735e-05,
      "loss": 1.4286,
      "step": 92
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.45283043384552,
      "learning_rate": 5.985585137257401e-05,
      "loss": 1.3081,
      "step": 93
    },
    {
      "epoch": 1.010752688172043,
      "grad_norm": 1.0466877222061157,
      "learning_rate": 5.90440267166055e-05,
      "loss": 1.1259,
      "step": 94
    },
    {
      "epoch": 1.021505376344086,
      "grad_norm": 1.0868937969207764,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 1.1437,
      "step": 95
    },
    {
      "epoch": 1.032258064516129,
      "grad_norm": 1.0157090425491333,
      "learning_rate": 5.74131823855921e-05,
      "loss": 1.0368,
      "step": 96
    },
    {
      "epoch": 1.043010752688172,
      "grad_norm": 0.9955579042434692,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 1.0436,
      "step": 97
    },
    {
      "epoch": 1.053763440860215,
      "grad_norm": 1.1000343561172485,
      "learning_rate": 5.577423184847932e-05,
      "loss": 1.1144,
      "step": 98
    },
    {
      "epoch": 1.064516129032258,
      "grad_norm": 1.1350839138031006,
      "learning_rate": 5.495227651252315e-05,
      "loss": 1.1093,
      "step": 99
    },
    {
      "epoch": 1.075268817204301,
      "grad_norm": 1.1216412782669067,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 1.0636,
      "step": 100
    },
    {
      "epoch": 1.075268817204301,
      "eval_loss": 1.3642938137054443,
      "eval_runtime": 4.7381,
      "eval_samples_per_second": 33.135,
      "eval_steps_per_second": 8.442,
      "step": 100
    },
    {
      "epoch": 1.086021505376344,
      "grad_norm": 0.935893177986145,
      "learning_rate": 5.330452921628497e-05,
      "loss": 1.046,
      "step": 101
    },
    {
      "epoch": 1.096774193548387,
      "grad_norm": 1.0560184717178345,
      "learning_rate": 5.247918773366112e-05,
      "loss": 1.0836,
      "step": 102
    },
    {
      "epoch": 1.10752688172043,
      "grad_norm": 1.1073940992355347,
      "learning_rate": 5.165316846586541e-05,
      "loss": 1.1118,
      "step": 103
    },
    {
      "epoch": 1.118279569892473,
      "grad_norm": 1.1321074962615967,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 1.0707,
      "step": 104
    },
    {
      "epoch": 1.129032258064516,
      "grad_norm": 1.1407638788223267,
      "learning_rate": 5e-05,
      "loss": 0.9936,
      "step": 105
    },
    {
      "epoch": 1.139784946236559,
      "grad_norm": 1.505632758140564,
      "learning_rate": 4.917330276168208e-05,
      "loss": 1.1918,
      "step": 106
    },
    {
      "epoch": 1.1505376344086022,
      "grad_norm": 1.1546612977981567,
      "learning_rate": 4.834683153413459e-05,
      "loss": 1.0016,
      "step": 107
    },
    {
      "epoch": 1.1612903225806452,
      "grad_norm": 1.2602618932724,
      "learning_rate": 4.7520812266338885e-05,
      "loss": 1.059,
      "step": 108
    },
    {
      "epoch": 1.1720430107526882,
      "grad_norm": 1.1792690753936768,
      "learning_rate": 4.669547078371504e-05,
      "loss": 0.9426,
      "step": 109
    },
    {
      "epoch": 1.1827956989247312,
      "grad_norm": 1.314300298690796,
      "learning_rate": 4.5871032726383386e-05,
      "loss": 1.0949,
      "step": 110
    },
    {
      "epoch": 1.1935483870967742,
      "grad_norm": 1.3671072721481323,
      "learning_rate": 4.504772348747687e-05,
      "loss": 1.0288,
      "step": 111
    },
    {
      "epoch": 1.2043010752688172,
      "grad_norm": 1.364042043685913,
      "learning_rate": 4.4225768151520694e-05,
      "loss": 1.0628,
      "step": 112
    },
    {
      "epoch": 1.2150537634408602,
      "grad_norm": 1.4929494857788086,
      "learning_rate": 4.3405391432896555e-05,
      "loss": 1.0423,
      "step": 113
    },
    {
      "epoch": 1.2258064516129032,
      "grad_norm": 1.4592775106430054,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 1.0178,
      "step": 114
    },
    {
      "epoch": 1.2365591397849462,
      "grad_norm": 1.6401259899139404,
      "learning_rate": 4.17702704859633e-05,
      "loss": 1.2031,
      "step": 115
    },
    {
      "epoch": 1.2473118279569892,
      "grad_norm": 1.4892230033874512,
      "learning_rate": 4.095597328339452e-05,
      "loss": 1.1288,
      "step": 116
    },
    {
      "epoch": 1.2580645161290323,
      "grad_norm": 1.740278720855713,
      "learning_rate": 4.0144148627425993e-05,
      "loss": 1.2741,
      "step": 117
    },
    {
      "epoch": 1.2688172043010753,
      "grad_norm": 1.8201625347137451,
      "learning_rate": 3.933501846281267e-05,
      "loss": 1.3326,
      "step": 118
    },
    {
      "epoch": 1.2795698924731183,
      "grad_norm": 1.5671497583389282,
      "learning_rate": 3.852880399766243e-05,
      "loss": 1.0878,
      "step": 119
    },
    {
      "epoch": 1.2903225806451613,
      "grad_norm": 1.2733285427093506,
      "learning_rate": 3.772572564296005e-05,
      "loss": 0.9754,
      "step": 120
    },
    {
      "epoch": 1.3010752688172043,
      "grad_norm": 1.3822678327560425,
      "learning_rate": 3.6926002952309016e-05,
      "loss": 0.8714,
      "step": 121
    },
    {
      "epoch": 1.3118279569892473,
      "grad_norm": 1.6007723808288574,
      "learning_rate": 3.612985456190778e-05,
      "loss": 0.9819,
      "step": 122
    },
    {
      "epoch": 1.3225806451612903,
      "grad_norm": 1.5472182035446167,
      "learning_rate": 3.533749813077677e-05,
      "loss": 0.9527,
      "step": 123
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 1.585017204284668,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 1.1438,
      "step": 124
    },
    {
      "epoch": 1.3440860215053765,
      "grad_norm": 1.5065468549728394,
      "learning_rate": 3.3765026539765834e-05,
      "loss": 1.1306,
      "step": 125
    },
    {
      "epoch": 1.3548387096774195,
      "grad_norm": 1.4847522974014282,
      "learning_rate": 3.298534127791785e-05,
      "loss": 1.0218,
      "step": 126
    },
    {
      "epoch": 1.3655913978494625,
      "grad_norm": 1.5103038549423218,
      "learning_rate": 3.221030765387417e-05,
      "loss": 1.0951,
      "step": 127
    },
    {
      "epoch": 1.3763440860215055,
      "grad_norm": 1.432877540588379,
      "learning_rate": 3.144013755408895e-05,
      "loss": 0.971,
      "step": 128
    },
    {
      "epoch": 1.3870967741935485,
      "grad_norm": 1.4108842611312866,
      "learning_rate": 3.0675041535377405e-05,
      "loss": 1.0357,
      "step": 129
    },
    {
      "epoch": 1.3978494623655915,
      "grad_norm": 1.3557878732681274,
      "learning_rate": 2.991522876735154e-05,
      "loss": 1.0886,
      "step": 130
    },
    {
      "epoch": 1.4086021505376345,
      "grad_norm": 1.496570110321045,
      "learning_rate": 2.916090697523549e-05,
      "loss": 1.1439,
      "step": 131
    },
    {
      "epoch": 1.4193548387096775,
      "grad_norm": 1.5574095249176025,
      "learning_rate": 2.8412282383075363e-05,
      "loss": 1.1395,
      "step": 132
    },
    {
      "epoch": 1.4301075268817205,
      "grad_norm": 1.6914658546447754,
      "learning_rate": 2.766955965735968e-05,
      "loss": 1.1831,
      "step": 133
    },
    {
      "epoch": 1.4408602150537635,
      "grad_norm": 1.6495730876922607,
      "learning_rate": 2.693294185106562e-05,
      "loss": 1.1531,
      "step": 134
    },
    {
      "epoch": 1.4516129032258065,
      "grad_norm": 1.6482576131820679,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 1.2152,
      "step": 135
    },
    {
      "epoch": 1.4623655913978495,
      "grad_norm": 1.3409982919692993,
      "learning_rate": 2.547882480847461e-05,
      "loss": 0.8851,
      "step": 136
    },
    {
      "epoch": 1.4731182795698925,
      "grad_norm": 1.4901268482208252,
      "learning_rate": 2.476172311325783e-05,
      "loss": 1.0615,
      "step": 137
    },
    {
      "epoch": 1.4838709677419355,
      "grad_norm": 1.6849151849746704,
      "learning_rate": 2.405152131093926e-05,
      "loss": 1.1009,
      "step": 138
    },
    {
      "epoch": 1.4946236559139785,
      "grad_norm": 1.4192452430725098,
      "learning_rate": 2.3348413563600325e-05,
      "loss": 0.9832,
      "step": 139
    },
    {
      "epoch": 1.5053763440860215,
      "grad_norm": 1.598957896232605,
      "learning_rate": 2.2652592093878666e-05,
      "loss": 1.1568,
      "step": 140
    },
    {
      "epoch": 1.5161290322580645,
      "grad_norm": 1.4840012788772583,
      "learning_rate": 2.196424713241637e-05,
      "loss": 1.0706,
      "step": 141
    },
    {
      "epoch": 1.5268817204301075,
      "grad_norm": 1.6134722232818604,
      "learning_rate": 2.128356686585282e-05,
      "loss": 1.123,
      "step": 142
    },
    {
      "epoch": 1.5376344086021505,
      "grad_norm": 1.54719877243042,
      "learning_rate": 2.061073738537635e-05,
      "loss": 1.0761,
      "step": 143
    },
    {
      "epoch": 1.5483870967741935,
      "grad_norm": 1.6617050170898438,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 1.2028,
      "step": 144
    },
    {
      "epoch": 1.5591397849462365,
      "grad_norm": 1.5799815654754639,
      "learning_rate": 1.928936436551661e-05,
      "loss": 1.1505,
      "step": 145
    },
    {
      "epoch": 1.5698924731182795,
      "grad_norm": 1.6557565927505493,
      "learning_rate": 1.8641182076323148e-05,
      "loss": 1.1309,
      "step": 146
    },
    {
      "epoch": 1.5806451612903225,
      "grad_norm": 1.5026015043258667,
      "learning_rate": 1.800157297483417e-05,
      "loss": 1.0818,
      "step": 147
    },
    {
      "epoch": 1.5913978494623655,
      "grad_norm": 1.3791894912719727,
      "learning_rate": 1.7370711923791567e-05,
      "loss": 0.9874,
      "step": 148
    },
    {
      "epoch": 1.6021505376344085,
      "grad_norm": 1.4911445379257202,
      "learning_rate": 1.6748771394307585e-05,
      "loss": 1.1183,
      "step": 149
    },
    {
      "epoch": 1.6129032258064515,
      "grad_norm": 1.651066541671753,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 1.1194,
      "step": 150
    },
    {
      "epoch": 1.6129032258064515,
      "eval_loss": 1.3905954360961914,
      "eval_runtime": 4.7674,
      "eval_samples_per_second": 32.932,
      "eval_steps_per_second": 8.39,
      "step": 150
    },
    {
      "epoch": 1.6236559139784945,
      "grad_norm": 1.4255160093307495,
      "learning_rate": 1.553232954407171e-05,
      "loss": 1.0179,
      "step": 151
    },
    {
      "epoch": 1.6344086021505375,
      "grad_norm": 1.4603620767593384,
      "learning_rate": 1.4938160786375572e-05,
      "loss": 0.8123,
      "step": 152
    },
    {
      "epoch": 1.6451612903225805,
      "grad_norm": 1.4084007740020752,
      "learning_rate": 1.435357758543015e-05,
      "loss": 1.129,
      "step": 153
    },
    {
      "epoch": 1.6559139784946235,
      "grad_norm": 1.5454281568527222,
      "learning_rate": 1.3778739760445552e-05,
      "loss": 1.016,
      "step": 154
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 1.525089979171753,
      "learning_rate": 1.3213804466343421e-05,
      "loss": 1.0878,
      "step": 155
    },
    {
      "epoch": 1.6774193548387095,
      "grad_norm": 1.5995702743530273,
      "learning_rate": 1.2658926150792322e-05,
      "loss": 1.1535,
      "step": 156
    },
    {
      "epoch": 1.6881720430107527,
      "grad_norm": 1.5803050994873047,
      "learning_rate": 1.2114256511983274e-05,
      "loss": 1.0537,
      "step": 157
    },
    {
      "epoch": 1.6989247311827957,
      "grad_norm": 1.6103012561798096,
      "learning_rate": 1.157994445715706e-05,
      "loss": 1.1765,
      "step": 158
    },
    {
      "epoch": 1.7096774193548387,
      "grad_norm": 1.8218728303909302,
      "learning_rate": 1.1056136061894384e-05,
      "loss": 1.1703,
      "step": 159
    },
    {
      "epoch": 1.7204301075268817,
      "grad_norm": 1.4936676025390625,
      "learning_rate": 1.0542974530180327e-05,
      "loss": 1.1043,
      "step": 160
    },
    {
      "epoch": 1.7311827956989247,
      "grad_norm": 1.5079119205474854,
      "learning_rate": 1.0040600155253765e-05,
      "loss": 1.019,
      "step": 161
    },
    {
      "epoch": 1.7419354838709677,
      "grad_norm": 1.420454740524292,
      "learning_rate": 9.549150281252633e-06,
      "loss": 1.0531,
      "step": 162
    },
    {
      "epoch": 1.7526881720430108,
      "grad_norm": 1.4512486457824707,
      "learning_rate": 9.068759265665384e-06,
      "loss": 1.0896,
      "step": 163
    },
    {
      "epoch": 1.7634408602150538,
      "grad_norm": 1.605732798576355,
      "learning_rate": 8.599558442598998e-06,
      "loss": 1.0857,
      "step": 164
    },
    {
      "epoch": 1.7741935483870968,
      "grad_norm": 1.371429204940796,
      "learning_rate": 8.141676086873572e-06,
      "loss": 0.905,
      "step": 165
    },
    {
      "epoch": 1.7849462365591398,
      "grad_norm": 1.614424228668213,
      "learning_rate": 7.695237378953223e-06,
      "loss": 1.0574,
      "step": 166
    },
    {
      "epoch": 1.7956989247311828,
      "grad_norm": 1.5107542276382446,
      "learning_rate": 7.260364370723044e-06,
      "loss": 1.0767,
      "step": 167
    },
    {
      "epoch": 1.8064516129032258,
      "grad_norm": 1.4299854040145874,
      "learning_rate": 6.837175952121306e-06,
      "loss": 0.9596,
      "step": 168
    },
    {
      "epoch": 1.817204301075269,
      "grad_norm": 1.6855710744857788,
      "learning_rate": 6.425787818636131e-06,
      "loss": 1.1333,
      "step": 169
    },
    {
      "epoch": 1.827956989247312,
      "grad_norm": 1.5150099992752075,
      "learning_rate": 6.026312439675552e-06,
      "loss": 1.0099,
      "step": 170
    },
    {
      "epoch": 1.838709677419355,
      "grad_norm": 1.5539478063583374,
      "learning_rate": 5.6388590278194096e-06,
      "loss": 1.04,
      "step": 171
    },
    {
      "epoch": 1.849462365591398,
      "grad_norm": 1.502110481262207,
      "learning_rate": 5.263533508961827e-06,
      "loss": 1.0692,
      "step": 172
    },
    {
      "epoch": 1.860215053763441,
      "grad_norm": 1.3206491470336914,
      "learning_rate": 4.900438493352055e-06,
      "loss": 1.0718,
      "step": 173
    },
    {
      "epoch": 1.870967741935484,
      "grad_norm": 1.462929606437683,
      "learning_rate": 4.549673247541875e-06,
      "loss": 0.9734,
      "step": 174
    },
    {
      "epoch": 1.881720430107527,
      "grad_norm": 1.5733832120895386,
      "learning_rate": 4.2113336672471245e-06,
      "loss": 0.9554,
      "step": 175
    },
    {
      "epoch": 1.89247311827957,
      "grad_norm": 1.4133031368255615,
      "learning_rate": 3.885512251130763e-06,
      "loss": 0.9716,
      "step": 176
    },
    {
      "epoch": 1.903225806451613,
      "grad_norm": 1.4608386754989624,
      "learning_rate": 3.5722980755146517e-06,
      "loss": 0.9521,
      "step": 177
    },
    {
      "epoch": 1.913978494623656,
      "grad_norm": 1.5045890808105469,
      "learning_rate": 3.271776770026963e-06,
      "loss": 0.9913,
      "step": 178
    },
    {
      "epoch": 1.924731182795699,
      "grad_norm": 1.577175259590149,
      "learning_rate": 2.9840304941919415e-06,
      "loss": 1.0463,
      "step": 179
    },
    {
      "epoch": 1.935483870967742,
      "grad_norm": 1.683937430381775,
      "learning_rate": 2.7091379149682685e-06,
      "loss": 1.2391,
      "step": 180
    },
    {
      "epoch": 1.946236559139785,
      "grad_norm": 1.6469801664352417,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 1.1313,
      "step": 181
    },
    {
      "epoch": 1.956989247311828,
      "grad_norm": 1.5587536096572876,
      "learning_rate": 2.1982109232821178e-06,
      "loss": 1.0533,
      "step": 182
    },
    {
      "epoch": 1.967741935483871,
      "grad_norm": 1.9218688011169434,
      "learning_rate": 1.962316193157593e-06,
      "loss": 1.2219,
      "step": 183
    },
    {
      "epoch": 1.978494623655914,
      "grad_norm": 1.4584901332855225,
      "learning_rate": 1.7395544861325718e-06,
      "loss": 1.0309,
      "step": 184
    },
    {
      "epoch": 1.989247311827957,
      "grad_norm": 1.6153608560562134,
      "learning_rate": 1.5299867030334814e-06,
      "loss": 1.0525,
      "step": 185
    },
    {
      "epoch": 2.0,
      "grad_norm": 1.873821496963501,
      "learning_rate": 1.333670137599713e-06,
      "loss": 1.1285,
      "step": 186
    },
    {
      "epoch": 2.010752688172043,
      "grad_norm": 1.4225022792816162,
      "learning_rate": 1.1506584608200367e-06,
      "loss": 1.0531,
      "step": 187
    },
    {
      "epoch": 2.021505376344086,
      "grad_norm": 1.3832712173461914,
      "learning_rate": 9.810017062595322e-07,
      "loss": 1.0434,
      "step": 188
    },
    {
      "epoch": 2.032258064516129,
      "grad_norm": 1.2817425727844238,
      "learning_rate": 8.247462563808817e-07,
      "loss": 0.876,
      "step": 189
    },
    {
      "epoch": 2.043010752688172,
      "grad_norm": 1.2483127117156982,
      "learning_rate": 6.819348298638839e-07,
      "loss": 0.8236,
      "step": 190
    },
    {
      "epoch": 2.053763440860215,
      "grad_norm": 1.3870429992675781,
      "learning_rate": 5.526064699265753e-07,
      "loss": 0.909,
      "step": 191
    },
    {
      "epoch": 2.064516129032258,
      "grad_norm": 1.273902416229248,
      "learning_rate": 4.367965336512403e-07,
      "loss": 0.9588,
      "step": 192
    },
    {
      "epoch": 2.075268817204301,
      "grad_norm": 1.3933042287826538,
      "learning_rate": 3.3453668231809286e-07,
      "loss": 0.932,
      "step": 193
    },
    {
      "epoch": 2.086021505376344,
      "grad_norm": 1.327836036682129,
      "learning_rate": 2.458548727494292e-07,
      "loss": 0.833,
      "step": 194
    },
    {
      "epoch": 2.096774193548387,
      "grad_norm": 1.3181606531143188,
      "learning_rate": 1.7077534966650766e-07,
      "loss": 0.9239,
      "step": 195
    },
    {
      "epoch": 2.10752688172043,
      "grad_norm": 1.4119458198547363,
      "learning_rate": 1.0931863906127327e-07,
      "loss": 0.9425,
      "step": 196
    },
    {
      "epoch": 2.118279569892473,
      "grad_norm": 1.3290451765060425,
      "learning_rate": 6.150154258476315e-08,
      "loss": 0.921,
      "step": 197
    },
    {
      "epoch": 2.129032258064516,
      "grad_norm": 1.3426759243011475,
      "learning_rate": 2.7337132953697554e-08,
      "loss": 0.9362,
      "step": 198
    },
    {
      "epoch": 2.139784946236559,
      "grad_norm": 1.3566099405288696,
      "learning_rate": 6.834750376549792e-09,
      "loss": 0.9784,
      "step": 199
    },
    {
      "epoch": 2.150537634408602,
      "grad_norm": 1.3170688152313232,
      "learning_rate": 0.0,
      "loss": 0.9362,
      "step": 200
    },
    {
      "epoch": 2.150537634408602,
      "eval_loss": 1.383666753768921,
      "eval_runtime": 4.7303,
      "eval_samples_per_second": 33.191,
      "eval_steps_per_second": 8.456,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 2
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5.495622655279104e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}