{
  "best_metric": 1.5972942113876343,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.12101043715020421,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.000605052185751021,
      "grad_norm": 8.824080467224121,
      "learning_rate": 1e-05,
      "loss": 3.857,
      "step": 1
    },
    {
      "epoch": 0.000605052185751021,
      "eval_loss": 3.967219829559326,
      "eval_runtime": 131.6153,
      "eval_samples_per_second": 21.153,
      "eval_steps_per_second": 5.288,
      "step": 1
    },
    {
      "epoch": 0.001210104371502042,
      "grad_norm": 9.042195320129395,
      "learning_rate": 2e-05,
      "loss": 3.4688,
      "step": 2
    },
    {
      "epoch": 0.001815156557253063,
      "grad_norm": 9.631072998046875,
      "learning_rate": 3e-05,
      "loss": 3.6481,
      "step": 3
    },
    {
      "epoch": 0.002420208743004084,
      "grad_norm": 10.320423126220703,
      "learning_rate": 4e-05,
      "loss": 3.9826,
      "step": 4
    },
    {
      "epoch": 0.003025260928755105,
      "grad_norm": 8.81252670288086,
      "learning_rate": 5e-05,
      "loss": 4.0196,
      "step": 5
    },
    {
      "epoch": 0.003630313114506126,
      "grad_norm": 7.749195575714111,
      "learning_rate": 6e-05,
      "loss": 3.2208,
      "step": 6
    },
    {
      "epoch": 0.004235365300257147,
      "grad_norm": 8.090449333190918,
      "learning_rate": 7e-05,
      "loss": 3.27,
      "step": 7
    },
    {
      "epoch": 0.004840417486008168,
      "grad_norm": 7.524991035461426,
      "learning_rate": 8e-05,
      "loss": 2.7439,
      "step": 8
    },
    {
      "epoch": 0.00544546967175919,
      "grad_norm": 7.981743812561035,
      "learning_rate": 9e-05,
      "loss": 2.9816,
      "step": 9
    },
    {
      "epoch": 0.00605052185751021,
      "grad_norm": 5.805087089538574,
      "learning_rate": 0.0001,
      "loss": 2.9419,
      "step": 10
    },
    {
      "epoch": 0.0066555740432612314,
      "grad_norm": 7.2685136795043945,
      "learning_rate": 9.999316524962345e-05,
      "loss": 3.5016,
      "step": 11
    },
    {
      "epoch": 0.007260626229012252,
      "grad_norm": 6.021042823791504,
      "learning_rate": 9.997266286704631e-05,
      "loss": 2.5152,
      "step": 12
    },
    {
      "epoch": 0.007865678414763273,
      "grad_norm": 6.932441711425781,
      "learning_rate": 9.993849845741524e-05,
      "loss": 2.6719,
      "step": 13
    },
    {
      "epoch": 0.008470730600514294,
      "grad_norm": 7.189040184020996,
      "learning_rate": 9.989068136093873e-05,
      "loss": 2.9583,
      "step": 14
    },
    {
      "epoch": 0.009075782786265316,
      "grad_norm": 6.571204662322998,
      "learning_rate": 9.98292246503335e-05,
      "loss": 2.6229,
      "step": 15
    },
    {
      "epoch": 0.009680834972016336,
      "grad_norm": 7.477680206298828,
      "learning_rate": 9.975414512725057e-05,
      "loss": 2.8054,
      "step": 16
    },
    {
      "epoch": 0.010285887157767357,
      "grad_norm": 7.641745090484619,
      "learning_rate": 9.966546331768191e-05,
      "loss": 3.1001,
      "step": 17
    },
    {
      "epoch": 0.01089093934351838,
      "grad_norm": 9.802349090576172,
      "learning_rate": 9.956320346634876e-05,
      "loss": 3.0516,
      "step": 18
    },
    {
      "epoch": 0.0114959915292694,
      "grad_norm": 7.617214202880859,
      "learning_rate": 9.944739353007344e-05,
      "loss": 1.9733,
      "step": 19
    },
    {
      "epoch": 0.01210104371502042,
      "grad_norm": 7.413455009460449,
      "learning_rate": 9.931806517013612e-05,
      "loss": 2.3252,
      "step": 20
    },
    {
      "epoch": 0.012706095900771442,
      "grad_norm": 5.545858860015869,
      "learning_rate": 9.917525374361912e-05,
      "loss": 2.0681,
      "step": 21
    },
    {
      "epoch": 0.013311148086522463,
      "grad_norm": 5.678832054138184,
      "learning_rate": 9.901899829374047e-05,
      "loss": 2.074,
      "step": 22
    },
    {
      "epoch": 0.013916200272273483,
      "grad_norm": 5.938708782196045,
      "learning_rate": 9.884934153917997e-05,
      "loss": 1.7884,
      "step": 23
    },
    {
      "epoch": 0.014521252458024504,
      "grad_norm": 5.408363342285156,
      "learning_rate": 9.86663298624003e-05,
      "loss": 1.874,
      "step": 24
    },
    {
      "epoch": 0.015126304643775526,
      "grad_norm": 5.655430793762207,
      "learning_rate": 9.847001329696653e-05,
      "loss": 2.1017,
      "step": 25
    },
    {
      "epoch": 0.015731356829526547,
      "grad_norm": 4.935225963592529,
      "learning_rate": 9.826044551386744e-05,
      "loss": 1.5395,
      "step": 26
    },
    {
      "epoch": 0.016336409015277567,
      "grad_norm": 6.2746782302856445,
      "learning_rate": 9.803768380684242e-05,
      "loss": 2.0652,
      "step": 27
    },
    {
      "epoch": 0.016941461201028588,
      "grad_norm": 4.640069961547852,
      "learning_rate": 9.780178907671789e-05,
      "loss": 1.5109,
      "step": 28
    },
    {
      "epoch": 0.017546513386779608,
      "grad_norm": 5.904468536376953,
      "learning_rate": 9.755282581475769e-05,
      "loss": 1.5498,
      "step": 29
    },
    {
      "epoch": 0.018151565572530632,
      "grad_norm": 4.789588451385498,
      "learning_rate": 9.729086208503174e-05,
      "loss": 1.8035,
      "step": 30
    },
    {
      "epoch": 0.018756617758281652,
      "grad_norm": 5.2518229484558105,
      "learning_rate": 9.701596950580806e-05,
      "loss": 2.2286,
      "step": 31
    },
    {
      "epoch": 0.019361669944032673,
      "grad_norm": 4.67473030090332,
      "learning_rate": 9.672822322997305e-05,
      "loss": 1.8263,
      "step": 32
    },
    {
      "epoch": 0.019966722129783693,
      "grad_norm": 5.545779705047607,
      "learning_rate": 9.642770192448536e-05,
      "loss": 1.6349,
      "step": 33
    },
    {
      "epoch": 0.020571774315534714,
      "grad_norm": 4.631152629852295,
      "learning_rate": 9.611448774886924e-05,
      "loss": 1.3875,
      "step": 34
    },
    {
      "epoch": 0.021176826501285734,
      "grad_norm": 4.845266342163086,
      "learning_rate": 9.578866633275288e-05,
      "loss": 1.366,
      "step": 35
    },
    {
      "epoch": 0.02178187868703676,
      "grad_norm": 5.808229446411133,
      "learning_rate": 9.545032675245813e-05,
      "loss": 1.593,
      "step": 36
    },
    {
      "epoch": 0.02238693087278778,
      "grad_norm": 4.492132186889648,
      "learning_rate": 9.509956150664796e-05,
      "loss": 1.1577,
      "step": 37
    },
    {
      "epoch": 0.0229919830585388,
      "grad_norm": 5.4371747970581055,
      "learning_rate": 9.473646649103818e-05,
      "loss": 1.7733,
      "step": 38
    },
    {
      "epoch": 0.02359703524428982,
      "grad_norm": 5.477941036224365,
      "learning_rate": 9.43611409721806e-05,
      "loss": 1.6819,
      "step": 39
    },
    {
      "epoch": 0.02420208743004084,
      "grad_norm": 5.033182621002197,
      "learning_rate": 9.397368756032445e-05,
      "loss": 1.7058,
      "step": 40
    },
    {
      "epoch": 0.02480713961579186,
      "grad_norm": 5.059697151184082,
      "learning_rate": 9.357421218136386e-05,
      "loss": 1.7372,
      "step": 41
    },
    {
      "epoch": 0.025412191801542885,
      "grad_norm": 4.732717990875244,
      "learning_rate": 9.316282404787871e-05,
      "loss": 1.3914,
      "step": 42
    },
    {
      "epoch": 0.026017243987293905,
      "grad_norm": 5.424059867858887,
      "learning_rate": 9.273963562927695e-05,
      "loss": 1.6854,
      "step": 43
    },
    {
      "epoch": 0.026622296173044926,
      "grad_norm": 7.047700881958008,
      "learning_rate": 9.230476262104677e-05,
      "loss": 2.2959,
      "step": 44
    },
    {
      "epoch": 0.027227348358795946,
      "grad_norm": 6.0506815910339355,
      "learning_rate": 9.185832391312644e-05,
      "loss": 2.1825,
      "step": 45
    },
    {
      "epoch": 0.027832400544546967,
      "grad_norm": 4.7166595458984375,
      "learning_rate": 9.140044155740101e-05,
      "loss": 1.5374,
      "step": 46
    },
    {
      "epoch": 0.028437452730297987,
      "grad_norm": 5.0626678466796875,
      "learning_rate": 9.093124073433463e-05,
      "loss": 1.7514,
      "step": 47
    },
    {
      "epoch": 0.029042504916049008,
      "grad_norm": 5.607767581939697,
      "learning_rate": 9.045084971874738e-05,
      "loss": 2.0604,
      "step": 48
    },
    {
      "epoch": 0.02964755710180003,
      "grad_norm": 4.763421058654785,
      "learning_rate": 8.995939984474624e-05,
      "loss": 1.7194,
      "step": 49
    },
    {
      "epoch": 0.030252609287551052,
      "grad_norm": 5.836249828338623,
      "learning_rate": 8.945702546981969e-05,
      "loss": 2.4789,
      "step": 50
    },
    {
      "epoch": 0.030252609287551052,
      "eval_loss": 1.9229539632797241,
      "eval_runtime": 133.0651,
      "eval_samples_per_second": 20.922,
      "eval_steps_per_second": 5.231,
      "step": 50
    },
    {
      "epoch": 0.030857661473302073,
      "grad_norm": 4.283012390136719,
      "learning_rate": 8.894386393810563e-05,
      "loss": 2.3281,
      "step": 51
    },
    {
      "epoch": 0.03146271365905309,
      "grad_norm": 4.356929779052734,
      "learning_rate": 8.842005554284296e-05,
      "loss": 1.9618,
      "step": 52
    },
    {
      "epoch": 0.032067765844804114,
      "grad_norm": 4.299388885498047,
      "learning_rate": 8.788574348801675e-05,
      "loss": 2.3413,
      "step": 53
    },
    {
      "epoch": 0.032672818030555134,
      "grad_norm": 4.3752360343933105,
      "learning_rate": 8.73410738492077e-05,
      "loss": 2.363,
      "step": 54
    },
    {
      "epoch": 0.033277870216306155,
      "grad_norm": 5.005733966827393,
      "learning_rate": 8.678619553365659e-05,
      "loss": 2.5032,
      "step": 55
    },
    {
      "epoch": 0.033882922402057175,
      "grad_norm": 4.872354030609131,
      "learning_rate": 8.622126023955446e-05,
      "loss": 2.3465,
      "step": 56
    },
    {
      "epoch": 0.034487974587808196,
      "grad_norm": 4.530215740203857,
      "learning_rate": 8.564642241456986e-05,
      "loss": 2.2999,
      "step": 57
    },
    {
      "epoch": 0.035093026773559216,
      "grad_norm": 4.4950032234191895,
      "learning_rate": 8.506183921362443e-05,
      "loss": 2.2542,
      "step": 58
    },
    {
      "epoch": 0.035698078959310243,
      "grad_norm": 6.2269511222839355,
      "learning_rate": 8.44676704559283e-05,
      "loss": 2.401,
      "step": 59
    },
    {
      "epoch": 0.036303131145061264,
      "grad_norm": 5.563892364501953,
      "learning_rate": 8.386407858128706e-05,
      "loss": 2.3522,
      "step": 60
    },
    {
      "epoch": 0.036908183330812284,
      "grad_norm": 5.401533603668213,
      "learning_rate": 8.32512286056924e-05,
      "loss": 2.0331,
      "step": 61
    },
    {
      "epoch": 0.037513235516563305,
      "grad_norm": 5.7106194496154785,
      "learning_rate": 8.262928807620843e-05,
      "loss": 2.1083,
      "step": 62
    },
    {
      "epoch": 0.038118287702314325,
      "grad_norm": 8.203621864318848,
      "learning_rate": 8.199842702516583e-05,
      "loss": 2.4249,
      "step": 63
    },
    {
      "epoch": 0.038723339888065346,
      "grad_norm": 5.082009792327881,
      "learning_rate": 8.135881792367686e-05,
      "loss": 1.31,
      "step": 64
    },
    {
      "epoch": 0.039328392073816366,
      "grad_norm": 6.3080644607543945,
      "learning_rate": 8.07106356344834e-05,
      "loss": 1.8694,
      "step": 65
    },
    {
      "epoch": 0.03993344425956739,
      "grad_norm": 6.578169822692871,
      "learning_rate": 8.005405736415126e-05,
      "loss": 2.2567,
      "step": 66
    },
    {
      "epoch": 0.04053849644531841,
      "grad_norm": 15.368583679199219,
      "learning_rate": 7.938926261462366e-05,
      "loss": 2.6325,
      "step": 67
    },
    {
      "epoch": 0.04114354863106943,
      "grad_norm": 7.130876064300537,
      "learning_rate": 7.871643313414718e-05,
      "loss": 2.3104,
      "step": 68
    },
    {
      "epoch": 0.04174860081682045,
      "grad_norm": 7.857542514801025,
      "learning_rate": 7.803575286758364e-05,
      "loss": 2.45,
      "step": 69
    },
    {
      "epoch": 0.04235365300257147,
      "grad_norm": 5.319846153259277,
      "learning_rate": 7.734740790612136e-05,
      "loss": 1.6475,
      "step": 70
    },
    {
      "epoch": 0.04295870518832249,
      "grad_norm": 5.050972938537598,
      "learning_rate": 7.66515864363997e-05,
      "loss": 1.5534,
      "step": 71
    },
    {
      "epoch": 0.04356375737407352,
      "grad_norm": 4.796183109283447,
      "learning_rate": 7.594847868906076e-05,
      "loss": 1.6907,
      "step": 72
    },
    {
      "epoch": 0.04416880955982454,
      "grad_norm": 5.4603753089904785,
      "learning_rate": 7.52382768867422e-05,
      "loss": 1.468,
      "step": 73
    },
    {
      "epoch": 0.04477386174557556,
      "grad_norm": 4.92110013961792,
      "learning_rate": 7.452117519152542e-05,
      "loss": 1.8974,
      "step": 74
    },
    {
      "epoch": 0.04537891393132658,
      "grad_norm": 4.946081638336182,
      "learning_rate": 7.379736965185368e-05,
      "loss": 1.7965,
      "step": 75
    },
    {
      "epoch": 0.0459839661170776,
      "grad_norm": 4.532042980194092,
      "learning_rate": 7.30670581489344e-05,
      "loss": 1.4453,
      "step": 76
    },
    {
      "epoch": 0.04658901830282862,
      "grad_norm": 4.3761725425720215,
      "learning_rate": 7.233044034264034e-05,
      "loss": 1.4019,
      "step": 77
    },
    {
      "epoch": 0.04719407048857964,
      "grad_norm": 4.8957295417785645,
      "learning_rate": 7.158771761692464e-05,
      "loss": 1.7838,
      "step": 78
    },
    {
      "epoch": 0.04779912267433066,
      "grad_norm": 3.9672493934631348,
      "learning_rate": 7.083909302476453e-05,
      "loss": 1.3741,
      "step": 79
    },
    {
      "epoch": 0.04840417486008168,
      "grad_norm": 4.684700012207031,
      "learning_rate": 7.008477123264848e-05,
      "loss": 1.7226,
      "step": 80
    },
    {
      "epoch": 0.0490092270458327,
      "grad_norm": 6.024520397186279,
      "learning_rate": 6.932495846462261e-05,
      "loss": 1.8488,
      "step": 81
    },
    {
      "epoch": 0.04961427923158372,
      "grad_norm": 5.137774467468262,
      "learning_rate": 6.855986244591104e-05,
      "loss": 2.1449,
      "step": 82
    },
    {
      "epoch": 0.05021933141733474,
      "grad_norm": 4.6780290603637695,
      "learning_rate": 6.778969234612584e-05,
      "loss": 1.6421,
      "step": 83
    },
    {
      "epoch": 0.05082438360308577,
      "grad_norm": 3.9340310096740723,
      "learning_rate": 6.701465872208216e-05,
      "loss": 1.3248,
      "step": 84
    },
    {
      "epoch": 0.05142943578883679,
      "grad_norm": 4.935190200805664,
      "learning_rate": 6.623497346023418e-05,
      "loss": 1.0658,
      "step": 85
    },
    {
      "epoch": 0.05203448797458781,
      "grad_norm": 3.860861301422119,
      "learning_rate": 6.545084971874738e-05,
      "loss": 1.0084,
      "step": 86
    },
    {
      "epoch": 0.05263954016033883,
      "grad_norm": 4.6443891525268555,
      "learning_rate": 6.466250186922325e-05,
      "loss": 1.5877,
      "step": 87
    },
    {
      "epoch": 0.05324459234608985,
      "grad_norm": 5.341885566711426,
      "learning_rate": 6.387014543809223e-05,
      "loss": 1.39,
      "step": 88
    },
    {
      "epoch": 0.05384964453184087,
      "grad_norm": 4.4416422843933105,
      "learning_rate": 6.307399704769099e-05,
      "loss": 1.5717,
      "step": 89
    },
    {
      "epoch": 0.05445469671759189,
      "grad_norm": 4.4941325187683105,
      "learning_rate": 6.227427435703997e-05,
      "loss": 1.6604,
      "step": 90
    },
    {
      "epoch": 0.05505974890334291,
      "grad_norm": 5.161505699157715,
      "learning_rate": 6.147119600233758e-05,
      "loss": 1.7155,
      "step": 91
    },
    {
      "epoch": 0.055664801089093933,
      "grad_norm": 5.349496841430664,
      "learning_rate": 6.066498153718735e-05,
      "loss": 1.7318,
      "step": 92
    },
    {
      "epoch": 0.056269853274844954,
      "grad_norm": 5.1708149909973145,
      "learning_rate": 5.985585137257401e-05,
      "loss": 1.5856,
      "step": 93
    },
    {
      "epoch": 0.056874905460595974,
      "grad_norm": 4.384572505950928,
      "learning_rate": 5.90440267166055e-05,
      "loss": 1.2994,
      "step": 94
    },
    {
      "epoch": 0.057479957646346995,
      "grad_norm": 4.298950672149658,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 1.4626,
      "step": 95
    },
    {
      "epoch": 0.058085009832098015,
      "grad_norm": 5.496049404144287,
      "learning_rate": 5.74131823855921e-05,
      "loss": 2.0517,
      "step": 96
    },
    {
      "epoch": 0.05869006201784904,
      "grad_norm": 4.826742649078369,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 1.442,
      "step": 97
    },
    {
      "epoch": 0.05929511420360006,
      "grad_norm": 5.374789237976074,
      "learning_rate": 5.577423184847932e-05,
      "loss": 1.3767,
      "step": 98
    },
    {
      "epoch": 0.059900166389351084,
      "grad_norm": 4.710012435913086,
      "learning_rate": 5.495227651252315e-05,
      "loss": 1.5977,
      "step": 99
    },
    {
      "epoch": 0.060505218575102104,
      "grad_norm": 5.694589614868164,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 2.0857,
      "step": 100
    },
    {
      "epoch": 0.060505218575102104,
      "eval_loss": 1.7223000526428223,
      "eval_runtime": 132.8888,
      "eval_samples_per_second": 20.95,
      "eval_steps_per_second": 5.237,
      "step": 100
    },
    {
      "epoch": 0.061110270760853125,
      "grad_norm": 4.256014347076416,
      "learning_rate": 5.330452921628497e-05,
      "loss": 1.8564,
      "step": 101
    },
    {
      "epoch": 0.061715322946604145,
      "grad_norm": 4.559025287628174,
      "learning_rate": 5.247918773366112e-05,
      "loss": 2.2042,
      "step": 102
    },
    {
      "epoch": 0.062320375132355166,
      "grad_norm": 4.099578857421875,
      "learning_rate": 5.165316846586541e-05,
      "loss": 2.0992,
      "step": 103
    },
    {
      "epoch": 0.06292542731810619,
      "grad_norm": 4.300604820251465,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 2.1802,
      "step": 104
    },
    {
      "epoch": 0.0635304795038572,
      "grad_norm": 4.9460296630859375,
      "learning_rate": 5e-05,
      "loss": 2.6528,
      "step": 105
    },
    {
      "epoch": 0.06413553168960823,
      "grad_norm": 4.572192668914795,
      "learning_rate": 4.917330276168208e-05,
      "loss": 2.1446,
      "step": 106
    },
    {
      "epoch": 0.06474058387535925,
      "grad_norm": 4.888028621673584,
      "learning_rate": 4.834683153413459e-05,
      "loss": 2.1126,
      "step": 107
    },
    {
      "epoch": 0.06534563606111027,
      "grad_norm": 4.279664039611816,
      "learning_rate": 4.7520812266338885e-05,
      "loss": 1.838,
      "step": 108
    },
    {
      "epoch": 0.06595068824686129,
      "grad_norm": 4.849969387054443,
      "learning_rate": 4.669547078371504e-05,
      "loss": 2.0839,
      "step": 109
    },
    {
      "epoch": 0.06655574043261231,
      "grad_norm": 5.360795974731445,
      "learning_rate": 4.5871032726383386e-05,
      "loss": 1.6644,
      "step": 110
    },
    {
      "epoch": 0.06716079261836333,
      "grad_norm": 4.785168647766113,
      "learning_rate": 4.504772348747687e-05,
      "loss": 1.9794,
      "step": 111
    },
    {
      "epoch": 0.06776584480411435,
      "grad_norm": 4.891386032104492,
      "learning_rate": 4.4225768151520694e-05,
      "loss": 1.619,
      "step": 112
    },
    {
      "epoch": 0.06837089698986537,
      "grad_norm": 6.79011344909668,
      "learning_rate": 4.3405391432896555e-05,
      "loss": 1.9895,
      "step": 113
    },
    {
      "epoch": 0.06897594917561639,
      "grad_norm": 5.493412971496582,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 1.6992,
      "step": 114
    },
    {
      "epoch": 0.06958100136136741,
      "grad_norm": 5.8443450927734375,
      "learning_rate": 4.17702704859633e-05,
      "loss": 1.9605,
      "step": 115
    },
    {
      "epoch": 0.07018605354711843,
      "grad_norm": 5.376211166381836,
      "learning_rate": 4.095597328339452e-05,
      "loss": 1.5237,
      "step": 116
    },
    {
      "epoch": 0.07079110573286947,
      "grad_norm": 6.854538440704346,
      "learning_rate": 4.0144148627425993e-05,
      "loss": 2.1044,
      "step": 117
    },
    {
      "epoch": 0.07139615791862049,
      "grad_norm": 12.249556541442871,
      "learning_rate": 3.933501846281267e-05,
      "loss": 1.9499,
      "step": 118
    },
    {
      "epoch": 0.07200121010437151,
      "grad_norm": 4.763735294342041,
      "learning_rate": 3.852880399766243e-05,
      "loss": 1.6537,
      "step": 119
    },
    {
      "epoch": 0.07260626229012253,
      "grad_norm": 5.7027363777160645,
      "learning_rate": 3.772572564296005e-05,
      "loss": 1.2904,
      "step": 120
    },
    {
      "epoch": 0.07321131447587355,
      "grad_norm": 5.195602893829346,
      "learning_rate": 3.6926002952309016e-05,
      "loss": 1.6341,
      "step": 121
    },
    {
      "epoch": 0.07381636666162457,
      "grad_norm": 4.989786624908447,
      "learning_rate": 3.612985456190778e-05,
      "loss": 1.6039,
      "step": 122
    },
    {
      "epoch": 0.07442141884737559,
      "grad_norm": 4.652569770812988,
      "learning_rate": 3.533749813077677e-05,
      "loss": 1.6898,
      "step": 123
    },
    {
      "epoch": 0.07502647103312661,
      "grad_norm": 4.230212211608887,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 1.27,
      "step": 124
    },
    {
      "epoch": 0.07563152321887763,
      "grad_norm": 4.758596897125244,
      "learning_rate": 3.3765026539765834e-05,
      "loss": 1.6071,
      "step": 125
    },
    {
      "epoch": 0.07623657540462865,
      "grad_norm": 4.950014114379883,
      "learning_rate": 3.298534127791785e-05,
      "loss": 1.8124,
      "step": 126
    },
    {
      "epoch": 0.07684162759037967,
      "grad_norm": 4.507136344909668,
      "learning_rate": 3.221030765387417e-05,
      "loss": 1.232,
      "step": 127
    },
    {
      "epoch": 0.07744667977613069,
      "grad_norm": 4.617379665374756,
      "learning_rate": 3.144013755408895e-05,
      "loss": 1.6234,
      "step": 128
    },
    {
      "epoch": 0.07805173196188171,
      "grad_norm": 4.021543979644775,
      "learning_rate": 3.0675041535377405e-05,
      "loss": 1.2495,
      "step": 129
    },
    {
      "epoch": 0.07865678414763273,
      "grad_norm": 3.2041749954223633,
      "learning_rate": 2.991522876735154e-05,
      "loss": 0.9134,
      "step": 130
    },
    {
      "epoch": 0.07926183633338375,
      "grad_norm": 4.642094135284424,
      "learning_rate": 2.916090697523549e-05,
      "loss": 1.5549,
      "step": 131
    },
    {
      "epoch": 0.07986688851913477,
      "grad_norm": 4.646106243133545,
      "learning_rate": 2.8412282383075363e-05,
      "loss": 1.372,
      "step": 132
    },
    {
      "epoch": 0.0804719407048858,
      "grad_norm": 4.656652450561523,
      "learning_rate": 2.766955965735968e-05,
      "loss": 1.8913,
      "step": 133
    },
    {
      "epoch": 0.08107699289063681,
      "grad_norm": 4.523721694946289,
      "learning_rate": 2.693294185106562e-05,
      "loss": 1.6731,
      "step": 134
    },
    {
      "epoch": 0.08168204507638784,
      "grad_norm": 4.131584167480469,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 1.5144,
      "step": 135
    },
    {
      "epoch": 0.08228709726213886,
      "grad_norm": 4.05930233001709,
      "learning_rate": 2.547882480847461e-05,
      "loss": 1.3557,
      "step": 136
    },
    {
      "epoch": 0.08289214944788988,
      "grad_norm": 4.66646671295166,
      "learning_rate": 2.476172311325783e-05,
      "loss": 1.3276,
      "step": 137
    },
    {
      "epoch": 0.0834972016336409,
      "grad_norm": 4.368364334106445,
      "learning_rate": 2.405152131093926e-05,
      "loss": 1.554,
      "step": 138
    },
    {
      "epoch": 0.08410225381939192,
      "grad_norm": 4.241589546203613,
      "learning_rate": 2.3348413563600325e-05,
      "loss": 1.4698,
      "step": 139
    },
    {
      "epoch": 0.08470730600514294,
      "grad_norm": 4.435495376586914,
      "learning_rate": 2.2652592093878666e-05,
      "loss": 1.572,
      "step": 140
    },
    {
      "epoch": 0.08531235819089396,
      "grad_norm": 4.079944610595703,
      "learning_rate": 2.196424713241637e-05,
      "loss": 1.0826,
      "step": 141
    },
    {
      "epoch": 0.08591741037664498,
      "grad_norm": 4.599911689758301,
      "learning_rate": 2.128356686585282e-05,
      "loss": 1.4314,
      "step": 142
    },
    {
      "epoch": 0.08652246256239601,
      "grad_norm": 3.965014934539795,
      "learning_rate": 2.061073738537635e-05,
      "loss": 1.1389,
      "step": 143
    },
    {
      "epoch": 0.08712751474814703,
      "grad_norm": 4.101932048797607,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 1.2636,
      "step": 144
    },
    {
      "epoch": 0.08773256693389805,
      "grad_norm": 4.412632465362549,
      "learning_rate": 1.928936436551661e-05,
      "loss": 1.4291,
      "step": 145
    },
    {
      "epoch": 0.08833761911964907,
      "grad_norm": 4.265484809875488,
      "learning_rate": 1.8641182076323148e-05,
      "loss": 1.4033,
      "step": 146
    },
    {
      "epoch": 0.0889426713054001,
      "grad_norm": 4.544005393981934,
      "learning_rate": 1.800157297483417e-05,
      "loss": 1.4107,
      "step": 147
    },
    {
      "epoch": 0.08954772349115112,
      "grad_norm": 4.222890853881836,
      "learning_rate": 1.7370711923791567e-05,
      "loss": 1.3296,
      "step": 148
    },
    {
      "epoch": 0.09015277567690214,
      "grad_norm": 5.043741226196289,
      "learning_rate": 1.6748771394307585e-05,
      "loss": 1.7521,
      "step": 149
    },
    {
      "epoch": 0.09075782786265316,
      "grad_norm": 5.090908050537109,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 1.8676,
      "step": 150
    },
    {
      "epoch": 0.09075782786265316,
      "eval_loss": 1.6341633796691895,
      "eval_runtime": 132.9165,
      "eval_samples_per_second": 20.945,
      "eval_steps_per_second": 5.236,
      "step": 150
    },
    {
      "epoch": 0.09136288004840418,
      "grad_norm": 4.010870456695557,
      "learning_rate": 1.553232954407171e-05,
      "loss": 2.015,
      "step": 151
    },
    {
      "epoch": 0.0919679322341552,
      "grad_norm": 4.418432712554932,
      "learning_rate": 1.4938160786375572e-05,
      "loss": 1.9752,
      "step": 152
    },
    {
      "epoch": 0.09257298441990622,
      "grad_norm": 3.9958014488220215,
      "learning_rate": 1.435357758543015e-05,
      "loss": 2.0753,
      "step": 153
    },
    {
      "epoch": 0.09317803660565724,
      "grad_norm": 4.166998386383057,
      "learning_rate": 1.3778739760445552e-05,
      "loss": 1.7546,
      "step": 154
    },
    {
      "epoch": 0.09378308879140826,
      "grad_norm": 3.985368251800537,
      "learning_rate": 1.3213804466343421e-05,
      "loss": 1.6191,
      "step": 155
    },
    {
      "epoch": 0.09438814097715928,
      "grad_norm": 4.3636369705200195,
      "learning_rate": 1.2658926150792322e-05,
      "loss": 2.0277,
      "step": 156
    },
    {
      "epoch": 0.0949931931629103,
      "grad_norm": 4.95756721496582,
      "learning_rate": 1.2114256511983274e-05,
      "loss": 2.3159,
      "step": 157
    },
    {
      "epoch": 0.09559824534866132,
      "grad_norm": 4.428719997406006,
      "learning_rate": 1.157994445715706e-05,
      "loss": 1.7416,
      "step": 158
    },
    {
      "epoch": 0.09620329753441234,
      "grad_norm": 4.474020957946777,
      "learning_rate": 1.1056136061894384e-05,
      "loss": 1.7884,
      "step": 159
    },
    {
      "epoch": 0.09680834972016336,
      "grad_norm": 4.405423164367676,
      "learning_rate": 1.0542974530180327e-05,
      "loss": 1.6668,
      "step": 160
    },
    {
      "epoch": 0.09741340190591438,
      "grad_norm": 6.775367259979248,
      "learning_rate": 1.0040600155253765e-05,
      "loss": 1.8023,
      "step": 161
    },
    {
      "epoch": 0.0980184540916654,
      "grad_norm": 5.818444728851318,
      "learning_rate": 9.549150281252633e-06,
      "loss": 2.1367,
      "step": 162
    },
    {
      "epoch": 0.09862350627741642,
      "grad_norm": 4.062469005584717,
      "learning_rate": 9.068759265665384e-06,
      "loss": 1.2394,
      "step": 163
    },
    {
      "epoch": 0.09922855846316744,
      "grad_norm": 5.237635612487793,
      "learning_rate": 8.599558442598998e-06,
      "loss": 1.8316,
      "step": 164
    },
    {
      "epoch": 0.09983361064891846,
      "grad_norm": 5.004766941070557,
      "learning_rate": 8.141676086873572e-06,
      "loss": 1.7498,
      "step": 165
    },
    {
      "epoch": 0.10043866283466948,
      "grad_norm": 6.062614440917969,
      "learning_rate": 7.695237378953223e-06,
      "loss": 2.2138,
      "step": 166
    },
    {
      "epoch": 0.1010437150204205,
      "grad_norm": 6.577429294586182,
      "learning_rate": 7.260364370723044e-06,
      "loss": 1.4752,
      "step": 167
    },
    {
      "epoch": 0.10164876720617154,
      "grad_norm": 4.324187755584717,
      "learning_rate": 6.837175952121306e-06,
      "loss": 1.4536,
      "step": 168
    },
    {
      "epoch": 0.10225381939192256,
      "grad_norm": 6.81287145614624,
      "learning_rate": 6.425787818636131e-06,
      "loss": 1.7088,
      "step": 169
    },
    {
      "epoch": 0.10285887157767358,
      "grad_norm": 4.584249019622803,
      "learning_rate": 6.026312439675552e-06,
      "loss": 1.629,
      "step": 170
    },
    {
      "epoch": 0.1034639237634246,
      "grad_norm": 3.824648141860962,
      "learning_rate": 5.6388590278194096e-06,
      "loss": 1.2652,
      "step": 171
    },
    {
      "epoch": 0.10406897594917562,
      "grad_norm": 4.415689945220947,
      "learning_rate": 5.263533508961827e-06,
      "loss": 1.8574,
      "step": 172
    },
    {
      "epoch": 0.10467402813492664,
      "grad_norm": 6.334531784057617,
      "learning_rate": 4.900438493352055e-06,
      "loss": 1.3935,
      "step": 173
    },
    {
      "epoch": 0.10527908032067766,
      "grad_norm": 3.9672675132751465,
      "learning_rate": 4.549673247541875e-06,
      "loss": 1.3739,
      "step": 174
    },
    {
      "epoch": 0.10588413250642868,
      "grad_norm": 4.651885032653809,
      "learning_rate": 4.2113336672471245e-06,
      "loss": 1.7055,
      "step": 175
    },
    {
      "epoch": 0.1064891846921797,
      "grad_norm": 4.223604202270508,
      "learning_rate": 3.885512251130763e-06,
      "loss": 1.3369,
      "step": 176
    },
    {
      "epoch": 0.10709423687793072,
      "grad_norm": 4.983737945556641,
      "learning_rate": 3.5722980755146517e-06,
      "loss": 1.7146,
      "step": 177
    },
    {
      "epoch": 0.10769928906368174,
      "grad_norm": 5.22819185256958,
      "learning_rate": 3.271776770026963e-06,
      "loss": 1.7389,
      "step": 178
    },
    {
      "epoch": 0.10830434124943276,
      "grad_norm": 3.9994022846221924,
      "learning_rate": 2.9840304941919415e-06,
      "loss": 1.35,
      "step": 179
    },
    {
      "epoch": 0.10890939343518379,
      "grad_norm": 4.338124752044678,
      "learning_rate": 2.7091379149682685e-06,
      "loss": 1.4418,
      "step": 180
    },
    {
      "epoch": 0.1095144456209348,
      "grad_norm": 4.586472511291504,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 1.6367,
      "step": 181
    },
    {
      "epoch": 0.11011949780668583,
      "grad_norm": 4.578819751739502,
      "learning_rate": 2.1982109232821178e-06,
      "loss": 1.6142,
      "step": 182
    },
    {
      "epoch": 0.11072454999243685,
      "grad_norm": 3.8915982246398926,
      "learning_rate": 1.962316193157593e-06,
      "loss": 1.3661,
      "step": 183
    },
    {
      "epoch": 0.11132960217818787,
      "grad_norm": 5.164275646209717,
      "learning_rate": 1.7395544861325718e-06,
      "loss": 1.8204,
      "step": 184
    },
    {
      "epoch": 0.11193465436393889,
      "grad_norm": 4.32537841796875,
      "learning_rate": 1.5299867030334814e-06,
      "loss": 1.5363,
      "step": 185
    },
    {
      "epoch": 0.11253970654968991,
      "grad_norm": 4.528438568115234,
      "learning_rate": 1.333670137599713e-06,
      "loss": 1.1212,
      "step": 186
    },
    {
      "epoch": 0.11314475873544093,
      "grad_norm": 4.616623401641846,
      "learning_rate": 1.1506584608200367e-06,
      "loss": 1.6208,
      "step": 187
    },
    {
      "epoch": 0.11374981092119195,
      "grad_norm": 3.87123966217041,
      "learning_rate": 9.810017062595322e-07,
      "loss": 0.9631,
      "step": 188
    },
    {
      "epoch": 0.11435486310694297,
      "grad_norm": 4.51935338973999,
      "learning_rate": 8.247462563808817e-07,
      "loss": 1.4062,
      "step": 189
    },
    {
      "epoch": 0.11495991529269399,
      "grad_norm": 4.743103981018066,
      "learning_rate": 6.819348298638839e-07,
      "loss": 1.2416,
      "step": 190
    },
    {
      "epoch": 0.11556496747844501,
      "grad_norm": 4.292260646820068,
      "learning_rate": 5.526064699265753e-07,
      "loss": 1.4508,
      "step": 191
    },
    {
      "epoch": 0.11617001966419603,
      "grad_norm": 4.7620673179626465,
      "learning_rate": 4.367965336512403e-07,
      "loss": 1.3951,
      "step": 192
    },
    {
      "epoch": 0.11677507184994705,
      "grad_norm": 5.560548782348633,
      "learning_rate": 3.3453668231809286e-07,
      "loss": 1.7636,
      "step": 193
    },
    {
      "epoch": 0.11738012403569809,
      "grad_norm": 5.016361713409424,
      "learning_rate": 2.458548727494292e-07,
      "loss": 1.8073,
      "step": 194
    },
    {
      "epoch": 0.1179851762214491,
      "grad_norm": 5.063380241394043,
      "learning_rate": 1.7077534966650766e-07,
      "loss": 1.7441,
      "step": 195
    },
    {
      "epoch": 0.11859022840720013,
      "grad_norm": 4.034510135650635,
      "learning_rate": 1.0931863906127327e-07,
      "loss": 1.2482,
      "step": 196
    },
    {
      "epoch": 0.11919528059295115,
      "grad_norm": 5.894397735595703,
      "learning_rate": 6.150154258476315e-08,
      "loss": 2.3427,
      "step": 197
    },
    {
      "epoch": 0.11980033277870217,
      "grad_norm": 5.2676310539245605,
      "learning_rate": 2.7337132953697554e-08,
      "loss": 1.5469,
      "step": 198
    },
    {
      "epoch": 0.12040538496445319,
      "grad_norm": 4.946696758270264,
      "learning_rate": 6.834750376549792e-09,
      "loss": 1.1969,
      "step": 199
    },
    {
      "epoch": 0.12101043715020421,
      "grad_norm": 5.685734748840332,
      "learning_rate": 0.0,
      "loss": 1.8717,
      "step": 200
    },
    {
      "epoch": 0.12101043715020421,
      "eval_loss": 1.5972942113876343,
      "eval_runtime": 133.4035,
      "eval_samples_per_second": 20.869,
      "eval_steps_per_second": 5.217,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.1068744294780109e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
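The state above follows the trainer_state.json layout that the Hugging Face transformers Trainer writes alongside each checkpoint. As a minimal sketch for inspecting it (not part of the checkpoint itself; the input file name "trainer_state.json" and output path "loss_curves.png" are assumptions for illustration), the following Python reads log_history and plots the train and eval loss curves:

import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Training records carry a "loss" key; evaluation records carry "eval_loss".
train = [(r["step"], r["loss"]) for r in state["log_history"] if "loss" in r]
evals = [(r["step"], r["eval_loss"]) for r in state["log_history"] if "eval_loss" in r]

plt.plot([s for s, _ in train], [v for _, v in train], label="train loss")
plt.plot([s for s, _ in evals], [v for _, v in evals], marker="o", label="eval loss")
# "best_metric" here tracks the best eval_loss seen so far (1.5973 at step 200).
plt.axhline(state["best_metric"], linestyle="--",
            label=f"best eval loss = {state['best_metric']:.4f}")
plt.xlabel("step")
plt.ylabel("loss")
plt.legend()
plt.savefig("loss_curves.png")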