{
  "best_metric": 2.142829656600952,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.2946593001841621,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0014732965009208103,
      "grad_norm": 0.6813496947288513,
      "learning_rate": 1e-05,
      "loss": 2.5563,
      "step": 1
    },
    {
      "epoch": 0.0014732965009208103,
      "eval_loss": 2.918741226196289,
      "eval_runtime": 33.5541,
      "eval_samples_per_second": 34.094,
      "eval_steps_per_second": 8.524,
      "step": 1
    },
    {
      "epoch": 0.0029465930018416206,
      "grad_norm": 0.8075690269470215,
      "learning_rate": 2e-05,
      "loss": 2.5972,
      "step": 2
    },
    {
      "epoch": 0.004419889502762431,
      "grad_norm": 0.8233782649040222,
      "learning_rate": 3e-05,
      "loss": 2.4961,
      "step": 3
    },
    {
      "epoch": 0.005893186003683241,
      "grad_norm": 0.8552287817001343,
      "learning_rate": 4e-05,
      "loss": 2.6314,
      "step": 4
    },
    {
      "epoch": 0.007366482504604052,
      "grad_norm": 0.8532387018203735,
      "learning_rate": 5e-05,
      "loss": 2.6159,
      "step": 5
    },
    {
      "epoch": 0.008839779005524863,
      "grad_norm": 0.773468554019928,
      "learning_rate": 6e-05,
      "loss": 2.5155,
      "step": 6
    },
    {
      "epoch": 0.010313075506445672,
      "grad_norm": 0.7581596374511719,
      "learning_rate": 7e-05,
      "loss": 2.4491,
      "step": 7
    },
    {
      "epoch": 0.011786372007366482,
      "grad_norm": 0.7836837768554688,
      "learning_rate": 8e-05,
      "loss": 2.5402,
      "step": 8
    },
    {
      "epoch": 0.013259668508287293,
      "grad_norm": 0.7612143158912659,
      "learning_rate": 9e-05,
      "loss": 2.3494,
      "step": 9
    },
    {
      "epoch": 0.014732965009208104,
      "grad_norm": 0.7859139442443848,
      "learning_rate": 0.0001,
      "loss": 2.3823,
      "step": 10
    },
    {
      "epoch": 0.016206261510128914,
      "grad_norm": 0.9450372457504272,
      "learning_rate": 9.999316524962345e-05,
      "loss": 2.3155,
      "step": 11
    },
    {
      "epoch": 0.017679558011049725,
      "grad_norm": 1.120526909828186,
      "learning_rate": 9.997266286704631e-05,
      "loss": 2.323,
      "step": 12
    },
    {
      "epoch": 0.019152854511970532,
      "grad_norm": 0.7395462989807129,
      "learning_rate": 9.993849845741524e-05,
      "loss": 2.3224,
      "step": 13
    },
    {
      "epoch": 0.020626151012891343,
      "grad_norm": 0.9541445374488831,
      "learning_rate": 9.989068136093873e-05,
      "loss": 2.2665,
      "step": 14
    },
    {
      "epoch": 0.022099447513812154,
      "grad_norm": 0.8041669130325317,
      "learning_rate": 9.98292246503335e-05,
      "loss": 2.4497,
      "step": 15
    },
    {
      "epoch": 0.023572744014732964,
      "grad_norm": 0.6719281077384949,
      "learning_rate": 9.975414512725057e-05,
      "loss": 2.4481,
      "step": 16
    },
    {
      "epoch": 0.025046040515653775,
      "grad_norm": 0.6882573366165161,
      "learning_rate": 9.966546331768191e-05,
      "loss": 2.4399,
      "step": 17
    },
    {
      "epoch": 0.026519337016574586,
      "grad_norm": 0.6736729145050049,
      "learning_rate": 9.956320346634876e-05,
      "loss": 2.2692,
      "step": 18
    },
    {
      "epoch": 0.027992633517495397,
      "grad_norm": 0.6062433123588562,
      "learning_rate": 9.944739353007344e-05,
      "loss": 2.3338,
      "step": 19
    },
    {
      "epoch": 0.029465930018416207,
      "grad_norm": 0.6055423617362976,
      "learning_rate": 9.931806517013612e-05,
      "loss": 2.2801,
      "step": 20
    },
    {
      "epoch": 0.030939226519337018,
      "grad_norm": 0.6778382658958435,
      "learning_rate": 9.917525374361912e-05,
      "loss": 2.3912,
      "step": 21
    },
    {
      "epoch": 0.03241252302025783,
      "grad_norm": 0.628765344619751,
      "learning_rate": 9.901899829374047e-05,
      "loss": 2.3412,
      "step": 22
    },
    {
      "epoch": 0.03388581952117864,
      "grad_norm": 0.6677904725074768,
      "learning_rate": 9.884934153917997e-05,
      "loss": 2.3754,
      "step": 23
    },
    {
      "epoch": 0.03535911602209945,
      "grad_norm": 0.6703566312789917,
      "learning_rate": 9.86663298624003e-05,
      "loss": 2.3854,
      "step": 24
    },
    {
      "epoch": 0.03683241252302026,
      "grad_norm": 0.6443681716918945,
      "learning_rate": 9.847001329696653e-05,
      "loss": 2.5166,
      "step": 25
    },
    {
      "epoch": 0.038305709023941065,
      "grad_norm": 0.6533526182174683,
      "learning_rate": 9.826044551386744e-05,
      "loss": 2.4067,
      "step": 26
    },
    {
      "epoch": 0.039779005524861875,
      "grad_norm": 0.6256962418556213,
      "learning_rate": 9.803768380684242e-05,
      "loss": 2.2705,
      "step": 27
    },
    {
      "epoch": 0.041252302025782686,
      "grad_norm": 0.6230438351631165,
      "learning_rate": 9.780178907671789e-05,
      "loss": 2.3179,
      "step": 28
    },
    {
      "epoch": 0.0427255985267035,
      "grad_norm": 0.6809419393539429,
      "learning_rate": 9.755282581475769e-05,
      "loss": 2.4417,
      "step": 29
    },
    {
      "epoch": 0.04419889502762431,
      "grad_norm": 0.6697230935096741,
      "learning_rate": 9.729086208503174e-05,
      "loss": 2.4118,
      "step": 30
    },
    {
      "epoch": 0.04567219152854512,
      "grad_norm": 0.6355602145195007,
      "learning_rate": 9.701596950580806e-05,
      "loss": 2.2859,
      "step": 31
    },
    {
      "epoch": 0.04714548802946593,
      "grad_norm": 0.6855126023292542,
      "learning_rate": 9.672822322997305e-05,
      "loss": 2.3763,
      "step": 32
    },
    {
      "epoch": 0.04861878453038674,
      "grad_norm": 0.6802644729614258,
      "learning_rate": 9.642770192448536e-05,
      "loss": 2.4461,
      "step": 33
    },
    {
      "epoch": 0.05009208103130755,
      "grad_norm": 0.6710078120231628,
      "learning_rate": 9.611448774886924e-05,
      "loss": 2.4494,
      "step": 34
    },
    {
      "epoch": 0.05156537753222836,
      "grad_norm": 0.6675485372543335,
      "learning_rate": 9.578866633275288e-05,
      "loss": 2.3493,
      "step": 35
    },
    {
      "epoch": 0.05303867403314917,
      "grad_norm": 0.6746523380279541,
      "learning_rate": 9.545032675245813e-05,
      "loss": 2.3894,
      "step": 36
    },
    {
      "epoch": 0.05451197053406998,
      "grad_norm": 0.6553740501403809,
      "learning_rate": 9.509956150664796e-05,
      "loss": 2.1722,
      "step": 37
    },
    {
      "epoch": 0.05598526703499079,
      "grad_norm": 0.691763699054718,
      "learning_rate": 9.473646649103818e-05,
      "loss": 2.4571,
      "step": 38
    },
    {
      "epoch": 0.057458563535911604,
      "grad_norm": 0.6961422562599182,
      "learning_rate": 9.43611409721806e-05,
      "loss": 2.2901,
      "step": 39
    },
    {
      "epoch": 0.058931860036832415,
      "grad_norm": 0.6852315664291382,
      "learning_rate": 9.397368756032445e-05,
      "loss": 2.232,
      "step": 40
    },
    {
      "epoch": 0.060405156537753225,
      "grad_norm": 0.774763822555542,
      "learning_rate": 9.357421218136386e-05,
      "loss": 2.3699,
      "step": 41
    },
    {
      "epoch": 0.061878453038674036,
      "grad_norm": 0.701396107673645,
      "learning_rate": 9.316282404787871e-05,
      "loss": 2.3449,
      "step": 42
    },
    {
      "epoch": 0.06335174953959484,
      "grad_norm": 0.770219624042511,
      "learning_rate": 9.273963562927695e-05,
      "loss": 2.4661,
      "step": 43
    },
    {
      "epoch": 0.06482504604051566,
      "grad_norm": 0.7467716932296753,
      "learning_rate": 9.230476262104677e-05,
      "loss": 2.326,
      "step": 44
    },
    {
      "epoch": 0.06629834254143646,
      "grad_norm": 0.7156895995140076,
      "learning_rate": 9.185832391312644e-05,
      "loss": 2.2128,
      "step": 45
    },
    {
      "epoch": 0.06777163904235728,
      "grad_norm": 0.7966296076774597,
      "learning_rate": 9.140044155740101e-05,
      "loss": 2.2482,
      "step": 46
    },
    {
      "epoch": 0.06924493554327808,
      "grad_norm": 0.8367739915847778,
      "learning_rate": 9.093124073433463e-05,
      "loss": 2.4101,
      "step": 47
    },
    {
      "epoch": 0.0707182320441989,
      "grad_norm": 0.8258773684501648,
      "learning_rate": 9.045084971874738e-05,
      "loss": 2.1884,
      "step": 48
    },
    {
      "epoch": 0.0721915285451197,
      "grad_norm": 0.9507535099983215,
      "learning_rate": 8.995939984474624e-05,
      "loss": 2.3239,
      "step": 49
    },
    {
      "epoch": 0.07366482504604052,
      "grad_norm": 1.0835574865341187,
      "learning_rate": 8.945702546981969e-05,
      "loss": 2.2306,
      "step": 50
    },
    {
      "epoch": 0.07366482504604052,
      "eval_loss": 2.2473981380462646,
      "eval_runtime": 33.6255,
      "eval_samples_per_second": 34.022,
      "eval_steps_per_second": 8.505,
      "step": 50
    },
    {
      "epoch": 0.07513812154696133,
      "grad_norm": 1.010612964630127,
      "learning_rate": 8.894386393810563e-05,
      "loss": 2.1945,
      "step": 51
    },
    {
      "epoch": 0.07661141804788213,
      "grad_norm": 0.7759504914283752,
      "learning_rate": 8.842005554284296e-05,
      "loss": 2.2003,
      "step": 52
    },
    {
      "epoch": 0.07808471454880295,
      "grad_norm": 0.6330819129943848,
      "learning_rate": 8.788574348801675e-05,
      "loss": 2.1249,
      "step": 53
    },
    {
      "epoch": 0.07955801104972375,
      "grad_norm": 0.527708888053894,
      "learning_rate": 8.73410738492077e-05,
      "loss": 2.0861,
      "step": 54
    },
    {
      "epoch": 0.08103130755064457,
      "grad_norm": 0.48775210976600647,
      "learning_rate": 8.678619553365659e-05,
      "loss": 2.1054,
      "step": 55
    },
    {
      "epoch": 0.08250460405156537,
      "grad_norm": 0.4980153739452362,
      "learning_rate": 8.622126023955446e-05,
      "loss": 2.0805,
      "step": 56
    },
    {
      "epoch": 0.08397790055248619,
      "grad_norm": 0.5115144848823547,
      "learning_rate": 8.564642241456986e-05,
      "loss": 2.2559,
      "step": 57
    },
    {
      "epoch": 0.085451197053407,
      "grad_norm": 0.5000446438789368,
      "learning_rate": 8.506183921362443e-05,
      "loss": 2.0434,
      "step": 58
    },
    {
      "epoch": 0.08692449355432781,
      "grad_norm": 0.5144779682159424,
      "learning_rate": 8.44676704559283e-05,
      "loss": 2.0314,
      "step": 59
    },
    {
      "epoch": 0.08839779005524862,
      "grad_norm": 0.5057157874107361,
      "learning_rate": 8.386407858128706e-05,
      "loss": 2.1697,
      "step": 60
    },
    {
      "epoch": 0.08987108655616943,
      "grad_norm": 0.5526983141899109,
      "learning_rate": 8.32512286056924e-05,
      "loss": 2.1934,
      "step": 61
    },
    {
      "epoch": 0.09134438305709024,
      "grad_norm": 0.5022704601287842,
      "learning_rate": 8.262928807620843e-05,
      "loss": 2.1775,
      "step": 62
    },
    {
      "epoch": 0.09281767955801105,
      "grad_norm": 0.519891083240509,
      "learning_rate": 8.199842702516583e-05,
      "loss": 2.2059,
      "step": 63
    },
    {
      "epoch": 0.09429097605893186,
      "grad_norm": 0.552194356918335,
      "learning_rate": 8.135881792367686e-05,
      "loss": 2.1553,
      "step": 64
    },
    {
      "epoch": 0.09576427255985268,
      "grad_norm": 0.5596328973770142,
      "learning_rate": 8.07106356344834e-05,
      "loss": 2.2244,
      "step": 65
    },
    {
      "epoch": 0.09723756906077348,
      "grad_norm": 0.5474238991737366,
      "learning_rate": 8.005405736415126e-05,
      "loss": 2.3101,
      "step": 66
    },
    {
      "epoch": 0.0987108655616943,
      "grad_norm": 0.5688091516494751,
      "learning_rate": 7.938926261462366e-05,
      "loss": 2.1229,
      "step": 67
    },
    {
      "epoch": 0.1001841620626151,
      "grad_norm": 0.5471810102462769,
      "learning_rate": 7.871643313414718e-05,
      "loss": 2.1483,
      "step": 68
    },
    {
      "epoch": 0.10165745856353592,
      "grad_norm": 0.5143526792526245,
      "learning_rate": 7.803575286758364e-05,
      "loss": 2.1053,
      "step": 69
    },
    {
      "epoch": 0.10313075506445672,
      "grad_norm": 0.5542533993721008,
      "learning_rate": 7.734740790612136e-05,
      "loss": 2.2268,
      "step": 70
    },
    {
      "epoch": 0.10460405156537753,
      "grad_norm": 0.5456425547599792,
      "learning_rate": 7.66515864363997e-05,
      "loss": 2.1713,
      "step": 71
    },
    {
      "epoch": 0.10607734806629834,
      "grad_norm": 0.5739713907241821,
      "learning_rate": 7.594847868906076e-05,
      "loss": 2.245,
      "step": 72
    },
    {
      "epoch": 0.10755064456721915,
      "grad_norm": 0.550366222858429,
      "learning_rate": 7.52382768867422e-05,
      "loss": 2.2492,
      "step": 73
    },
    {
      "epoch": 0.10902394106813997,
      "grad_norm": 0.5844371914863586,
      "learning_rate": 7.452117519152542e-05,
      "loss": 2.3137,
      "step": 74
    },
    {
      "epoch": 0.11049723756906077,
      "grad_norm": 0.5785281658172607,
      "learning_rate": 7.379736965185368e-05,
      "loss": 2.2594,
      "step": 75
    },
    {
      "epoch": 0.11197053406998159,
      "grad_norm": 0.5865864157676697,
      "learning_rate": 7.30670581489344e-05,
      "loss": 2.2213,
      "step": 76
    },
    {
      "epoch": 0.11344383057090239,
      "grad_norm": 0.5580472350120544,
      "learning_rate": 7.233044034264034e-05,
      "loss": 2.2876,
      "step": 77
    },
    {
      "epoch": 0.11491712707182321,
      "grad_norm": 0.6060166358947754,
      "learning_rate": 7.158771761692464e-05,
      "loss": 2.3195,
      "step": 78
    },
    {
      "epoch": 0.11639042357274401,
      "grad_norm": 0.5804003477096558,
      "learning_rate": 7.083909302476453e-05,
      "loss": 2.2484,
      "step": 79
    },
    {
      "epoch": 0.11786372007366483,
      "grad_norm": 0.5879073739051819,
      "learning_rate": 7.008477123264848e-05,
      "loss": 2.1609,
      "step": 80
    },
    {
      "epoch": 0.11933701657458563,
      "grad_norm": 0.5739030838012695,
      "learning_rate": 6.932495846462261e-05,
      "loss": 2.1838,
      "step": 81
    },
    {
      "epoch": 0.12081031307550645,
      "grad_norm": 0.6036490797996521,
      "learning_rate": 6.855986244591104e-05,
      "loss": 2.1805,
      "step": 82
    },
    {
      "epoch": 0.12228360957642725,
      "grad_norm": 0.6051073670387268,
      "learning_rate": 6.778969234612584e-05,
      "loss": 2.2435,
      "step": 83
    },
    {
      "epoch": 0.12375690607734807,
      "grad_norm": 0.5920565724372864,
      "learning_rate": 6.701465872208216e-05,
      "loss": 2.14,
      "step": 84
    },
    {
      "epoch": 0.1252302025782689,
      "grad_norm": 0.6635783314704895,
      "learning_rate": 6.623497346023418e-05,
      "loss": 2.2595,
      "step": 85
    },
    {
      "epoch": 0.12670349907918968,
      "grad_norm": 0.6474425196647644,
      "learning_rate": 6.545084971874738e-05,
      "loss": 2.21,
      "step": 86
    },
    {
      "epoch": 0.1281767955801105,
      "grad_norm": 0.6599803566932678,
      "learning_rate": 6.466250186922325e-05,
      "loss": 2.1636,
      "step": 87
    },
    {
      "epoch": 0.12965009208103132,
      "grad_norm": 0.6417123079299927,
      "learning_rate": 6.387014543809223e-05,
      "loss": 2.2529,
      "step": 88
    },
    {
      "epoch": 0.1311233885819521,
      "grad_norm": 0.6307031512260437,
      "learning_rate": 6.307399704769099e-05,
      "loss": 2.2475,
      "step": 89
    },
    {
      "epoch": 0.13259668508287292,
      "grad_norm": 0.6963887810707092,
      "learning_rate": 6.227427435703997e-05,
      "loss": 2.3434,
      "step": 90
    },
    {
      "epoch": 0.13406998158379374,
      "grad_norm": 0.7633985877037048,
      "learning_rate": 6.147119600233758e-05,
      "loss": 2.3564,
      "step": 91
    },
    {
      "epoch": 0.13554327808471456,
      "grad_norm": 0.7439932823181152,
      "learning_rate": 6.066498153718735e-05,
      "loss": 2.3207,
      "step": 92
    },
    {
      "epoch": 0.13701657458563535,
      "grad_norm": 0.6978381872177124,
      "learning_rate": 5.985585137257401e-05,
      "loss": 2.1644,
      "step": 93
    },
    {
      "epoch": 0.13848987108655617,
      "grad_norm": 0.790519118309021,
      "learning_rate": 5.90440267166055e-05,
      "loss": 2.165,
      "step": 94
    },
    {
      "epoch": 0.13996316758747698,
      "grad_norm": 0.8105412721633911,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 2.4732,
      "step": 95
    },
    {
      "epoch": 0.1414364640883978,
      "grad_norm": 0.7491436004638672,
      "learning_rate": 5.74131823855921e-05,
      "loss": 2.2654,
      "step": 96
    },
    {
      "epoch": 0.1429097605893186,
      "grad_norm": 0.755374014377594,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 2.3541,
      "step": 97
    },
    {
      "epoch": 0.1443830570902394,
      "grad_norm": 0.8364269733428955,
      "learning_rate": 5.577423184847932e-05,
      "loss": 2.2961,
      "step": 98
    },
    {
      "epoch": 0.14585635359116023,
      "grad_norm": 0.8708286881446838,
      "learning_rate": 5.495227651252315e-05,
      "loss": 2.1471,
      "step": 99
    },
    {
      "epoch": 0.14732965009208104,
      "grad_norm": 1.0091307163238525,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 2.0601,
      "step": 100
    },
    {
      "epoch": 0.14732965009208104,
      "eval_loss": 2.177105188369751,
      "eval_runtime": 33.6215,
      "eval_samples_per_second": 34.026,
      "eval_steps_per_second": 8.506,
      "step": 100
    },
    {
      "epoch": 0.14880294659300183,
      "grad_norm": 0.5215460062026978,
      "learning_rate": 5.330452921628497e-05,
      "loss": 2.0475,
      "step": 101
    },
    {
      "epoch": 0.15027624309392265,
      "grad_norm": 0.6095430850982666,
      "learning_rate": 5.247918773366112e-05,
      "loss": 2.0592,
      "step": 102
    },
    {
      "epoch": 0.15174953959484347,
      "grad_norm": 0.5464235544204712,
      "learning_rate": 5.165316846586541e-05,
      "loss": 2.0183,
      "step": 103
    },
    {
      "epoch": 0.15322283609576426,
      "grad_norm": 0.5505194664001465,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 2.0362,
      "step": 104
    },
    {
      "epoch": 0.15469613259668508,
      "grad_norm": 0.5168222188949585,
      "learning_rate": 5e-05,
      "loss": 2.1467,
      "step": 105
    },
    {
      "epoch": 0.1561694290976059,
      "grad_norm": 0.5152841806411743,
      "learning_rate": 4.917330276168208e-05,
      "loss": 2.1926,
      "step": 106
    },
    {
      "epoch": 0.1576427255985267,
      "grad_norm": 0.49555912613868713,
      "learning_rate": 4.834683153413459e-05,
      "loss": 2.0815,
      "step": 107
    },
    {
      "epoch": 0.1591160220994475,
      "grad_norm": 0.4678058624267578,
      "learning_rate": 4.7520812266338885e-05,
      "loss": 2.1123,
      "step": 108
    },
    {
      "epoch": 0.16058931860036832,
      "grad_norm": 0.4995936453342438,
      "learning_rate": 4.669547078371504e-05,
      "loss": 2.0178,
      "step": 109
    },
    {
      "epoch": 0.16206261510128914,
      "grad_norm": 0.4865174889564514,
      "learning_rate": 4.5871032726383386e-05,
      "loss": 2.0839,
      "step": 110
    },
    {
      "epoch": 0.16353591160220995,
      "grad_norm": 0.46470630168914795,
      "learning_rate": 4.504772348747687e-05,
      "loss": 2.1032,
      "step": 111
    },
    {
      "epoch": 0.16500920810313074,
      "grad_norm": 0.4676339626312256,
      "learning_rate": 4.4225768151520694e-05,
      "loss": 2.2317,
      "step": 112
    },
    {
      "epoch": 0.16648250460405156,
      "grad_norm": 0.48408418893814087,
      "learning_rate": 4.3405391432896555e-05,
      "loss": 2.1775,
      "step": 113
    },
    {
      "epoch": 0.16795580110497238,
      "grad_norm": 0.5317076444625854,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 2.207,
      "step": 114
    },
    {
      "epoch": 0.1694290976058932,
      "grad_norm": 0.5113919973373413,
      "learning_rate": 4.17702704859633e-05,
      "loss": 2.1026,
      "step": 115
    },
    {
      "epoch": 0.170902394106814,
      "grad_norm": 0.5057461261749268,
      "learning_rate": 4.095597328339452e-05,
      "loss": 2.1784,
      "step": 116
    },
    {
      "epoch": 0.1723756906077348,
      "grad_norm": 0.5517849326133728,
      "learning_rate": 4.0144148627425993e-05,
      "loss": 2.1082,
      "step": 117
    },
    {
      "epoch": 0.17384898710865562,
      "grad_norm": 0.5440712571144104,
      "learning_rate": 3.933501846281267e-05,
      "loss": 2.1231,
      "step": 118
    },
    {
      "epoch": 0.17532228360957644,
      "grad_norm": 0.5332390666007996,
      "learning_rate": 3.852880399766243e-05,
      "loss": 2.1617,
      "step": 119
    },
    {
      "epoch": 0.17679558011049723,
      "grad_norm": 0.5297031998634338,
      "learning_rate": 3.772572564296005e-05,
      "loss": 2.1625,
      "step": 120
    },
    {
      "epoch": 0.17826887661141805,
      "grad_norm": 0.5557818412780762,
      "learning_rate": 3.6926002952309016e-05,
      "loss": 2.2473,
      "step": 121
    },
    {
      "epoch": 0.17974217311233887,
      "grad_norm": 0.5619638562202454,
      "learning_rate": 3.612985456190778e-05,
      "loss": 2.1909,
      "step": 122
    },
    {
      "epoch": 0.18121546961325966,
      "grad_norm": 0.5676457285881042,
      "learning_rate": 3.533749813077677e-05,
      "loss": 2.2397,
      "step": 123
    },
    {
      "epoch": 0.18268876611418047,
      "grad_norm": 0.5630333423614502,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 2.1458,
      "step": 124
    },
    {
      "epoch": 0.1841620626151013,
      "grad_norm": 0.5573937892913818,
      "learning_rate": 3.3765026539765834e-05,
      "loss": 2.1902,
      "step": 125
    },
    {
      "epoch": 0.1856353591160221,
      "grad_norm": 0.5926021933555603,
      "learning_rate": 3.298534127791785e-05,
      "loss": 2.2485,
      "step": 126
    },
    {
      "epoch": 0.1871086556169429,
      "grad_norm": 0.5963653326034546,
      "learning_rate": 3.221030765387417e-05,
      "loss": 2.1565,
      "step": 127
    },
    {
      "epoch": 0.18858195211786372,
      "grad_norm": 0.6477805376052856,
      "learning_rate": 3.144013755408895e-05,
      "loss": 2.3577,
      "step": 128
    },
    {
      "epoch": 0.19005524861878453,
      "grad_norm": 0.5789662599563599,
      "learning_rate": 3.0675041535377405e-05,
      "loss": 2.16,
      "step": 129
    },
    {
      "epoch": 0.19152854511970535,
      "grad_norm": 0.5775110721588135,
      "learning_rate": 2.991522876735154e-05,
      "loss": 2.1837,
      "step": 130
    },
    {
      "epoch": 0.19300184162062614,
      "grad_norm": 0.6224007606506348,
      "learning_rate": 2.916090697523549e-05,
      "loss": 2.3447,
      "step": 131
    },
    {
      "epoch": 0.19447513812154696,
      "grad_norm": 0.6071020364761353,
      "learning_rate": 2.8412282383075363e-05,
      "loss": 2.3363,
      "step": 132
    },
    {
      "epoch": 0.19594843462246778,
      "grad_norm": 0.5970646142959595,
      "learning_rate": 2.766955965735968e-05,
      "loss": 2.2286,
      "step": 133
    },
    {
      "epoch": 0.1974217311233886,
      "grad_norm": 0.5931335091590881,
      "learning_rate": 2.693294185106562e-05,
      "loss": 2.2033,
      "step": 134
    },
    {
      "epoch": 0.19889502762430938,
      "grad_norm": 0.6456011533737183,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 2.3814,
      "step": 135
    },
    {
      "epoch": 0.2003683241252302,
      "grad_norm": 0.6193873882293701,
      "learning_rate": 2.547882480847461e-05,
      "loss": 2.1756,
      "step": 136
    },
    {
      "epoch": 0.20184162062615102,
      "grad_norm": 0.611831545829773,
      "learning_rate": 2.476172311325783e-05,
      "loss": 2.2781,
      "step": 137
    },
    {
      "epoch": 0.20331491712707184,
      "grad_norm": 0.6075538992881775,
      "learning_rate": 2.405152131093926e-05,
      "loss": 2.2085,
      "step": 138
    },
    {
      "epoch": 0.20478821362799263,
      "grad_norm": 0.6648210883140564,
      "learning_rate": 2.3348413563600325e-05,
      "loss": 2.2603,
      "step": 139
    },
    {
      "epoch": 0.20626151012891344,
      "grad_norm": 0.6589313745498657,
      "learning_rate": 2.2652592093878666e-05,
      "loss": 2.3493,
      "step": 140
    },
    {
      "epoch": 0.20773480662983426,
      "grad_norm": 0.6778343915939331,
      "learning_rate": 2.196424713241637e-05,
      "loss": 2.2593,
      "step": 141
    },
    {
      "epoch": 0.20920810313075505,
      "grad_norm": 0.6866799592971802,
      "learning_rate": 2.128356686585282e-05,
      "loss": 2.2085,
      "step": 142
    },
    {
      "epoch": 0.21068139963167587,
      "grad_norm": 0.6959302425384521,
      "learning_rate": 2.061073738537635e-05,
      "loss": 2.1706,
      "step": 143
    },
    {
      "epoch": 0.2121546961325967,
      "grad_norm": 0.6998744010925293,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 2.349,
      "step": 144
    },
    {
      "epoch": 0.2136279926335175,
      "grad_norm": 0.7252722382545471,
      "learning_rate": 1.928936436551661e-05,
      "loss": 2.2642,
      "step": 145
    },
    {
      "epoch": 0.2151012891344383,
      "grad_norm": 0.7717849016189575,
      "learning_rate": 1.8641182076323148e-05,
      "loss": 2.2799,
      "step": 146
    },
    {
      "epoch": 0.2165745856353591,
      "grad_norm": 0.7438363432884216,
      "learning_rate": 1.800157297483417e-05,
      "loss": 2.1361,
      "step": 147
    },
    {
      "epoch": 0.21804788213627993,
      "grad_norm": 0.8306782245635986,
      "learning_rate": 1.7370711923791567e-05,
      "loss": 2.2135,
      "step": 148
    },
    {
      "epoch": 0.21952117863720075,
      "grad_norm": 0.8231614828109741,
      "learning_rate": 1.6748771394307585e-05,
      "loss": 2.228,
      "step": 149
    },
    {
      "epoch": 0.22099447513812154,
      "grad_norm": 0.8910000920295715,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 2.0673,
      "step": 150
    },
    {
      "epoch": 0.22099447513812154,
      "eval_loss": 2.1498870849609375,
      "eval_runtime": 33.6508,
      "eval_samples_per_second": 33.996,
      "eval_steps_per_second": 8.499,
      "step": 150
    },
    {
      "epoch": 0.22246777163904236,
      "grad_norm": 0.4497315287590027,
      "learning_rate": 1.553232954407171e-05,
      "loss": 2.1235,
      "step": 151
    },
    {
      "epoch": 0.22394106813996317,
      "grad_norm": 0.48219621181488037,
      "learning_rate": 1.4938160786375572e-05,
      "loss": 2.0459,
      "step": 152
    },
    {
      "epoch": 0.225414364640884,
      "grad_norm": 0.4607875645160675,
      "learning_rate": 1.435357758543015e-05,
      "loss": 2.1236,
      "step": 153
    },
    {
      "epoch": 0.22688766114180478,
      "grad_norm": 0.4888078272342682,
      "learning_rate": 1.3778739760445552e-05,
      "loss": 2.0777,
      "step": 154
    },
    {
      "epoch": 0.2283609576427256,
      "grad_norm": 0.5186671614646912,
      "learning_rate": 1.3213804466343421e-05,
      "loss": 2.1288,
      "step": 155
    },
    {
      "epoch": 0.22983425414364642,
      "grad_norm": 0.5025704503059387,
      "learning_rate": 1.2658926150792322e-05,
      "loss": 1.9971,
      "step": 156
    },
    {
      "epoch": 0.2313075506445672,
      "grad_norm": 0.4730200469493866,
      "learning_rate": 1.2114256511983274e-05,
      "loss": 2.0245,
      "step": 157
    },
    {
      "epoch": 0.23278084714548802,
      "grad_norm": 0.5410052537918091,
      "learning_rate": 1.157994445715706e-05,
      "loss": 2.042,
      "step": 158
    },
    {
      "epoch": 0.23425414364640884,
      "grad_norm": 0.4994640648365021,
      "learning_rate": 1.1056136061894384e-05,
      "loss": 1.9262,
      "step": 159
    },
    {
      "epoch": 0.23572744014732966,
      "grad_norm": 0.49574676156044006,
      "learning_rate": 1.0542974530180327e-05,
      "loss": 2.1671,
      "step": 160
    },
    {
      "epoch": 0.23720073664825045,
      "grad_norm": 0.5467461943626404,
      "learning_rate": 1.0040600155253765e-05,
      "loss": 2.248,
      "step": 161
    },
    {
      "epoch": 0.23867403314917127,
      "grad_norm": 0.5036174654960632,
      "learning_rate": 9.549150281252633e-06,
      "loss": 2.114,
      "step": 162
    },
    {
      "epoch": 0.24014732965009208,
      "grad_norm": 0.516216516494751,
      "learning_rate": 9.068759265665384e-06,
      "loss": 2.1475,
      "step": 163
    },
    {
      "epoch": 0.2416206261510129,
      "grad_norm": 0.5705137252807617,
      "learning_rate": 8.599558442598998e-06,
      "loss": 2.3152,
      "step": 164
    },
    {
      "epoch": 0.2430939226519337,
      "grad_norm": 0.5074183940887451,
      "learning_rate": 8.141676086873572e-06,
      "loss": 2.0175,
      "step": 165
    },
    {
      "epoch": 0.2445672191528545,
      "grad_norm": 0.5196369290351868,
      "learning_rate": 7.695237378953223e-06,
      "loss": 2.0298,
      "step": 166
    },
    {
      "epoch": 0.24604051565377533,
      "grad_norm": 0.5080201625823975,
      "learning_rate": 7.260364370723044e-06,
      "loss": 2.1187,
      "step": 167
    },
    {
      "epoch": 0.24751381215469614,
      "grad_norm": 0.5384064316749573,
      "learning_rate": 6.837175952121306e-06,
      "loss": 2.1803,
      "step": 168
    },
    {
      "epoch": 0.24898710865561693,
      "grad_norm": 0.5691060423851013,
      "learning_rate": 6.425787818636131e-06,
      "loss": 2.0255,
      "step": 169
    },
    {
      "epoch": 0.2504604051565378,
      "grad_norm": 0.550330638885498,
      "learning_rate": 6.026312439675552e-06,
      "loss": 2.1137,
      "step": 170
    },
    {
      "epoch": 0.25193370165745854,
      "grad_norm": 0.5306848883628845,
      "learning_rate": 5.6388590278194096e-06,
      "loss": 2.1299,
      "step": 171
    },
    {
      "epoch": 0.25340699815837936,
      "grad_norm": 0.5351966619491577,
      "learning_rate": 5.263533508961827e-06,
      "loss": 2.1293,
      "step": 172
    },
    {
      "epoch": 0.2548802946593002,
      "grad_norm": 0.5528244972229004,
      "learning_rate": 4.900438493352055e-06,
      "loss": 2.1027,
      "step": 173
    },
    {
      "epoch": 0.256353591160221,
      "grad_norm": 0.5488362312316895,
      "learning_rate": 4.549673247541875e-06,
      "loss": 2.1509,
      "step": 174
    },
    {
      "epoch": 0.2578268876611418,
      "grad_norm": 0.5732300877571106,
      "learning_rate": 4.2113336672471245e-06,
      "loss": 2.2063,
      "step": 175
    },
    {
      "epoch": 0.25930018416206263,
      "grad_norm": 0.5619857907295227,
      "learning_rate": 3.885512251130763e-06,
      "loss": 2.2253,
      "step": 176
    },
    {
      "epoch": 0.26077348066298345,
      "grad_norm": 0.5769329071044922,
      "learning_rate": 3.5722980755146517e-06,
      "loss": 2.2133,
      "step": 177
    },
    {
      "epoch": 0.2622467771639042,
      "grad_norm": 0.5814343690872192,
      "learning_rate": 3.271776770026963e-06,
      "loss": 2.154,
      "step": 178
    },
    {
      "epoch": 0.263720073664825,
      "grad_norm": 0.6259899139404297,
      "learning_rate": 2.9840304941919415e-06,
      "loss": 2.3186,
      "step": 179
    },
    {
      "epoch": 0.26519337016574585,
      "grad_norm": 0.5869902968406677,
      "learning_rate": 2.7091379149682685e-06,
      "loss": 2.2518,
      "step": 180
    },
    {
      "epoch": 0.26666666666666666,
      "grad_norm": 0.6089134216308594,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 2.0913,
      "step": 181
    },
    {
      "epoch": 0.2681399631675875,
      "grad_norm": 0.5955935716629028,
      "learning_rate": 2.1982109232821178e-06,
      "loss": 2.1964,
      "step": 182
    },
    {
      "epoch": 0.2696132596685083,
      "grad_norm": 0.5801297426223755,
      "learning_rate": 1.962316193157593e-06,
      "loss": 2.128,
      "step": 183
    },
    {
      "epoch": 0.2710865561694291,
      "grad_norm": 0.606168270111084,
      "learning_rate": 1.7395544861325718e-06,
      "loss": 2.248,
      "step": 184
    },
    {
      "epoch": 0.27255985267034993,
      "grad_norm": 0.628900945186615,
      "learning_rate": 1.5299867030334814e-06,
      "loss": 2.0947,
      "step": 185
    },
    {
      "epoch": 0.2740331491712707,
      "grad_norm": 0.6443435549736023,
      "learning_rate": 1.333670137599713e-06,
      "loss": 2.1588,
      "step": 186
    },
    {
      "epoch": 0.2755064456721915,
      "grad_norm": 0.6042181849479675,
      "learning_rate": 1.1506584608200367e-06,
      "loss": 2.0953,
      "step": 187
    },
    {
      "epoch": 0.27697974217311233,
      "grad_norm": 0.6067960858345032,
      "learning_rate": 9.810017062595322e-07,
      "loss": 2.2178,
      "step": 188
    },
    {
      "epoch": 0.27845303867403315,
      "grad_norm": 0.6070239543914795,
      "learning_rate": 8.247462563808817e-07,
      "loss": 2.2549,
      "step": 189
    },
    {
      "epoch": 0.27992633517495397,
      "grad_norm": 0.6234919428825378,
      "learning_rate": 6.819348298638839e-07,
      "loss": 2.1084,
      "step": 190
    },
    {
      "epoch": 0.2813996316758748,
      "grad_norm": 0.6733989715576172,
      "learning_rate": 5.526064699265753e-07,
      "loss": 2.0763,
      "step": 191
    },
    {
      "epoch": 0.2828729281767956,
      "grad_norm": 0.7105913758277893,
      "learning_rate": 4.367965336512403e-07,
      "loss": 2.2615,
      "step": 192
    },
    {
      "epoch": 0.28434622467771636,
      "grad_norm": 0.718129575252533,
      "learning_rate": 3.3453668231809286e-07,
      "loss": 2.3094,
      "step": 193
    },
    {
      "epoch": 0.2858195211786372,
      "grad_norm": 0.6917638182640076,
      "learning_rate": 2.458548727494292e-07,
      "loss": 2.1456,
      "step": 194
    },
    {
      "epoch": 0.287292817679558,
      "grad_norm": 0.7489725351333618,
      "learning_rate": 1.7077534966650766e-07,
      "loss": 2.1397,
      "step": 195
    },
    {
      "epoch": 0.2887661141804788,
      "grad_norm": 0.7198150157928467,
      "learning_rate": 1.0931863906127327e-07,
      "loss": 2.1637,
      "step": 196
    },
    {
      "epoch": 0.29023941068139963,
      "grad_norm": 0.758560299873352,
      "learning_rate": 6.150154258476315e-08,
      "loss": 2.221,
      "step": 197
    },
    {
      "epoch": 0.29171270718232045,
      "grad_norm": 0.764299750328064,
      "learning_rate": 2.7337132953697554e-08,
      "loss": 2.2446,
      "step": 198
    },
    {
      "epoch": 0.29318600368324127,
      "grad_norm": 0.8316512703895569,
      "learning_rate": 6.834750376549792e-09,
      "loss": 2.1687,
      "step": 199
    },
    {
      "epoch": 0.2946593001841621,
      "grad_norm": 1.0795071125030518,
      "learning_rate": 0.0,
      "loss": 2.0839,
      "step": 200
    },
    {
      "epoch": 0.2946593001841621,
      "eval_loss": 2.142829656600952,
      "eval_runtime": 33.6479,
      "eval_samples_per_second": 33.999,
      "eval_steps_per_second": 8.5,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5.456501850243072e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}