{ |
|
"best_metric": 0.8247767686843872, |
|
"best_model_checkpoint": "miner_id_24/checkpoint-150", |
|
"epoch": 0.0468055230517201, |
|
"eval_steps": 50, |
|
"global_step": 150, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0003120368203448007, |
|
"grad_norm": 9.957253456115723, |
|
"learning_rate": 1e-05, |
|
"loss": 2.6627, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0003120368203448007, |
|
"eval_loss": 2.7949066162109375, |
|
"eval_runtime": 575.1453, |
|
"eval_samples_per_second": 9.385, |
|
"eval_steps_per_second": 2.347, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0006240736406896014, |
|
"grad_norm": 11.108814239501953, |
|
"learning_rate": 2e-05, |
|
"loss": 2.6689, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.000936110461034402, |
|
"grad_norm": 10.682526588439941, |
|
"learning_rate": 3e-05, |
|
"loss": 2.4337, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0012481472813792027, |
|
"grad_norm": 7.091076374053955, |
|
"learning_rate": 4e-05, |
|
"loss": 1.7918, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0015601841017240034, |
|
"grad_norm": 3.724902391433716, |
|
"learning_rate": 5e-05, |
|
"loss": 1.3859, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.001872220922068804, |
|
"grad_norm": 2.5286026000976562, |
|
"learning_rate": 6e-05, |
|
"loss": 1.1309, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0021842577424136048, |
|
"grad_norm": 2.7851521968841553, |
|
"learning_rate": 7e-05, |
|
"loss": 1.1048, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.0024962945627584054, |
|
"grad_norm": 1.8212271928787231, |
|
"learning_rate": 8e-05, |
|
"loss": 0.8491, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.002808331383103206, |
|
"grad_norm": 1.6948728561401367, |
|
"learning_rate": 9e-05, |
|
"loss": 0.928, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.003120368203448007, |
|
"grad_norm": 1.5595484972000122, |
|
"learning_rate": 0.0001, |
|
"loss": 0.8576, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0034324050237928075, |
|
"grad_norm": 1.3692195415496826, |
|
"learning_rate": 9.999316524962345e-05, |
|
"loss": 0.8588, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.003744441844137608, |
|
"grad_norm": 1.4032466411590576, |
|
"learning_rate": 9.997266286704631e-05, |
|
"loss": 0.7998, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.004056478664482409, |
|
"grad_norm": 1.3274507522583008, |
|
"learning_rate": 9.993849845741524e-05, |
|
"loss": 0.8097, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.0043685154848272095, |
|
"grad_norm": 1.175102710723877, |
|
"learning_rate": 9.989068136093873e-05, |
|
"loss": 0.7696, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.00468055230517201, |
|
"grad_norm": 1.281258225440979, |
|
"learning_rate": 9.98292246503335e-05, |
|
"loss": 0.8471, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.004992589125516811, |
|
"grad_norm": 1.2209652662277222, |
|
"learning_rate": 9.975414512725057e-05, |
|
"loss": 0.8182, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.005304625945861612, |
|
"grad_norm": 1.277958869934082, |
|
"learning_rate": 9.966546331768191e-05, |
|
"loss": 0.7592, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.005616662766206412, |
|
"grad_norm": 1.2684087753295898, |
|
"learning_rate": 9.956320346634876e-05, |
|
"loss": 0.8921, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.005928699586551213, |
|
"grad_norm": 2.5076284408569336, |
|
"learning_rate": 9.944739353007344e-05, |
|
"loss": 0.7431, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.006240736406896014, |
|
"grad_norm": 1.1840873956680298, |
|
"learning_rate": 9.931806517013612e-05, |
|
"loss": 0.8956, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.006552773227240814, |
|
"grad_norm": 1.0883880853652954, |
|
"learning_rate": 9.917525374361912e-05, |
|
"loss": 0.7756, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.006864810047585615, |
|
"grad_norm": 1.2152539491653442, |
|
"learning_rate": 9.901899829374047e-05, |
|
"loss": 0.8632, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.007176846867930416, |
|
"grad_norm": 1.468305230140686, |
|
"learning_rate": 9.884934153917997e-05, |
|
"loss": 0.7961, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.007488883688275216, |
|
"grad_norm": 1.2167303562164307, |
|
"learning_rate": 9.86663298624003e-05, |
|
"loss": 0.7196, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.007800920508620017, |
|
"grad_norm": 1.2322477102279663, |
|
"learning_rate": 9.847001329696653e-05, |
|
"loss": 0.7526, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.008112957328964818, |
|
"grad_norm": 1.1364251375198364, |
|
"learning_rate": 9.826044551386744e-05, |
|
"loss": 0.7615, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.008424994149309618, |
|
"grad_norm": 1.0931193828582764, |
|
"learning_rate": 9.803768380684242e-05, |
|
"loss": 0.7886, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.008737030969654419, |
|
"grad_norm": 1.1731034517288208, |
|
"learning_rate": 9.780178907671789e-05, |
|
"loss": 0.7239, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.00904906778999922, |
|
"grad_norm": 1.1136339902877808, |
|
"learning_rate": 9.755282581475769e-05, |
|
"loss": 0.8517, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.00936110461034402, |
|
"grad_norm": 1.1435471773147583, |
|
"learning_rate": 9.729086208503174e-05, |
|
"loss": 0.8485, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.009673141430688821, |
|
"grad_norm": 1.1177922487258911, |
|
"learning_rate": 9.701596950580806e-05, |
|
"loss": 0.8377, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.009985178251033622, |
|
"grad_norm": 1.0039643049240112, |
|
"learning_rate": 9.672822322997305e-05, |
|
"loss": 0.8118, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.010297215071378422, |
|
"grad_norm": 0.9791098833084106, |
|
"learning_rate": 9.642770192448536e-05, |
|
"loss": 0.8896, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.010609251891723223, |
|
"grad_norm": 0.9517635107040405, |
|
"learning_rate": 9.611448774886924e-05, |
|
"loss": 0.8058, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.010921288712068024, |
|
"grad_norm": 1.0164357423782349, |
|
"learning_rate": 9.578866633275288e-05, |
|
"loss": 0.8149, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.011233325532412824, |
|
"grad_norm": 0.9662339091300964, |
|
"learning_rate": 9.545032675245813e-05, |
|
"loss": 0.8988, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.011545362352757625, |
|
"grad_norm": 0.9611389636993408, |
|
"learning_rate": 9.509956150664796e-05, |
|
"loss": 0.766, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.011857399173102426, |
|
"grad_norm": 1.1109344959259033, |
|
"learning_rate": 9.473646649103818e-05, |
|
"loss": 0.9435, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.012169435993447227, |
|
"grad_norm": 1.1274558305740356, |
|
"learning_rate": 9.43611409721806e-05, |
|
"loss": 0.9347, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.012481472813792027, |
|
"grad_norm": 1.0689157247543335, |
|
"learning_rate": 9.397368756032445e-05, |
|
"loss": 0.8265, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.012793509634136828, |
|
"grad_norm": 1.0937086343765259, |
|
"learning_rate": 9.357421218136386e-05, |
|
"loss": 0.8571, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.013105546454481629, |
|
"grad_norm": 1.0644176006317139, |
|
"learning_rate": 9.316282404787871e-05, |
|
"loss": 0.7975, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.01341758327482643, |
|
"grad_norm": 1.2639864683151245, |
|
"learning_rate": 9.273963562927695e-05, |
|
"loss": 0.9379, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.01372962009517123, |
|
"grad_norm": 2.6970744132995605, |
|
"learning_rate": 9.230476262104677e-05, |
|
"loss": 1.5578, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.01404165691551603, |
|
"grad_norm": 2.194770574569702, |
|
"learning_rate": 9.185832391312644e-05, |
|
"loss": 1.682, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.014353693735860831, |
|
"grad_norm": 1.9200794696807861, |
|
"learning_rate": 9.140044155740101e-05, |
|
"loss": 1.7524, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.014665730556205632, |
|
"grad_norm": 1.6025694608688354, |
|
"learning_rate": 9.093124073433463e-05, |
|
"loss": 1.6386, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.014977767376550433, |
|
"grad_norm": 1.3628052473068237, |
|
"learning_rate": 9.045084971874738e-05, |
|
"loss": 1.6543, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.015289804196895233, |
|
"grad_norm": 1.1497654914855957, |
|
"learning_rate": 8.995939984474624e-05, |
|
"loss": 1.4452, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.015601841017240034, |
|
"grad_norm": 1.199021816253662, |
|
"learning_rate": 8.945702546981969e-05, |
|
"loss": 1.6019, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.015601841017240034, |
|
"eval_loss": 0.963438868522644, |
|
"eval_runtime": 576.2596, |
|
"eval_samples_per_second": 9.367, |
|
"eval_steps_per_second": 2.343, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.015913877837584835, |
|
"grad_norm": 1.680629014968872, |
|
"learning_rate": 8.894386393810563e-05, |
|
"loss": 0.9826, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.016225914657929635, |
|
"grad_norm": 1.1629451513290405, |
|
"learning_rate": 8.842005554284296e-05, |
|
"loss": 0.8711, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.016537951478274436, |
|
"grad_norm": 0.999272346496582, |
|
"learning_rate": 8.788574348801675e-05, |
|
"loss": 0.9047, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.016849988298619237, |
|
"grad_norm": 1.0136737823486328, |
|
"learning_rate": 8.73410738492077e-05, |
|
"loss": 0.7953, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.017162025118964037, |
|
"grad_norm": 0.951803982257843, |
|
"learning_rate": 8.678619553365659e-05, |
|
"loss": 0.6757, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.017474061939308838, |
|
"grad_norm": 1.0217398405075073, |
|
"learning_rate": 8.622126023955446e-05, |
|
"loss": 0.822, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.01778609875965364, |
|
"grad_norm": 0.9405883550643921, |
|
"learning_rate": 8.564642241456986e-05, |
|
"loss": 0.7946, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.01809813557999844, |
|
"grad_norm": 0.9246597290039062, |
|
"learning_rate": 8.506183921362443e-05, |
|
"loss": 0.7408, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.01841017240034324, |
|
"grad_norm": 1.0739665031433105, |
|
"learning_rate": 8.44676704559283e-05, |
|
"loss": 0.8104, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.01872220922068804, |
|
"grad_norm": 1.036548376083374, |
|
"learning_rate": 8.386407858128706e-05, |
|
"loss": 0.7561, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.01903424604103284, |
|
"grad_norm": 0.9185912013053894, |
|
"learning_rate": 8.32512286056924e-05, |
|
"loss": 0.7367, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.019346282861377642, |
|
"grad_norm": 0.9082395434379578, |
|
"learning_rate": 8.262928807620843e-05, |
|
"loss": 0.8553, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.019658319681722443, |
|
"grad_norm": 0.9209671020507812, |
|
"learning_rate": 8.199842702516583e-05, |
|
"loss": 0.7051, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.019970356502067244, |
|
"grad_norm": 0.8518406748771667, |
|
"learning_rate": 8.135881792367686e-05, |
|
"loss": 0.6811, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.020282393322412044, |
|
"grad_norm": 0.8296268582344055, |
|
"learning_rate": 8.07106356344834e-05, |
|
"loss": 0.6696, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.020594430142756845, |
|
"grad_norm": 0.8943758606910706, |
|
"learning_rate": 8.005405736415126e-05, |
|
"loss": 0.6595, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.020906466963101646, |
|
"grad_norm": 0.9089400172233582, |
|
"learning_rate": 7.938926261462366e-05, |
|
"loss": 0.8329, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.021218503783446446, |
|
"grad_norm": 0.9639408588409424, |
|
"learning_rate": 7.871643313414718e-05, |
|
"loss": 0.7802, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.021530540603791247, |
|
"grad_norm": 0.9582761526107788, |
|
"learning_rate": 7.803575286758364e-05, |
|
"loss": 0.7514, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.021842577424136048, |
|
"grad_norm": 0.8935718536376953, |
|
"learning_rate": 7.734740790612136e-05, |
|
"loss": 0.7832, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.02215461424448085, |
|
"grad_norm": 0.8567960262298584, |
|
"learning_rate": 7.66515864363997e-05, |
|
"loss": 0.7493, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.02246665106482565, |
|
"grad_norm": 0.9141445755958557, |
|
"learning_rate": 7.594847868906076e-05, |
|
"loss": 0.8103, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.02277868788517045, |
|
"grad_norm": 0.8256440162658691, |
|
"learning_rate": 7.52382768867422e-05, |
|
"loss": 0.7201, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.02309072470551525, |
|
"grad_norm": 0.9923111796379089, |
|
"learning_rate": 7.452117519152542e-05, |
|
"loss": 0.7621, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.02340276152586005, |
|
"grad_norm": 0.8481928706169128, |
|
"learning_rate": 7.379736965185368e-05, |
|
"loss": 0.8073, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.02371479834620485, |
|
"grad_norm": 0.882407546043396, |
|
"learning_rate": 7.30670581489344e-05, |
|
"loss": 0.7526, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.024026835166549652, |
|
"grad_norm": 0.8140597939491272, |
|
"learning_rate": 7.233044034264034e-05, |
|
"loss": 0.6825, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.024338871986894453, |
|
"grad_norm": 0.8657062649726868, |
|
"learning_rate": 7.158771761692464e-05, |
|
"loss": 0.7353, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.024650908807239254, |
|
"grad_norm": 0.8869200944900513, |
|
"learning_rate": 7.083909302476453e-05, |
|
"loss": 0.7921, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.024962945627584054, |
|
"grad_norm": 0.9941065311431885, |
|
"learning_rate": 7.008477123264848e-05, |
|
"loss": 0.8222, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.025274982447928855, |
|
"grad_norm": 0.9149479269981384, |
|
"learning_rate": 6.932495846462261e-05, |
|
"loss": 0.7853, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.025587019268273656, |
|
"grad_norm": 0.9197757244110107, |
|
"learning_rate": 6.855986244591104e-05, |
|
"loss": 0.7227, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.025899056088618456, |
|
"grad_norm": 0.9976024031639099, |
|
"learning_rate": 6.778969234612584e-05, |
|
"loss": 0.8561, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.026211092908963257, |
|
"grad_norm": 0.8757933974266052, |
|
"learning_rate": 6.701465872208216e-05, |
|
"loss": 0.7793, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.026523129729308058, |
|
"grad_norm": 1.0709153413772583, |
|
"learning_rate": 6.623497346023418e-05, |
|
"loss": 0.8835, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.02683516654965286, |
|
"grad_norm": 0.8737066388130188, |
|
"learning_rate": 6.545084971874738e-05, |
|
"loss": 0.8136, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.02714720336999766, |
|
"grad_norm": 0.9159258604049683, |
|
"learning_rate": 6.466250186922325e-05, |
|
"loss": 0.9143, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.02745924019034246, |
|
"grad_norm": 0.9400917887687683, |
|
"learning_rate": 6.387014543809223e-05, |
|
"loss": 0.8458, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.02777127701068726, |
|
"grad_norm": 0.8912694454193115, |
|
"learning_rate": 6.307399704769099e-05, |
|
"loss": 0.7847, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.02808331383103206, |
|
"grad_norm": 0.8344074487686157, |
|
"learning_rate": 6.227427435703997e-05, |
|
"loss": 0.7255, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.028395350651376862, |
|
"grad_norm": 0.9551566243171692, |
|
"learning_rate": 6.147119600233758e-05, |
|
"loss": 0.7822, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.028707387471721663, |
|
"grad_norm": 0.8455932140350342, |
|
"learning_rate": 6.066498153718735e-05, |
|
"loss": 0.7324, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.029019424292066463, |
|
"grad_norm": 0.7846436500549316, |
|
"learning_rate": 5.985585137257401e-05, |
|
"loss": 0.7129, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.029331461112411264, |
|
"grad_norm": 1.35532546043396, |
|
"learning_rate": 5.90440267166055e-05, |
|
"loss": 1.2972, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.029643497932756065, |
|
"grad_norm": 1.1400768756866455, |
|
"learning_rate": 5.8229729514036705e-05, |
|
"loss": 1.112, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.029955534753100865, |
|
"grad_norm": 1.5415581464767456, |
|
"learning_rate": 5.74131823855921e-05, |
|
"loss": 1.441, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.030267571573445666, |
|
"grad_norm": 1.1367700099945068, |
|
"learning_rate": 5.6594608567103456e-05, |
|
"loss": 1.3925, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.030579608393790467, |
|
"grad_norm": 0.9753825664520264, |
|
"learning_rate": 5.577423184847932e-05, |
|
"loss": 1.5174, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.030891645214135267, |
|
"grad_norm": 0.905869722366333, |
|
"learning_rate": 5.495227651252315e-05, |
|
"loss": 1.334, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.031203682034480068, |
|
"grad_norm": 0.930263876914978, |
|
"learning_rate": 5.4128967273616625e-05, |
|
"loss": 1.4637, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.031203682034480068, |
|
"eval_loss": 0.8465943932533264, |
|
"eval_runtime": 576.3777, |
|
"eval_samples_per_second": 9.365, |
|
"eval_steps_per_second": 2.342, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.03151571885482487, |
|
"grad_norm": 1.0123701095581055, |
|
"learning_rate": 5.330452921628497e-05, |
|
"loss": 0.8473, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.03182775567516967, |
|
"grad_norm": 1.0247398614883423, |
|
"learning_rate": 5.247918773366112e-05, |
|
"loss": 0.7315, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.03213979249551447, |
|
"grad_norm": 1.0362675189971924, |
|
"learning_rate": 5.165316846586541e-05, |
|
"loss": 0.8193, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.03245182931585927, |
|
"grad_norm": 0.8984088897705078, |
|
"learning_rate": 5.0826697238317935e-05, |
|
"loss": 0.772, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.03276386613620407, |
|
"grad_norm": 0.8338197469711304, |
|
"learning_rate": 5e-05, |
|
"loss": 0.6654, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.03307590295654887, |
|
"grad_norm": 0.9568160772323608, |
|
"learning_rate": 4.917330276168208e-05, |
|
"loss": 0.8874, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.03338793977689367, |
|
"grad_norm": 0.8923954367637634, |
|
"learning_rate": 4.834683153413459e-05, |
|
"loss": 0.8197, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.033699976597238473, |
|
"grad_norm": 0.8308312892913818, |
|
"learning_rate": 4.7520812266338885e-05, |
|
"loss": 0.676, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.034012013417583274, |
|
"grad_norm": 0.8796210885047913, |
|
"learning_rate": 4.669547078371504e-05, |
|
"loss": 0.7891, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.034324050237928075, |
|
"grad_norm": 0.8588724136352539, |
|
"learning_rate": 4.5871032726383386e-05, |
|
"loss": 0.7916, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.034636087058272876, |
|
"grad_norm": 0.7926965951919556, |
|
"learning_rate": 4.504772348747687e-05, |
|
"loss": 0.6999, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.034948123878617676, |
|
"grad_norm": 0.8319413065910339, |
|
"learning_rate": 4.4225768151520694e-05, |
|
"loss": 0.7745, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.03526016069896248, |
|
"grad_norm": 0.8593688011169434, |
|
"learning_rate": 4.3405391432896555e-05, |
|
"loss": 0.7529, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.03557219751930728, |
|
"grad_norm": 0.7875027656555176, |
|
"learning_rate": 4.2586817614407895e-05, |
|
"loss": 0.6741, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.03588423433965208, |
|
"grad_norm": 0.8281791806221008, |
|
"learning_rate": 4.17702704859633e-05, |
|
"loss": 0.77, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.03619627115999688, |
|
"grad_norm": 1.3842780590057373, |
|
"learning_rate": 4.095597328339452e-05, |
|
"loss": 0.7395, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.03650830798034168, |
|
"grad_norm": 0.924323558807373, |
|
"learning_rate": 4.0144148627425993e-05, |
|
"loss": 0.8093, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.03682034480068648, |
|
"grad_norm": 0.8071739673614502, |
|
"learning_rate": 3.933501846281267e-05, |
|
"loss": 0.6499, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.03713238162103128, |
|
"grad_norm": 0.8322581052780151, |
|
"learning_rate": 3.852880399766243e-05, |
|
"loss": 0.6888, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.03744441844137608, |
|
"grad_norm": 0.9479567408561707, |
|
"learning_rate": 3.772572564296005e-05, |
|
"loss": 0.7255, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.03775645526172088, |
|
"grad_norm": 0.992094874382019, |
|
"learning_rate": 3.6926002952309016e-05, |
|
"loss": 0.7916, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.03806849208206568, |
|
"grad_norm": 0.8004103302955627, |
|
"learning_rate": 3.612985456190778e-05, |
|
"loss": 0.6846, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.038380528902410484, |
|
"grad_norm": 0.8908790349960327, |
|
"learning_rate": 3.533749813077677e-05, |
|
"loss": 0.8101, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.038692565722755284, |
|
"grad_norm": 0.9139007329940796, |
|
"learning_rate": 3.4549150281252636e-05, |
|
"loss": 0.7185, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.039004602543100085, |
|
"grad_norm": 0.7527826428413391, |
|
"learning_rate": 3.3765026539765834e-05, |
|
"loss": 0.7077, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.039316639363444886, |
|
"grad_norm": 0.8849127888679504, |
|
"learning_rate": 3.298534127791785e-05, |
|
"loss": 0.7982, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.039628676183789686, |
|
"grad_norm": 0.7668061852455139, |
|
"learning_rate": 3.221030765387417e-05, |
|
"loss": 0.6247, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.03994071300413449, |
|
"grad_norm": 0.8258087635040283, |
|
"learning_rate": 3.144013755408895e-05, |
|
"loss": 0.695, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.04025274982447929, |
|
"grad_norm": 1.0118632316589355, |
|
"learning_rate": 3.0675041535377405e-05, |
|
"loss": 0.8264, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.04056478664482409, |
|
"grad_norm": 0.9193443655967712, |
|
"learning_rate": 2.991522876735154e-05, |
|
"loss": 0.7078, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.04087682346516889, |
|
"grad_norm": 0.8520511388778687, |
|
"learning_rate": 2.916090697523549e-05, |
|
"loss": 0.8946, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.04118886028551369, |
|
"grad_norm": 0.8694193363189697, |
|
"learning_rate": 2.8412282383075363e-05, |
|
"loss": 0.7818, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.04150089710585849, |
|
"grad_norm": 0.9974744915962219, |
|
"learning_rate": 2.766955965735968e-05, |
|
"loss": 0.7602, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.04181293392620329, |
|
"grad_norm": 0.9424649477005005, |
|
"learning_rate": 2.693294185106562e-05, |
|
"loss": 0.8939, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.04212497074654809, |
|
"grad_norm": 0.9055785536766052, |
|
"learning_rate": 2.6202630348146324e-05, |
|
"loss": 0.8206, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.04243700756689289, |
|
"grad_norm": 0.8643087148666382, |
|
"learning_rate": 2.547882480847461e-05, |
|
"loss": 0.7982, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.04274904438723769, |
|
"grad_norm": 0.993396520614624, |
|
"learning_rate": 2.476172311325783e-05, |
|
"loss": 0.9636, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.043061081207582494, |
|
"grad_norm": 0.9313962459564209, |
|
"learning_rate": 2.405152131093926e-05, |
|
"loss": 0.7931, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.043373118027927295, |
|
"grad_norm": 0.9032983779907227, |
|
"learning_rate": 2.3348413563600325e-05, |
|
"loss": 0.754, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.043685154848272095, |
|
"grad_norm": 0.804119884967804, |
|
"learning_rate": 2.2652592093878666e-05, |
|
"loss": 0.6343, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.043997191668616896, |
|
"grad_norm": 0.8627309203147888, |
|
"learning_rate": 2.196424713241637e-05, |
|
"loss": 0.7687, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.0443092284889617, |
|
"grad_norm": 0.9543536305427551, |
|
"learning_rate": 2.128356686585282e-05, |
|
"loss": 0.8058, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.0446212653093065, |
|
"grad_norm": 0.826997697353363, |
|
"learning_rate": 2.061073738537635e-05, |
|
"loss": 0.7491, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.0449333021296513, |
|
"grad_norm": 0.8707612156867981, |
|
"learning_rate": 1.9945942635848748e-05, |
|
"loss": 0.786, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.0452453389499961, |
|
"grad_norm": 0.8594736456871033, |
|
"learning_rate": 1.928936436551661e-05, |
|
"loss": 1.0945, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.0455573757703409, |
|
"grad_norm": 1.065171718597412, |
|
"learning_rate": 1.8641182076323148e-05, |
|
"loss": 1.4071, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.0458694125906857, |
|
"grad_norm": 0.9744724631309509, |
|
"learning_rate": 1.800157297483417e-05, |
|
"loss": 1.3722, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.0461814494110305, |
|
"grad_norm": 0.9792397618293762, |
|
"learning_rate": 1.7370711923791567e-05, |
|
"loss": 1.4104, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.0464934862313753, |
|
"grad_norm": 0.9434669613838196, |
|
"learning_rate": 1.6748771394307585e-05, |
|
"loss": 1.3621, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.0468055230517201, |
|
"grad_norm": 0.8420989513397217, |
|
"learning_rate": 1.6135921418712956e-05, |
|
"loss": 1.4156, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.0468055230517201, |
|
"eval_loss": 0.8247767686843872, |
|
"eval_runtime": 576.4167, |
|
"eval_samples_per_second": 9.365, |
|
"eval_steps_per_second": 2.342, |
|
"step": 150 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 200, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.518629337792512e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |