{
  "best_metric": 0.11092506349086761,
  "best_model_checkpoint": "miner_id_24/checkpoint-700",
  "epoch": 0.7632282202031,
  "eval_steps": 100,
  "global_step": 714,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0010689470871191875,
      "grad_norm": 1.8254921436309814,
      "learning_rate": 2e-05,
      "loss": 3.9246,
      "step": 1
    },
    {
      "epoch": 0.0010689470871191875,
      "eval_loss": 3.9061317443847656,
      "eval_runtime": 286.2861,
      "eval_samples_per_second": 4.359,
      "eval_steps_per_second": 1.09,
      "step": 1
    },
    {
      "epoch": 0.002137894174238375,
      "grad_norm": 1.6388949155807495,
      "learning_rate": 4e-05,
      "loss": 3.7244,
      "step": 2
    },
    {
      "epoch": 0.003206841261357563,
      "grad_norm": 1.8553436994552612,
      "learning_rate": 6e-05,
      "loss": 3.8132,
      "step": 3
    },
    {
      "epoch": 0.00427578834847675,
      "grad_norm": 1.73699152469635,
      "learning_rate": 8e-05,
      "loss": 3.5206,
      "step": 4
    },
    {
      "epoch": 0.005344735435595938,
      "grad_norm": 2.0092246532440186,
      "learning_rate": 0.0001,
      "loss": 3.2891,
      "step": 5
    },
    {
      "epoch": 0.006413682522715126,
      "grad_norm": 2.0765933990478516,
      "learning_rate": 0.00012,
      "loss": 2.8629,
      "step": 6
    },
    {
      "epoch": 0.0074826296098343134,
      "grad_norm": 1.9038790464401245,
      "learning_rate": 0.00014,
      "loss": 2.1354,
      "step": 7
    },
    {
      "epoch": 0.0085515766969535,
      "grad_norm": 1.854448914527893,
      "learning_rate": 0.00016,
      "loss": 1.6619,
      "step": 8
    },
    {
      "epoch": 0.009620523784072688,
      "grad_norm": 2.755218267440796,
      "learning_rate": 0.00018,
      "loss": 1.3892,
      "step": 9
    },
    {
      "epoch": 0.010689470871191877,
      "grad_norm": 2.0359535217285156,
      "learning_rate": 0.0002,
      "loss": 0.9684,
      "step": 10
    },
    {
      "epoch": 0.011758417958311064,
      "grad_norm": 1.6178686618804932,
      "learning_rate": 0.00019999900431099657,
      "loss": 0.7566,
      "step": 11
    },
    {
      "epoch": 0.012827365045430252,
      "grad_norm": 1.7248117923736572,
      "learning_rate": 0.00019999601726381413,
      "loss": 0.3901,
      "step": 12
    },
    {
      "epoch": 0.01389631213254944,
      "grad_norm": 1.2264153957366943,
      "learning_rate": 0.00019999103891793618,
      "loss": 0.4864,
      "step": 13
    },
    {
      "epoch": 0.014965259219668627,
      "grad_norm": 1.0095995664596558,
      "learning_rate": 0.00019998406937250034,
      "loss": 0.2396,
      "step": 14
    },
    {
      "epoch": 0.016034206306787813,
      "grad_norm": 0.9053572416305542,
      "learning_rate": 0.0001999751087662966,
      "loss": 0.3481,
      "step": 15
    },
    {
      "epoch": 0.017103153393907,
      "grad_norm": 0.7639911770820618,
      "learning_rate": 0.00019996415727776455,
      "loss": 0.2529,
      "step": 16
    },
    {
      "epoch": 0.018172100481026188,
      "grad_norm": 0.9801614880561829,
      "learning_rate": 0.00019995121512498968,
      "loss": 0.3283,
      "step": 17
    },
    {
      "epoch": 0.019241047568145375,
      "grad_norm": 3.5335612297058105,
      "learning_rate": 0.0001999362825656992,
      "loss": 0.2665,
      "step": 18
    },
    {
      "epoch": 0.020309994655264563,
      "grad_norm": 0.8772406578063965,
      "learning_rate": 0.00019991935989725675,
      "loss": 0.3129,
      "step": 19
    },
    {
      "epoch": 0.021378941742383754,
      "grad_norm": 0.4901948571205139,
      "learning_rate": 0.00019990044745665672,
      "loss": 0.2087,
      "step": 20
    },
    {
      "epoch": 0.02244788882950294,
      "grad_norm": 0.5716347694396973,
      "learning_rate": 0.00019987954562051725,
      "loss": 0.2954,
      "step": 21
    },
    {
      "epoch": 0.02351683591662213,
      "grad_norm": 0.5213301777839661,
      "learning_rate": 0.0001998566548050729,
      "loss": 0.309,
      "step": 22
    },
    {
      "epoch": 0.024585783003741316,
      "grad_norm": 0.7063998579978943,
      "learning_rate": 0.00019983177546616633,
      "loss": 0.2885,
      "step": 23
    },
    {
      "epoch": 0.025654730090860504,
      "grad_norm": 0.6609776020050049,
      "learning_rate": 0.00019980490809923926,
      "loss": 0.3596,
      "step": 24
    },
    {
      "epoch": 0.02672367717797969,
      "grad_norm": 0.6100655794143677,
      "learning_rate": 0.0001997760532393225,
      "loss": 0.2666,
      "step": 25
    },
    {
      "epoch": 0.02779262426509888,
      "grad_norm": 0.4118332266807556,
      "learning_rate": 0.00019974521146102537,
      "loss": 0.2019,
      "step": 26
    },
    {
      "epoch": 0.028861571352218066,
      "grad_norm": 0.431892067193985,
      "learning_rate": 0.00019971238337852427,
      "loss": 0.2165,
      "step": 27
    },
    {
      "epoch": 0.029930518439337254,
      "grad_norm": 0.4727337956428528,
      "learning_rate": 0.00019967756964555045,
      "loss": 0.2206,
      "step": 28
    },
    {
      "epoch": 0.03099946552645644,
      "grad_norm": 0.40187329053878784,
      "learning_rate": 0.00019964077095537688,
      "loss": 0.2338,
      "step": 29
    },
    {
      "epoch": 0.032068412613575625,
      "grad_norm": 0.3421157896518707,
      "learning_rate": 0.0001996019880408046,
      "loss": 0.1973,
      "step": 30
    },
    {
      "epoch": 0.03313735970069481,
      "grad_norm": 0.451432466506958,
      "learning_rate": 0.00019956122167414807,
      "loss": 0.225,
      "step": 31
    },
    {
      "epoch": 0.034206306787814,
      "grad_norm": 0.3993541896343231,
      "learning_rate": 0.0001995184726672197,
      "loss": 0.2093,
      "step": 32
    },
    {
      "epoch": 0.03527525387493319,
      "grad_norm": 0.40808215737342834,
      "learning_rate": 0.00019947374187131384,
      "loss": 0.2254,
      "step": 33
    },
    {
      "epoch": 0.036344200962052375,
      "grad_norm": 0.4794451892375946,
      "learning_rate": 0.00019942703017718975,
      "loss": 0.2226,
      "step": 34
    },
    {
      "epoch": 0.03741314804917156,
      "grad_norm": 0.4402725398540497,
      "learning_rate": 0.0001993783385150538,
      "loss": 0.2356,
      "step": 35
    },
    {
      "epoch": 0.03848209513629075,
      "grad_norm": 0.3642333447933197,
      "learning_rate": 0.000199327667854541,
      "loss": 0.1812,
      "step": 36
    },
    {
      "epoch": 0.03955104222340994,
      "grad_norm": 0.3686297833919525,
      "learning_rate": 0.00019927501920469584,
      "loss": 0.2304,
      "step": 37
    },
    {
      "epoch": 0.040619989310529125,
      "grad_norm": 0.31539255380630493,
      "learning_rate": 0.00019922039361395185,
      "loss": 0.1428,
      "step": 38
    },
    {
      "epoch": 0.04168893639764832,
      "grad_norm": 0.36080244183540344,
      "learning_rate": 0.0001991637921701111,
      "loss": 0.1526,
      "step": 39
    },
    {
      "epoch": 0.04275788348476751,
      "grad_norm": 0.44204625487327576,
      "learning_rate": 0.00019910521600032227,
      "loss": 0.2179,
      "step": 40
    },
    {
      "epoch": 0.043826830571886695,
      "grad_norm": 0.323862761259079,
      "learning_rate": 0.00019904466627105834,
      "loss": 0.1372,
      "step": 41
    },
    {
      "epoch": 0.04489577765900588,
      "grad_norm": 0.3098718225955963,
      "learning_rate": 0.0001989821441880933,
      "loss": 0.1665,
      "step": 42
    },
    {
      "epoch": 0.04596472474612507,
      "grad_norm": 0.3269341289997101,
      "learning_rate": 0.0001989176509964781,
      "loss": 0.1824,
      "step": 43
    },
    {
      "epoch": 0.04703367183324426,
      "grad_norm": 0.2592598795890808,
      "learning_rate": 0.00019885118798051605,
      "loss": 0.1704,
      "step": 44
    },
    {
      "epoch": 0.048102618920363445,
      "grad_norm": 0.4089893698692322,
      "learning_rate": 0.000198782756463737,
      "loss": 0.1892,
      "step": 45
    },
    {
      "epoch": 0.04917156600748263,
      "grad_norm": 0.2590075731277466,
      "learning_rate": 0.00019871235780887113,
      "loss": 0.1545,
      "step": 46
    },
    {
      "epoch": 0.05024051309460182,
      "grad_norm": 0.28687378764152527,
      "learning_rate": 0.00019863999341782177,
      "loss": 0.1854,
      "step": 47
    },
    {
      "epoch": 0.05130946018172101,
      "grad_norm": 0.24908822774887085,
      "learning_rate": 0.00019856566473163746,
      "loss": 0.16,
      "step": 48
    },
    {
      "epoch": 0.052378407268840195,
      "grad_norm": 0.28324803709983826,
      "learning_rate": 0.00019848937323048337,
      "loss": 0.158,
      "step": 49
    },
    {
      "epoch": 0.05344735435595938,
      "grad_norm": 0.33342692255973816,
      "learning_rate": 0.0001984111204336116,
      "loss": 0.2,
      "step": 50
    },
    {
      "epoch": 0.05451630144307857,
      "grad_norm": 0.3774797320365906,
      "learning_rate": 0.00019833090789933122,
      "loss": 0.2204,
      "step": 51
    },
    {
      "epoch": 0.05558524853019776,
      "grad_norm": 0.33370718359947205,
      "learning_rate": 0.00019824873722497694,
      "loss": 0.1605,
      "step": 52
    },
    {
      "epoch": 0.056654195617316945,
      "grad_norm": 0.2836022973060608,
      "learning_rate": 0.0001981646100468775,
      "loss": 0.1768,
      "step": 53
    },
    {
      "epoch": 0.05772314270443613,
      "grad_norm": 0.3592361807823181,
      "learning_rate": 0.00019807852804032305,
      "loss": 0.1903,
      "step": 54
    },
    {
      "epoch": 0.05879208979155532,
      "grad_norm": 0.19539162516593933,
      "learning_rate": 0.0001979904929195317,
      "loss": 0.1543,
      "step": 55
    },
    {
      "epoch": 0.05986103687867451,
      "grad_norm": 0.29716816544532776,
      "learning_rate": 0.00019790050643761552,
      "loss": 0.2106,
      "step": 56
    },
    {
      "epoch": 0.060929983965793695,
      "grad_norm": 0.3658367097377777,
      "learning_rate": 0.0001978085703865455,
      "loss": 0.2197,
      "step": 57
    },
    {
      "epoch": 0.06199893105291288,
      "grad_norm": 0.30680355429649353,
      "learning_rate": 0.00019771468659711595,
      "loss": 0.1856,
      "step": 58
    },
    {
      "epoch": 0.06306787814003206,
      "grad_norm": 0.22984617948532104,
      "learning_rate": 0.000197618856938908,
      "loss": 0.1519,
      "step": 59
    },
    {
      "epoch": 0.06413682522715125,
      "grad_norm": 0.23498129844665527,
      "learning_rate": 0.0001975210833202524,
      "loss": 0.1692,
      "step": 60
    },
    {
      "epoch": 0.06520577231427044,
      "grad_norm": 0.3555554449558258,
      "learning_rate": 0.00019742136768819145,
      "loss": 0.179,
      "step": 61
    },
    {
      "epoch": 0.06627471940138963,
      "grad_norm": 0.3502557575702667,
      "learning_rate": 0.00019731971202844036,
      "loss": 0.2044,
      "step": 62
    },
    {
      "epoch": 0.06734366648850881,
      "grad_norm": 0.2852421998977661,
      "learning_rate": 0.00019721611836534756,
      "loss": 0.1737,
      "step": 63
    },
    {
      "epoch": 0.068412613575628,
      "grad_norm": 0.27989548444747925,
      "learning_rate": 0.00019711058876185447,
      "loss": 0.184,
      "step": 64
    },
    {
      "epoch": 0.06948156066274719,
      "grad_norm": 0.26199406385421753,
      "learning_rate": 0.00019700312531945442,
      "loss": 0.1867,
      "step": 65
    },
    {
      "epoch": 0.07055050774986638,
      "grad_norm": 0.21757884323596954,
      "learning_rate": 0.00019689373017815073,
      "loss": 0.1509,
      "step": 66
    },
    {
      "epoch": 0.07161945483698556,
      "grad_norm": 0.23988686501979828,
      "learning_rate": 0.00019678240551641426,
      "loss": 0.1667,
      "step": 67
    },
    {
      "epoch": 0.07268840192410475,
      "grad_norm": 0.25003519654273987,
      "learning_rate": 0.00019666915355113975,
      "loss": 0.1412,
      "step": 68
    },
    {
      "epoch": 0.07375734901122394,
      "grad_norm": 0.19979026913642883,
      "learning_rate": 0.000196553976537602,
      "loss": 0.1422,
      "step": 69
    },
    {
      "epoch": 0.07482629609834313,
      "grad_norm": 0.2231016457080841,
      "learning_rate": 0.00019643687676941068,
      "loss": 0.1447,
      "step": 70
    },
    {
      "epoch": 0.07589524318546231,
      "grad_norm": 0.22386683523654938,
      "learning_rate": 0.00019631785657846487,
      "loss": 0.1405,
      "step": 71
    },
    {
      "epoch": 0.0769641902725815,
      "grad_norm": 0.24956126511096954,
      "learning_rate": 0.00019619691833490643,
      "loss": 0.1741,
      "step": 72
    },
    {
      "epoch": 0.07803313735970069,
      "grad_norm": 0.5022568702697754,
      "learning_rate": 0.00019607406444707294,
      "loss": 0.3543,
      "step": 73
    },
    {
      "epoch": 0.07910208444681988,
      "grad_norm": 0.3087567985057831,
      "learning_rate": 0.00019594929736144976,
      "loss": 0.1787,
      "step": 74
    },
    {
      "epoch": 0.08017103153393906,
      "grad_norm": 0.27388593554496765,
      "learning_rate": 0.0001958226195626211,
      "loss": 0.1552,
      "step": 75
    },
    {
      "epoch": 0.08123997862105825,
      "grad_norm": 0.3442430794239044,
      "learning_rate": 0.0001956940335732209,
      "loss": 0.1789,
      "step": 76
    },
    {
      "epoch": 0.08230892570817744,
      "grad_norm": 0.2176661193370819,
      "learning_rate": 0.00019556354195388221,
      "loss": 0.1834,
      "step": 77
    },
    {
      "epoch": 0.08337787279529664,
      "grad_norm": 0.3691951632499695,
      "learning_rate": 0.0001954311473031864,
      "loss": 0.1758,
      "step": 78
    },
    {
      "epoch": 0.08444681988241583,
      "grad_norm": 0.2579557001590729,
      "learning_rate": 0.00019529685225761148,
      "loss": 0.1875,
      "step": 79
    },
    {
      "epoch": 0.08551576696953501,
      "grad_norm": 0.2234066128730774,
      "learning_rate": 0.00019516065949147943,
      "loss": 0.144,
      "step": 80
    },
    {
      "epoch": 0.0865847140566542,
      "grad_norm": 0.2428237646818161,
      "learning_rate": 0.0001950225717169031,
      "loss": 0.1616,
      "step": 81
    },
    {
      "epoch": 0.08765366114377339,
      "grad_norm": 0.2643418610095978,
      "learning_rate": 0.00019488259168373197,
      "loss": 0.186,
      "step": 82
    },
    {
      "epoch": 0.08872260823089258,
      "grad_norm": 0.27387118339538574,
      "learning_rate": 0.00019474072217949768,
      "loss": 0.1985,
      "step": 83
    },
    {
      "epoch": 0.08979155531801176,
      "grad_norm": 0.16810058057308197,
      "learning_rate": 0.00019459696602935837,
      "loss": 0.1341,
      "step": 84
    },
    {
      "epoch": 0.09086050240513095,
      "grad_norm": 0.2106161266565323,
      "learning_rate": 0.00019445132609604234,
      "loss": 0.146,
      "step": 85
    },
    {
      "epoch": 0.09192944949225014,
      "grad_norm": 0.21163029968738556,
      "learning_rate": 0.00019430380527979123,
      "loss": 0.1906,
      "step": 86
    },
    {
      "epoch": 0.09299839657936933,
      "grad_norm": 0.22357632219791412,
      "learning_rate": 0.00019415440651830208,
      "loss": 0.1809,
      "step": 87
    },
    {
      "epoch": 0.09406734366648851,
      "grad_norm": 0.24073223769664764,
      "learning_rate": 0.00019400313278666902,
      "loss": 0.1523,
      "step": 88
    },
    {
      "epoch": 0.0951362907536077,
      "grad_norm": 0.2266823649406433,
      "learning_rate": 0.00019384998709732386,
      "loss": 0.1661,
      "step": 89
    },
    {
      "epoch": 0.09620523784072689,
      "grad_norm": 0.1823054999113083,
      "learning_rate": 0.0001936949724999762,
      "loss": 0.141,
      "step": 90
    },
    {
      "epoch": 0.09727418492784608,
      "grad_norm": 0.19450423121452332,
      "learning_rate": 0.00019353809208155255,
      "loss": 0.1409,
      "step": 91
    },
    {
      "epoch": 0.09834313201496526,
      "grad_norm": 0.36040905117988586,
      "learning_rate": 0.00019337934896613516,
      "loss": 0.2028,
      "step": 92
    },
    {
      "epoch": 0.09941207910208445,
      "grad_norm": 0.21639132499694824,
      "learning_rate": 0.00019321874631489947,
      "loss": 0.1518,
      "step": 93
    },
    {
      "epoch": 0.10048102618920364,
      "grad_norm": 0.20086891949176788,
      "learning_rate": 0.00019305628732605137,
      "loss": 0.1514,
      "step": 94
    },
    {
      "epoch": 0.10154997327632283,
      "grad_norm": 0.26318323612213135,
      "learning_rate": 0.0001928919752347634,
      "loss": 0.1552,
      "step": 95
    },
    {
      "epoch": 0.10261892036344201,
      "grad_norm": 0.4378178119659424,
      "learning_rate": 0.00019272581331311047,
      "loss": 0.2046,
      "step": 96
    },
    {
      "epoch": 0.1036878674505612,
      "grad_norm": 0.2704489529132843,
      "learning_rate": 0.0001925578048700045,
      "loss": 0.1898,
      "step": 97
    },
    {
      "epoch": 0.10475681453768039,
      "grad_norm": 0.22563497722148895,
      "learning_rate": 0.0001923879532511287,
      "loss": 0.1709,
      "step": 98
    },
    {
      "epoch": 0.10582576162479958,
      "grad_norm": 0.23802538216114044,
      "learning_rate": 0.0001922162618388708,
      "loss": 0.1713,
      "step": 99
    },
    {
      "epoch": 0.10689470871191876,
      "grad_norm": 0.36641374230384827,
      "learning_rate": 0.00019204273405225587,
      "loss": 0.1519,
      "step": 100
    },
    {
      "epoch": 0.10689470871191876,
      "eval_loss": 0.15769338607788086,
      "eval_runtime": 288.7962,
      "eval_samples_per_second": 4.321,
      "eval_steps_per_second": 1.08,
      "step": 100
    },
    {
      "epoch": 0.10796365579903795,
      "grad_norm": 0.2594605088233948,
      "learning_rate": 0.0001918673733468781,
      "loss": 0.1791,
      "step": 101
    },
    {
      "epoch": 0.10903260288615714,
      "grad_norm": 0.23555320501327515,
      "learning_rate": 0.00019169018321483198,
      "loss": 0.1331,
      "step": 102
    },
    {
      "epoch": 0.11010154997327633,
      "grad_norm": 0.26566898822784424,
      "learning_rate": 0.00019151116718464284,
      "loss": 0.1662,
      "step": 103
    },
    {
      "epoch": 0.11117049706039552,
      "grad_norm": 0.1984795778989792,
      "learning_rate": 0.00019133032882119653,
      "loss": 0.1417,
      "step": 104
    },
    {
      "epoch": 0.1122394441475147,
      "grad_norm": 0.2048155814409256,
      "learning_rate": 0.0001911476717256685,
      "loss": 0.155,
      "step": 105
    },
    {
      "epoch": 0.11330839123463389,
      "grad_norm": 0.2680719792842865,
      "learning_rate": 0.00019096319953545185,
      "loss": 0.1726,
      "step": 106
    },
    {
      "epoch": 0.11437733832175308,
      "grad_norm": 0.25940361618995667,
      "learning_rate": 0.0001907769159240853,
      "loss": 0.189,
      "step": 107
    },
    {
      "epoch": 0.11544628540887227,
      "grad_norm": 0.23745258152484894,
      "learning_rate": 0.00019058882460117974,
      "loss": 0.1602,
      "step": 108
    },
    {
      "epoch": 0.11651523249599145,
      "grad_norm": 0.1798817664384842,
      "learning_rate": 0.00019039892931234435,
      "loss": 0.1397,
      "step": 109
    },
    {
      "epoch": 0.11758417958311064,
      "grad_norm": 0.23396556079387665,
      "learning_rate": 0.00019020723383911215,
      "loss": 0.1431,
      "step": 110
    },
    {
      "epoch": 0.11865312667022983,
      "grad_norm": 0.2253292351961136,
      "learning_rate": 0.00019001374199886467,
      "loss": 0.1432,
      "step": 111
    },
    {
      "epoch": 0.11972207375734902,
      "grad_norm": 0.18429440259933472,
      "learning_rate": 0.00018981845764475582,
      "loss": 0.125,
      "step": 112
    },
    {
      "epoch": 0.1207910208444682,
      "grad_norm": 0.20196135342121124,
      "learning_rate": 0.00018962138466563534,
      "loss": 0.1381,
      "step": 113
    },
    {
      "epoch": 0.12185996793158739,
      "grad_norm": 0.2396540343761444,
      "learning_rate": 0.00018942252698597113,
      "loss": 0.1618,
      "step": 114
    },
    {
      "epoch": 0.12292891501870658,
      "grad_norm": 0.2343413084745407,
      "learning_rate": 0.00018922188856577133,
      "loss": 0.1874,
      "step": 115
    },
    {
      "epoch": 0.12399786210582577,
      "grad_norm": 0.23756933212280273,
      "learning_rate": 0.00018901947340050528,
      "loss": 0.1758,
      "step": 116
    },
    {
      "epoch": 0.12506680919294494,
      "grad_norm": 0.22977083921432495,
      "learning_rate": 0.0001888152855210241,
      "loss": 0.1464,
      "step": 117
    },
    {
      "epoch": 0.12613575628006413,
      "grad_norm": 0.28528642654418945,
      "learning_rate": 0.00018860932899348028,
      "loss": 0.235,
      "step": 118
    },
    {
      "epoch": 0.1272047033671833,
      "grad_norm": 0.2515600025653839,
      "learning_rate": 0.0001884016079192468,
      "loss": 0.1562,
      "step": 119
    },
    {
      "epoch": 0.1282736504543025,
      "grad_norm": 0.2021314799785614,
      "learning_rate": 0.0001881921264348355,
      "loss": 0.1463,
      "step": 120
    },
    {
      "epoch": 0.1293425975414217,
      "grad_norm": 0.21556632220745087,
      "learning_rate": 0.0001879808887118146,
      "loss": 0.1441,
      "step": 121
    },
    {
      "epoch": 0.13041154462854088,
      "grad_norm": 0.17659486830234528,
      "learning_rate": 0.00018776789895672558,
      "loss": 0.1228,
      "step": 122
    },
    {
      "epoch": 0.13148049171566006,
      "grad_norm": 0.21652792394161224,
      "learning_rate": 0.00018755316141099964,
      "loss": 0.1225,
      "step": 123
    },
    {
      "epoch": 0.13254943880277925,
      "grad_norm": 0.23109202086925507,
      "learning_rate": 0.00018733668035087302,
      "loss": 0.1547,
      "step": 124
    },
    {
      "epoch": 0.13361838588989844,
      "grad_norm": 0.20818117260932922,
      "learning_rate": 0.00018711846008730192,
      "loss": 0.1387,
      "step": 125
    },
    {
      "epoch": 0.13468733297701763,
      "grad_norm": 0.25625041127204895,
      "learning_rate": 0.00018689850496587674,
      "loss": 0.1411,
      "step": 126
    },
    {
      "epoch": 0.13575628006413681,
      "grad_norm": 0.1805257648229599,
      "learning_rate": 0.00018667681936673528,
      "loss": 0.1198,
      "step": 127
    },
    {
      "epoch": 0.136825227151256,
      "grad_norm": 0.23043088614940643,
      "learning_rate": 0.00018645340770447595,
      "loss": 0.1705,
      "step": 128
    },
    {
      "epoch": 0.1378941742383752,
      "grad_norm": 0.28206974267959595,
      "learning_rate": 0.00018622827442806933,
      "loss": 0.2061,
      "step": 129
    },
    {
      "epoch": 0.13896312132549438,
      "grad_norm": 0.43259602785110474,
      "learning_rate": 0.00018600142402077006,
      "loss": 0.1901,
      "step": 130
    },
    {
      "epoch": 0.14003206841261356,
      "grad_norm": 0.2347777932882309,
      "learning_rate": 0.00018577286100002723,
      "loss": 0.1512,
      "step": 131
    },
    {
      "epoch": 0.14110101549973275,
      "grad_norm": 0.22234401106834412,
      "learning_rate": 0.00018554258991739452,
      "loss": 0.1599,
      "step": 132
    },
    {
      "epoch": 0.14216996258685194,
      "grad_norm": 0.1948978453874588,
      "learning_rate": 0.0001853106153584397,
      "loss": 0.1369,
      "step": 133
    },
    {
      "epoch": 0.14323890967397113,
      "grad_norm": 0.286856472492218,
      "learning_rate": 0.0001850769419426531,
      "loss": 0.1469,
      "step": 134
    },
    {
      "epoch": 0.14430785676109031,
      "grad_norm": 0.23832087218761444,
      "learning_rate": 0.0001848415743233557,
      "loss": 0.1647,
      "step": 135
    },
    {
      "epoch": 0.1453768038482095,
      "grad_norm": 0.20166629552841187,
      "learning_rate": 0.0001846045171876065,
      "loss": 0.1376,
      "step": 136
    },
    {
      "epoch": 0.1464457509353287,
      "grad_norm": 0.21285155415534973,
      "learning_rate": 0.0001843657752561092,
      "loss": 0.17,
      "step": 137
    },
    {
      "epoch": 0.14751469802244788,
      "grad_norm": 0.2186753898859024,
      "learning_rate": 0.00018412535328311814,
      "loss": 0.1369,
      "step": 138
    },
    {
      "epoch": 0.14858364510956706,
      "grad_norm": 0.23166581988334656,
      "learning_rate": 0.00018388325605634353,
      "loss": 0.1986,
      "step": 139
    },
    {
      "epoch": 0.14965259219668625,
      "grad_norm": 0.17363017797470093,
      "learning_rate": 0.00018363948839685636,
      "loss": 0.1275,
      "step": 140
    },
    {
      "epoch": 0.15072153928380544,
      "grad_norm": 0.30469027161598206,
      "learning_rate": 0.0001833940551589922,
      "loss": 0.1953,
      "step": 141
    },
    {
      "epoch": 0.15179048637092463,
      "grad_norm": 0.17805826663970947,
      "learning_rate": 0.00018314696123025454,
      "loss": 0.1276,
      "step": 142
    },
    {
      "epoch": 0.15285943345804381,
      "grad_norm": 0.20481310784816742,
      "learning_rate": 0.00018289821153121753,
      "loss": 0.1551,
      "step": 143
    },
    {
      "epoch": 0.153928380545163,
      "grad_norm": 0.22326795756816864,
      "learning_rate": 0.000182647811015428,
      "loss": 0.1669,
      "step": 144
    },
    {
      "epoch": 0.1549973276322822,
      "grad_norm": 0.21309794485569,
      "learning_rate": 0.00018239576466930668,
      "loss": 0.1652,
      "step": 145
    },
    {
      "epoch": 0.15606627471940138,
      "grad_norm": 0.2599788010120392,
      "learning_rate": 0.00018214207751204918,
      "loss": 0.181,
      "step": 146
    },
    {
      "epoch": 0.15713522180652056,
      "grad_norm": 0.20984572172164917,
      "learning_rate": 0.00018188675459552566,
      "loss": 0.1296,
      "step": 147
    },
    {
      "epoch": 0.15820416889363975,
      "grad_norm": 0.22253574430942535,
      "learning_rate": 0.0001816298010041806,
      "loss": 0.159,
      "step": 148
    },
    {
      "epoch": 0.15927311598075894,
      "grad_norm": 0.1939833164215088,
      "learning_rate": 0.00018137122185493123,
      "loss": 0.1497,
      "step": 149
    },
    {
      "epoch": 0.16034206306787813,
      "grad_norm": 0.18843698501586914,
      "learning_rate": 0.0001811110222970659,
      "loss": 0.1286,
      "step": 150
    },
    {
      "epoch": 0.16141101015499731,
      "grad_norm": 0.2895407974720001,
      "learning_rate": 0.00018084920751214144,
      "loss": 0.1761,
      "step": 151
    },
    {
      "epoch": 0.1624799572421165,
      "grad_norm": 0.23353318870067596,
      "learning_rate": 0.0001805857827138798,
      "loss": 0.1605,
      "step": 152
    },
    {
      "epoch": 0.1635489043292357,
      "grad_norm": 0.2674696445465088,
      "learning_rate": 0.00018032075314806448,
      "loss": 0.1676,
      "step": 153
    },
    {
      "epoch": 0.16461785141635488,
      "grad_norm": 0.2173846811056137,
      "learning_rate": 0.00018005412409243606,
      "loss": 0.1391,
      "step": 154
    },
    {
      "epoch": 0.16568679850347406,
      "grad_norm": 0.20100249350070953,
      "learning_rate": 0.0001797859008565868,
      "loss": 0.1254,
      "step": 155
    },
    {
      "epoch": 0.16675574559059328,
      "grad_norm": 0.22731080651283264,
      "learning_rate": 0.0001795160887818553,
      "loss": 0.1659,
      "step": 156
    },
    {
      "epoch": 0.16782469267771247,
      "grad_norm": 0.18832936882972717,
      "learning_rate": 0.00017924469324121986,
      "loss": 0.1329,
      "step": 157
    },
    {
      "epoch": 0.16889363976483165,
      "grad_norm": 0.23300229012966156,
      "learning_rate": 0.0001789717196391916,
      "loss": 0.123,
      "step": 158
    },
    {
      "epoch": 0.16996258685195084,
      "grad_norm": 0.21164602041244507,
      "learning_rate": 0.00017869717341170675,
      "loss": 0.1337,
      "step": 159
    },
    {
      "epoch": 0.17103153393907003,
      "grad_norm": 0.1995665431022644,
      "learning_rate": 0.00017842106002601856,
      "loss": 0.1265,
      "step": 160
    },
    {
      "epoch": 0.17210048102618922,
      "grad_norm": 0.20902606844902039,
      "learning_rate": 0.0001781433849805882,
      "loss": 0.1392,
      "step": 161
    },
    {
      "epoch": 0.1731694281133084,
      "grad_norm": 0.18716943264007568,
      "learning_rate": 0.00017786415380497553,
      "loss": 0.1187,
      "step": 162
    },
    {
      "epoch": 0.1742383752004276,
      "grad_norm": 0.22572395205497742,
      "learning_rate": 0.0001775833720597287,
      "loss": 0.1428,
      "step": 163
    },
    {
      "epoch": 0.17530732228754678,
      "grad_norm": 0.17674115300178528,
      "learning_rate": 0.0001773010453362737,
      "loss": 0.1292,
      "step": 164
    },
    {
      "epoch": 0.17637626937466597,
      "grad_norm": 0.2090616673231125,
      "learning_rate": 0.00017701717925680278,
      "loss": 0.1726,
      "step": 165
    },
    {
      "epoch": 0.17744521646178515,
      "grad_norm": 0.16593225300312042,
      "learning_rate": 0.00017673177947416258,
      "loss": 0.1201,
      "step": 166
    },
    {
      "epoch": 0.17851416354890434,
      "grad_norm": 0.19040323793888092,
      "learning_rate": 0.00017644485167174164,
      "loss": 0.1255,
      "step": 167
    },
    {
      "epoch": 0.17958311063602353,
      "grad_norm": 0.20446278154850006,
      "learning_rate": 0.00017615640156335712,
      "loss": 0.1433,
      "step": 168
    },
    {
      "epoch": 0.18065205772314272,
      "grad_norm": 0.154877707362175,
      "learning_rate": 0.000175866434893141,
      "loss": 0.1032,
      "step": 169
    },
    {
      "epoch": 0.1817210048102619,
      "grad_norm": 0.39736929535865784,
      "learning_rate": 0.00017557495743542585,
      "loss": 0.1302,
      "step": 170
    },
    {
      "epoch": 0.1827899518973811,
      "grad_norm": 0.2045171856880188,
      "learning_rate": 0.00017528197499462958,
      "loss": 0.1431,
      "step": 171
    },
    {
      "epoch": 0.18385889898450028,
      "grad_norm": 0.19055134057998657,
      "learning_rate": 0.0001749874934051401,
      "loss": 0.1465,
      "step": 172
    },
    {
      "epoch": 0.18492784607161947,
      "grad_norm": 0.2106601893901825,
      "learning_rate": 0.00017469151853119906,
      "loss": 0.1355,
      "step": 173
    },
    {
      "epoch": 0.18599679315873865,
      "grad_norm": 0.27130061388015747,
      "learning_rate": 0.00017439405626678496,
      "loss": 0.1614,
      "step": 174
    },
    {
      "epoch": 0.18706574024585784,
      "grad_norm": 0.2109244167804718,
      "learning_rate": 0.00017409511253549593,
      "loss": 0.1509,
      "step": 175
    },
    {
      "epoch": 0.18813468733297703,
      "grad_norm": 0.17472617328166962,
      "learning_rate": 0.00017379469329043167,
      "loss": 0.1117,
      "step": 176
    },
    {
      "epoch": 0.18920363442009622,
      "grad_norm": 0.2262858897447586,
      "learning_rate": 0.00017349280451407499,
      "loss": 0.1509,
      "step": 177
    },
    {
      "epoch": 0.1902725815072154,
      "grad_norm": 0.18161039054393768,
      "learning_rate": 0.00017318945221817255,
      "loss": 0.1182,
      "step": 178
    },
    {
      "epoch": 0.1913415285943346,
      "grad_norm": 0.17918163537979126,
      "learning_rate": 0.00017288464244361528,
      "loss": 0.1477,
      "step": 179
    },
    {
      "epoch": 0.19241047568145378,
      "grad_norm": 0.22044941782951355,
      "learning_rate": 0.00017257838126031797,
      "loss": 0.164,
      "step": 180
    },
    {
      "epoch": 0.19347942276857297,
      "grad_norm": 0.2787477970123291,
      "learning_rate": 0.00017227067476709854,
      "loss": 0.1934,
      "step": 181
    },
    {
      "epoch": 0.19454836985569215,
      "grad_norm": 0.2308286875486374,
      "learning_rate": 0.00017196152909155628,
      "loss": 0.1649,
      "step": 182
    },
    {
      "epoch": 0.19561731694281134,
      "grad_norm": 0.20782075822353363,
      "learning_rate": 0.00017165095038995033,
      "loss": 0.1595,
      "step": 183
    },
    {
      "epoch": 0.19668626402993053,
      "grad_norm": 0.17654070258140564,
      "learning_rate": 0.00017133894484707655,
      "loss": 0.1111,
      "step": 184
    },
    {
      "epoch": 0.19775521111704972,
      "grad_norm": 0.1830965131521225,
      "learning_rate": 0.00017102551867614478,
      "loss": 0.1184,
      "step": 185
    },
    {
      "epoch": 0.1988241582041689,
      "grad_norm": 0.151173397898674,
      "learning_rate": 0.00017071067811865476,
      "loss": 0.1262,
      "step": 186
    },
    {
      "epoch": 0.1998931052912881,
      "grad_norm": 0.26543188095092773,
      "learning_rate": 0.00017039442944427217,
      "loss": 0.1657,
      "step": 187
    },
    {
      "epoch": 0.20096205237840728,
      "grad_norm": 0.190846785902977,
      "learning_rate": 0.00017007677895070357,
      "loss": 0.1426,
      "step": 188
    },
    {
      "epoch": 0.20203099946552647,
      "grad_norm": 0.21965020895004272,
      "learning_rate": 0.00016975773296357098,
      "loss": 0.1327,
      "step": 189
    },
    {
      "epoch": 0.20309994655264565,
      "grad_norm": 0.17773884534835815,
      "learning_rate": 0.00016943729783628608,
      "loss": 0.1629,
      "step": 190
    },
    {
      "epoch": 0.20416889363976484,
      "grad_norm": 2.5340681076049805,
      "learning_rate": 0.00016911547994992348,
      "loss": 0.1637,
      "step": 191
    },
    {
      "epoch": 0.20523784072688403,
      "grad_norm": 0.21551626920700073,
      "learning_rate": 0.00016879228571309377,
      "loss": 0.1599,
      "step": 192
    },
    {
      "epoch": 0.20630678781400322,
      "grad_norm": 0.15846356749534607,
      "learning_rate": 0.00016846772156181594,
      "loss": 0.1375,
      "step": 193
    },
    {
      "epoch": 0.2073757349011224,
      "grad_norm": 0.14692065119743347,
      "learning_rate": 0.00016814179395938913,
      "loss": 0.1117,
      "step": 194
    },
    {
      "epoch": 0.2084446819882416,
      "grad_norm": 0.3390064537525177,
      "learning_rate": 0.00016781450939626387,
      "loss": 0.1271,
      "step": 195
    },
    {
      "epoch": 0.20951362907536078,
      "grad_norm": 0.17453472316265106,
      "learning_rate": 0.00016748587438991303,
      "loss": 0.1396,
      "step": 196
    },
    {
      "epoch": 0.21058257616247997,
      "grad_norm": 0.19709929823875427,
      "learning_rate": 0.00016715589548470185,
      "loss": 0.13,
      "step": 197
    },
    {
      "epoch": 0.21165152324959915,
      "grad_norm": 0.24145397543907166,
      "learning_rate": 0.00016682457925175763,
      "loss": 0.1634,
      "step": 198
    },
    {
      "epoch": 0.21272047033671834,
      "grad_norm": 0.17630192637443542,
      "learning_rate": 0.000166491932288839,
      "loss": 0.1299,
      "step": 199
    },
    {
      "epoch": 0.21378941742383753,
      "grad_norm": 0.2846260368824005,
      "learning_rate": 0.00016615796122020443,
      "loss": 0.164,
      "step": 200
    },
    {
      "epoch": 0.21378941742383753,
      "eval_loss": 0.1369618922472,
      "eval_runtime": 288.8956,
      "eval_samples_per_second": 4.32,
      "eval_steps_per_second": 1.08,
      "step": 200
    },
    {
      "epoch": 0.21485836451095672,
      "grad_norm": 0.18763378262519836,
      "learning_rate": 0.0001658226726964803,
      "loss": 0.1481,
      "step": 201
    },
    {
      "epoch": 0.2159273115980759,
      "grad_norm": 0.18099422752857208,
      "learning_rate": 0.00016548607339452853,
      "loss": 0.1275,
      "step": 202
    },
    {
      "epoch": 0.2169962586851951,
      "grad_norm": 0.22163395583629608,
      "learning_rate": 0.0001651481700173136,
      "loss": 0.1657,
      "step": 203
    },
    {
      "epoch": 0.21806520577231428,
      "grad_norm": 0.1837584376335144,
      "learning_rate": 0.00016480896929376907,
      "loss": 0.133,
      "step": 204
    },
    {
      "epoch": 0.21913415285943347,
      "grad_norm": 0.23913772404193878,
      "learning_rate": 0.0001644684779786635,
      "loss": 0.1439,
      "step": 205
    },
    {
      "epoch": 0.22020309994655266,
      "grad_norm": 0.24784860014915466,
      "learning_rate": 0.0001641267028524661,
      "loss": 0.1588,
      "step": 206
    },
    {
      "epoch": 0.22127204703367184,
      "grad_norm": 0.189157634973526,
      "learning_rate": 0.00016378365072121156,
      "loss": 0.154,
      "step": 207
    },
    {
      "epoch": 0.22234099412079103,
      "grad_norm": 0.18977932631969452,
      "learning_rate": 0.00016343932841636456,
      "loss": 0.1338,
      "step": 208
    },
    {
      "epoch": 0.22340994120791022,
      "grad_norm": 0.1805882751941681,
      "learning_rate": 0.00016309374279468376,
      "loss": 0.1333,
      "step": 209
    },
    {
      "epoch": 0.2244788882950294,
      "grad_norm": 0.17292280495166779,
      "learning_rate": 0.0001627469007380852,
      "loss": 0.1269,
      "step": 210
    },
    {
      "epoch": 0.2255478353821486,
      "grad_norm": 0.20466209948062897,
      "learning_rate": 0.00016239880915350536,
      "loss": 0.1548,
      "step": 211
    },
    {
      "epoch": 0.22661678246926778,
      "grad_norm": 0.18790069222450256,
      "learning_rate": 0.00016204947497276345,
      "loss": 0.1228,
      "step": 212
    },
    {
      "epoch": 0.22768572955638697,
      "grad_norm": 0.17624615132808685,
      "learning_rate": 0.00016169890515242355,
      "loss": 0.1152,
      "step": 213
    },
    {
      "epoch": 0.22875467664350616,
      "grad_norm": 0.18744930624961853,
      "learning_rate": 0.00016134710667365596,
      "loss": 0.1446,
      "step": 214
    },
    {
      "epoch": 0.22982362373062534,
      "grad_norm": 0.18070746958255768,
      "learning_rate": 0.0001609940865420982,
      "loss": 0.1391,
      "step": 215
    },
    {
      "epoch": 0.23089257081774453,
      "grad_norm": 0.16776110231876373,
      "learning_rate": 0.00016063985178771555,
      "loss": 0.1237,
      "step": 216
    },
    {
      "epoch": 0.23196151790486372,
      "grad_norm": 0.2151951640844345,
      "learning_rate": 0.00016028440946466095,
      "loss": 0.1541,
      "step": 217
    },
    {
      "epoch": 0.2330304649919829,
      "grad_norm": 0.17011284828186035,
      "learning_rate": 0.0001599277666511347,
      "loss": 0.1127,
      "step": 218
    },
    {
      "epoch": 0.2340994120791021,
      "grad_norm": 0.15342286229133606,
      "learning_rate": 0.00015956993044924334,
      "loss": 0.12,
      "step": 219
    },
    {
      "epoch": 0.23516835916622128,
      "grad_norm": 0.15606354176998138,
      "learning_rate": 0.00015921090798485832,
      "loss": 0.1177,
      "step": 220
    },
    {
      "epoch": 0.23623730625334047,
      "grad_norm": 0.18545080721378326,
      "learning_rate": 0.00015885070640747395,
      "loss": 0.1296,
      "step": 221
    },
    {
      "epoch": 0.23730625334045966,
      "grad_norm": 0.21306568384170532,
      "learning_rate": 0.0001584893328900653,
      "loss": 0.1224,
      "step": 222
    },
    {
      "epoch": 0.23837520042757884,
      "grad_norm": 0.19317717850208282,
      "learning_rate": 0.00015812679462894513,
      "loss": 0.144,
      "step": 223
    },
    {
      "epoch": 0.23944414751469803,
      "grad_norm": 0.14700594544410706,
      "learning_rate": 0.0001577630988436206,
      "loss": 0.1212,
      "step": 224
    },
    {
      "epoch": 0.24051309460181722,
      "grad_norm": 0.20297332108020782,
      "learning_rate": 0.00015739825277664955,
      "loss": 0.1477,
      "step": 225
    },
    {
      "epoch": 0.2415820416889364,
      "grad_norm": 0.21095982193946838,
      "learning_rate": 0.0001570322636934964,
      "loss": 0.1258,
      "step": 226
    },
    {
      "epoch": 0.2426509887760556,
      "grad_norm": 0.24124930799007416,
      "learning_rate": 0.00015666513888238726,
      "loss": 0.1861,
      "step": 227
    },
    {
      "epoch": 0.24371993586317478,
      "grad_norm": 0.18464890122413635,
      "learning_rate": 0.00015629688565416478,
      "loss": 0.1393,
      "step": 228
    },
    {
      "epoch": 0.24478888295029397,
      "grad_norm": 0.1888304352760315,
      "learning_rate": 0.00015592751134214282,
      "loss": 0.1408,
      "step": 229
    },
    {
      "epoch": 0.24585783003741316,
      "grad_norm": 0.16777703166007996,
      "learning_rate": 0.00015555702330196023,
      "loss": 0.113,
      "step": 230
    },
    {
      "epoch": 0.24692677712453234,
      "grad_norm": 0.21285921335220337,
      "learning_rate": 0.0001551854289114343,
      "loss": 0.1451,
      "step": 231
    },
    {
      "epoch": 0.24799572421165153,
      "grad_norm": 0.2046755701303482,
      "learning_rate": 0.000154812735570414,
      "loss": 0.1059,
      "step": 232
    },
    {
      "epoch": 0.24906467129877072,
      "grad_norm": 0.20832888782024384,
      "learning_rate": 0.0001544389507006326,
      "loss": 0.1333,
      "step": 233
    },
    {
      "epoch": 0.2501336183858899,
      "grad_norm": 0.15345333516597748,
      "learning_rate": 0.00015406408174555976,
      "loss": 0.1083,
      "step": 234
    },
    {
      "epoch": 0.25120256547300907,
      "grad_norm": 0.18688637018203735,
      "learning_rate": 0.00015368813617025343,
      "loss": 0.1225,
      "step": 235
    },
    {
      "epoch": 0.25227151256012825,
      "grad_norm": 0.22301477193832397,
      "learning_rate": 0.00015331112146121104,
      "loss": 0.1515,
      "step": 236
    },
    {
      "epoch": 0.25334045964724744,
      "grad_norm": 0.21012060344219208,
      "learning_rate": 0.00015293304512622072,
      "loss": 0.1566,
      "step": 237
    },
    {
      "epoch": 0.2544094067343666,
      "grad_norm": 0.17001067101955414,
      "learning_rate": 0.00015255391469421128,
      "loss": 0.1249,
      "step": 238
    },
    {
      "epoch": 0.2554783538214858,
      "grad_norm": 0.23301930725574493,
      "learning_rate": 0.00015217373771510292,
      "loss": 0.1624,
      "step": 239
    },
    {
      "epoch": 0.256547300908605,
      "grad_norm": 0.18844428658485413,
      "learning_rate": 0.00015179252175965633,
      "loss": 0.1299,
      "step": 240
    },
    {
      "epoch": 0.2576162479957242,
      "grad_norm": 0.20432229340076447,
      "learning_rate": 0.00015141027441932216,
      "loss": 0.1499,
      "step": 241
    },
    {
      "epoch": 0.2586851950828434,
      "grad_norm": 0.21431198716163635,
      "learning_rate": 0.00015102700330609,
      "loss": 0.1414,
      "step": 242
    },
    {
      "epoch": 0.25975414216996257,
      "grad_norm": 0.185688316822052,
      "learning_rate": 0.00015064271605233636,
      "loss": 0.1243,
      "step": 243
    },
    {
      "epoch": 0.26082308925708175,
      "grad_norm": 0.1799941509962082,
      "learning_rate": 0.00015025742031067317,
      "loss": 0.1382,
      "step": 244
    },
    {
      "epoch": 0.26189203634420094,
      "grad_norm": 0.1698986291885376,
      "learning_rate": 0.0001498711237537951,
      "loss": 0.132,
      "step": 245
    },
    {
      "epoch": 0.26296098343132013,
      "grad_norm": 0.17504070699214935,
      "learning_rate": 0.00014948383407432678,
      "loss": 0.1386,
      "step": 246
    },
    {
      "epoch": 0.2640299305184393,
      "grad_norm": 0.28007063269615173,
      "learning_rate": 0.00014909555898466974,
      "loss": 0.1891,
      "step": 247
    },
    {
      "epoch": 0.2650988776055585,
      "grad_norm": 0.17352139949798584,
      "learning_rate": 0.00014870630621684872,
      "loss": 0.1375,
      "step": 248
    },
    {
      "epoch": 0.2661678246926777,
      "grad_norm": 0.16270796954631805,
      "learning_rate": 0.00014831608352235774,
      "loss": 0.1329,
      "step": 249
    },
    {
      "epoch": 0.2672367717797969,
      "grad_norm": 0.1661270558834076,
      "learning_rate": 0.0001479248986720057,
      "loss": 0.129,
      "step": 250
    },
    {
      "epoch": 0.26830571886691607,
      "grad_norm": 0.15105728805065155,
      "learning_rate": 0.00014753275945576165,
      "loss": 0.1299,
      "step": 251
    },
    {
      "epoch": 0.26937466595403525,
      "grad_norm": 0.21838223934173584,
      "learning_rate": 0.0001471396736825998,
      "loss": 0.1563,
      "step": 252
    },
    {
      "epoch": 0.27044361304115444,
      "grad_norm": 0.16052576899528503,
      "learning_rate": 0.00014674564918034367,
      "loss": 0.1301,
      "step": 253
    },
    {
      "epoch": 0.27151256012827363,
      "grad_norm": 0.1738349050283432,
      "learning_rate": 0.00014635069379551055,
      "loss": 0.1432,
      "step": 254
    },
    {
      "epoch": 0.2725815072153928,
      "grad_norm": 0.1612621247768402,
      "learning_rate": 0.00014595481539315518,
      "loss": 0.1129,
      "step": 255
    },
    {
      "epoch": 0.273650454302512,
      "grad_norm": 0.17328664660453796,
      "learning_rate": 0.00014555802185671298,
      "loss": 0.1414,
      "step": 256
    },
    {
      "epoch": 0.2747194013896312,
      "grad_norm": 0.21222056448459625,
      "learning_rate": 0.0001451603210878431,
      "loss": 0.1222,
      "step": 257
    },
    {
      "epoch": 0.2757883484767504,
      "grad_norm": 0.23580165207386017,
      "learning_rate": 0.00014476172100627127,
      "loss": 0.1539,
      "step": 258
    },
    {
      "epoch": 0.27685729556386957,
      "grad_norm": 0.19674788415431976,
      "learning_rate": 0.00014436222954963178,
      "loss": 0.1318,
      "step": 259
    },
    {
      "epoch": 0.27792624265098875,
      "grad_norm": 0.180185005068779,
      "learning_rate": 0.00014396185467330973,
      "loss": 0.1174,
      "step": 260
    },
    {
      "epoch": 0.27899518973810794,
      "grad_norm": 0.17812158167362213,
      "learning_rate": 0.00014356060435028226,
      "loss": 0.1169,
      "step": 261
    },
    {
      "epoch": 0.28006413682522713,
      "grad_norm": 0.17591345310211182,
      "learning_rate": 0.00014315848657096004,
      "loss": 0.1132,
      "step": 262
    },
    {
      "epoch": 0.2811330839123463,
      "grad_norm": 0.20523187518119812,
      "learning_rate": 0.00014275550934302823,
      "loss": 0.1358,
      "step": 263
    },
    {
      "epoch": 0.2822020309994655,
      "grad_norm": 0.18519556522369385,
      "learning_rate": 0.00014235168069128657,
      "loss": 0.1382,
      "step": 264
    },
    {
      "epoch": 0.2832709780865847,
      "grad_norm": 0.2633715271949768,
      "learning_rate": 0.00014194700865749008,
      "loss": 0.143,
      "step": 265
    },
    {
      "epoch": 0.2843399251737039,
      "grad_norm": 0.30586305260658264,
      "learning_rate": 0.00014154150130018866,
      "loss": 0.1788,
      "step": 266
    },
    {
      "epoch": 0.28540887226082307,
      "grad_norm": 0.14305737614631653,
      "learning_rate": 0.0001411351666945666,
      "loss": 0.0905,
      "step": 267
    },
    {
      "epoch": 0.28647781934794225,
      "grad_norm": 0.19511297345161438,
      "learning_rate": 0.00014072801293228188,
      "loss": 0.1424,
      "step": 268
    },
    {
      "epoch": 0.28754676643506144,
      "grad_norm": 0.3783949911594391,
      "learning_rate": 0.00014032004812130497,
      "loss": 0.1346,
      "step": 269
    },
    {
      "epoch": 0.28861571352218063,
      "grad_norm": 0.19596309959888458,
      "learning_rate": 0.00013991128038575741,
      "loss": 0.1384,
      "step": 270
    },
    {
      "epoch": 0.2896846606092998,
      "grad_norm": 0.204111248254776,
      "learning_rate": 0.00013950171786575002,
      "loss": 0.1577,
      "step": 271
    },
    {
      "epoch": 0.290753607696419,
      "grad_norm": 0.19096602499485016,
      "learning_rate": 0.00013909136871722067,
      "loss": 0.1333,
      "step": 272
    },
    {
      "epoch": 0.2918225547835382,
      "grad_norm": 0.21785670518875122,
      "learning_rate": 0.00013868024111177206,
      "loss": 0.1247,
      "step": 273
    },
    {
      "epoch": 0.2928915018706574,
      "grad_norm": 0.21137268841266632,
      "learning_rate": 0.000138268343236509,
      "loss": 0.1231,
      "step": 274
    },
    {
      "epoch": 0.29396044895777657,
      "grad_norm": 0.21565985679626465,
      "learning_rate": 0.00013785568329387508,
      "loss": 0.1514,
      "step": 275
    },
    {
      "epoch": 0.29502939604489575,
      "grad_norm": 0.1645229011774063,
      "learning_rate": 0.00013744226950148972,
      "loss": 0.1204,
      "step": 276
    },
    {
      "epoch": 0.29609834313201494,
      "grad_norm": 0.18113082647323608,
      "learning_rate": 0.0001370281100919842,
      "loss": 0.1374,
      "step": 277
    },
    {
      "epoch": 0.29716729021913413,
      "grad_norm": 0.15095637738704681,
      "learning_rate": 0.00013661321331283796,
      "loss": 0.0931,
      "step": 278
    },
    {
      "epoch": 0.2982362373062533,
      "grad_norm": 0.17453165352344513,
      "learning_rate": 0.00013619758742621418,
      "loss": 0.1336,
      "step": 279
    },
    {
      "epoch": 0.2993051843933725,
      "grad_norm": 0.1741105020046234,
      "learning_rate": 0.00013578124070879534,
      "loss": 0.1263,
      "step": 280
    },
    {
      "epoch": 0.3003741314804917,
      "grad_norm": 0.17308761179447174,
      "learning_rate": 0.0001353641814516185,
      "loss": 0.1254,
      "step": 281
    },
    {
      "epoch": 0.3014430785676109,
      "grad_norm": 0.15845873951911926,
      "learning_rate": 0.00013494641795990986,
      "loss": 0.1217,
      "step": 282
    },
    {
      "epoch": 0.30251202565473007,
      "grad_norm": 0.16994373500347137,
      "learning_rate": 0.00013452795855291977,
      "loss": 0.1282,
      "step": 283
    },
    {
      "epoch": 0.30358097274184925,
      "grad_norm": 0.1903841644525528,
      "learning_rate": 0.00013410881156375684,
      "loss": 0.1397,
      "step": 284
    },
    {
      "epoch": 0.30464991982896844,
      "grad_norm": 0.17168985307216644,
      "learning_rate": 0.000133688985339222,
      "loss": 0.1322,
      "step": 285
    },
    {
      "epoch": 0.30571886691608763,
      "grad_norm": 0.17627929151058197,
      "learning_rate": 0.00013326848823964243,
      "loss": 0.1262,
      "step": 286
    },
    {
      "epoch": 0.3067878140032068,
      "grad_norm": 0.18915101885795593,
      "learning_rate": 0.00013284732863870475,
      "loss": 0.1422,
      "step": 287
    },
    {
      "epoch": 0.307856761090326,
      "grad_norm": 0.15876813232898712,
      "learning_rate": 0.00013242551492328875,
      "loss": 0.1197,
      "step": 288
    },
    {
      "epoch": 0.3089257081774452,
      "grad_norm": 0.15107540786266327,
      "learning_rate": 0.00013200305549329995,
      "loss": 0.1146,
      "step": 289
    },
    {
      "epoch": 0.3099946552645644,
      "grad_norm": 0.16001242399215698,
      "learning_rate": 0.0001315799587615025,
      "loss": 0.1027,
      "step": 290
    },
    {
      "epoch": 0.31106360235168357,
      "grad_norm": 0.166344553232193,
      "learning_rate": 0.00013115623315335172,
      "loss": 0.1282,
      "step": 291
    },
    {
      "epoch": 0.31213254943880275,
      "grad_norm": 0.17358538508415222,
      "learning_rate": 0.0001307318871068261,
      "loss": 0.1266,
      "step": 292
    },
    {
      "epoch": 0.31320149652592194,
      "grad_norm": 0.1970827579498291,
      "learning_rate": 0.00013030692907225956,
      "loss": 0.1049,
      "step": 293
    },
    {
      "epoch": 0.31427044361304113,
      "grad_norm": 0.19751529395580292,
      "learning_rate": 0.00012988136751217291,
      "loss": 0.1383,
      "step": 294
    },
    {
      "epoch": 0.3153393907001603,
      "grad_norm": 0.24741291999816895,
      "learning_rate": 0.00012945521090110547,
      "loss": 0.158,
      "step": 295
    },
    {
      "epoch": 0.3164083377872795,
      "grad_norm": 0.16440382599830627,
      "learning_rate": 0.00012902846772544624,
      "loss": 0.1189,
      "step": 296
    },
    {
      "epoch": 0.3174772848743987,
      "grad_norm": 0.2023131549358368,
      "learning_rate": 0.00012860114648326502,
      "loss": 0.1633,
      "step": 297
    },
    {
      "epoch": 0.3185462319615179,
      "grad_norm": 0.2175939977169037,
      "learning_rate": 0.00012817325568414297,
      "loss": 0.1672,
      "step": 298
    },
    {
      "epoch": 0.31961517904863707,
      "grad_norm": 0.17920686304569244,
      "learning_rate": 0.00012774480384900345,
      "loss": 0.1236,
      "step": 299
    },
    {
      "epoch": 0.32068412613575625,
      "grad_norm": 0.18356698751449585,
      "learning_rate": 0.000127315799509942,
      "loss": 0.1199,
      "step": 300
    },
    {
      "epoch": 0.32068412613575625,
      "eval_loss": 0.13152572512626648,
      "eval_runtime": 289.0306,
      "eval_samples_per_second": 4.318,
      "eval_steps_per_second": 1.079,
      "step": 300
    },
    {
      "epoch": 0.32175307322287544,
      "grad_norm": 0.17195501923561096,
      "learning_rate": 0.00012688625121005668,
      "loss": 0.1167,
      "step": 301
    },
    {
      "epoch": 0.32282202030999463,
      "grad_norm": 0.22263203561306,
      "learning_rate": 0.0001264561675032779,
      "loss": 0.1406,
      "step": 302
    },
    {
      "epoch": 0.3238909673971138,
      "grad_norm": 0.18973477184772491,
      "learning_rate": 0.000126025556954198,
      "loss": 0.1294,
      "step": 303
    },
    {
      "epoch": 0.324959914484233,
      "grad_norm": 0.15628819167613983,
      "learning_rate": 0.00012559442813790076,
      "loss": 0.1193,
      "step": 304
    },
    {
      "epoch": 0.3260288615713522,
      "grad_norm": 0.16901010274887085,
      "learning_rate": 0.00012516278963979057,
      "loss": 0.1426,
      "step": 305
    },
    {
      "epoch": 0.3270978086584714,
      "grad_norm": 0.1784602552652359,
      "learning_rate": 0.00012473065005542155,
      "loss": 0.1152,
      "step": 306
    },
    {
      "epoch": 0.32816675574559057,
      "grad_norm": 0.2367001473903656,
      "learning_rate": 0.0001242980179903264,
      "loss": 0.1611,
      "step": 307
    },
    {
      "epoch": 0.32923570283270975,
      "grad_norm": 0.21481367945671082,
      "learning_rate": 0.00012386490205984488,
      "loss": 0.1334,
      "step": 308
    },
    {
      "epoch": 0.33030464991982894,
      "grad_norm": 0.32029592990875244,
      "learning_rate": 0.00012343131088895235,
      "loss": 0.1074,
      "step": 309
    },
    {
      "epoch": 0.33137359700694813,
      "grad_norm": 0.1740630567073822,
      "learning_rate": 0.00012299725311208808,
      "loss": 0.0963,
      "step": 310
    },
    {
      "epoch": 0.3324425440940673,
      "grad_norm": 0.17548616230487823,
      "learning_rate": 0.00012256273737298312,
      "loss": 0.1164,
      "step": 311
    },
    {
      "epoch": 0.33351149118118656,
      "grad_norm": 0.18481452763080597,
      "learning_rate": 0.00012212777232448838,
      "loss": 0.1345,
      "step": 312
    },
    {
      "epoch": 0.33458043826830575,
      "grad_norm": 0.18825702369213104,
      "learning_rate": 0.00012169236662840212,
      "loss": 0.1528,
      "step": 313
    },
    {
      "epoch": 0.33564938535542493,
      "grad_norm": 0.17717814445495605,
      "learning_rate": 0.00012125652895529766,
      "loss": 0.1194,
      "step": 314
    },
    {
      "epoch": 0.3367183324425441,
      "grad_norm": 0.25791436433792114,
      "learning_rate": 0.0001208202679843506,
      "loss": 0.1718,
      "step": 315
    },
    {
      "epoch": 0.3377872795296633,
      "grad_norm": 0.1630530208349228,
      "learning_rate": 0.00012038359240316589,
      "loss": 0.1178,
      "step": 316
    },
    {
      "epoch": 0.3388562266167825,
      "grad_norm": 0.16724711656570435,
      "learning_rate": 0.00011994651090760505,
      "loss": 0.147,
      "step": 317
    },
    {
      "epoch": 0.3399251737039017,
      "grad_norm": 0.1993224173784256,
      "learning_rate": 0.00011950903220161285,
      "loss": 0.1492,
      "step": 318
    },
    {
      "epoch": 0.34099412079102087,
      "grad_norm": 0.15769435465335846,
      "learning_rate": 0.00011907116499704401,
      "loss": 0.1246,
      "step": 319
    },
    {
      "epoch": 0.34206306787814006,
      "grad_norm": 0.1998382806777954,
      "learning_rate": 0.0001186329180134898,
      "loss": 0.1245,
      "step": 320
    },
    {
      "epoch": 0.34313201496525925,
      "grad_norm": 0.20432628691196442,
      "learning_rate": 0.00011819429997810418,
      "loss": 0.164,
      "step": 321
    },
    {
      "epoch": 0.34420096205237843,
      "grad_norm": 0.315298467874527,
      "learning_rate": 0.00011775531962543036,
      "loss": 0.1371,
      "step": 322
    },
    {
      "epoch": 0.3452699091394976,
      "grad_norm": 0.18633940815925598,
      "learning_rate": 0.00011731598569722642,
      "loss": 0.1138,
      "step": 323
    },
    {
      "epoch": 0.3463388562266168,
      "grad_norm": 0.1944500356912613,
      "learning_rate": 0.0001168763069422916,
      "loss": 0.1379,
      "step": 324
    },
    {
      "epoch": 0.347407803313736,
      "grad_norm": 0.22939671576023102,
      "learning_rate": 0.000116436292116292,
      "loss": 0.1119,
      "step": 325
    },
    {
      "epoch": 0.3484767504008552,
      "grad_norm": 0.175564244389534,
      "learning_rate": 0.00011599594998158602,
      "loss": 0.1322,
      "step": 326
    },
    {
      "epoch": 0.34954569748797437,
      "grad_norm": 0.17719417810440063,
      "learning_rate": 0.00011555528930705013,
      "loss": 0.158,
      "step": 327
    },
    {
      "epoch": 0.35061464457509356,
      "grad_norm": 0.17542250454425812,
      "learning_rate": 0.00011511431886790407,
      "loss": 0.1422,
      "step": 328
    },
    {
      "epoch": 0.35168359166221275,
      "grad_norm": 0.16461284458637238,
      "learning_rate": 0.00011467304744553618,
      "loss": 0.1319,
      "step": 329
    },
    {
      "epoch": 0.35275253874933193,
      "grad_norm": 0.2131093442440033,
      "learning_rate": 0.00011423148382732853,
      "loss": 0.1429,
      "step": 330
    },
    {
      "epoch": 0.3538214858364511,
      "grad_norm": 0.17762120068073273,
      "learning_rate": 0.00011378963680648187,
      "loss": 0.1444,
      "step": 331
    },
    {
      "epoch": 0.3548904329235703,
      "grad_norm": 0.1586885303258896,
      "learning_rate": 0.00011334751518184061,
      "loss": 0.1067,
      "step": 332
    },
    {
      "epoch": 0.3559593800106895,
      "grad_norm": 0.1545344591140747,
      "learning_rate": 0.00011290512775771758,
      "loss": 0.1382,
      "step": 333
    },
    {
      "epoch": 0.3570283270978087,
      "grad_norm": 0.15905602276325226,
      "learning_rate": 0.0001124624833437186,
      "loss": 0.1121,
      "step": 334
    },
    {
      "epoch": 0.35809727418492787,
      "grad_norm": 0.22019295394420624,
      "learning_rate": 0.0001120195907545672,
      "loss": 0.1273,
      "step": 335
    },
    {
      "epoch": 0.35916622127204706,
      "grad_norm": 0.15988142788410187,
      "learning_rate": 0.00011157645880992902,
      "loss": 0.1064,
      "step": 336
    },
    {
      "epoch": 0.36023516835916625,
      "grad_norm": 0.17529579997062683,
      "learning_rate": 0.0001111330963342361,
      "loss": 0.12,
      "step": 337
    },
    {
      "epoch": 0.36130411544628543,
      "grad_norm": 0.15964984893798828,
      "learning_rate": 0.00011068951215651132,
      "loss": 0.124,
      "step": 338
    },
    {
      "epoch": 0.3623730625334046,
      "grad_norm": 0.14573580026626587,
      "learning_rate": 0.00011024571511019235,
      "loss": 0.1018,
      "step": 339
    },
    {
      "epoch": 0.3634420096205238,
      "grad_norm": 0.1717827022075653,
      "learning_rate": 0.0001098017140329561,
      "loss": 0.1214,
      "step": 340
    },
    {
      "epoch": 0.364510956707643,
      "grad_norm": 0.1566823571920395,
      "learning_rate": 0.00010935751776654224,
      "loss": 0.1022,
      "step": 341
    },
    {
      "epoch": 0.3655799037947622,
      "grad_norm": 0.16118775308132172,
      "learning_rate": 0.0001089131351565776,
      "loss": 0.1235,
      "step": 342
    },
    {
      "epoch": 0.36664885088188137,
      "grad_norm": 0.16470201313495636,
      "learning_rate": 0.00010846857505239973,
      "loss": 0.112,
      "step": 343
    },
    {
      "epoch": 0.36771779796900056,
      "grad_norm": 0.14973486959934235,
      "learning_rate": 0.00010802384630688078,
      "loss": 0.125,
      "step": 344
    },
    {
      "epoch": 0.36878674505611975,
      "grad_norm": 0.18552237749099731,
      "learning_rate": 0.00010757895777625118,
      "loss": 0.1203,
      "step": 345
    },
    {
      "epoch": 0.36985569214323893,
      "grad_norm": 0.2491609752178192,
      "learning_rate": 0.00010713391831992323,
      "loss": 0.1705,
      "step": 346
    },
    {
      "epoch": 0.3709246392303581,
      "grad_norm": 0.20768113434314728,
      "learning_rate": 0.00010668873680031486,
      "loss": 0.1483,
      "step": 347
    },
    {
      "epoch": 0.3719935863174773,
      "grad_norm": 0.20317085087299347,
      "learning_rate": 0.00010624342208267292,
      "loss": 0.1364,
      "step": 348
    },
    {
      "epoch": 0.3730625334045965,
      "grad_norm": 0.16608084738254547,
      "learning_rate": 0.00010579798303489675,
      "loss": 0.1046,
      "step": 349
    },
    {
      "epoch": 0.3741314804917157,
      "grad_norm": 0.2229534536600113,
      "learning_rate": 0.00010535242852736151,
      "loss": 0.1065,
      "step": 350
    },
    {
      "epoch": 0.37520042757883487,
      "grad_norm": 0.15245893597602844,
      "learning_rate": 0.00010490676743274181,
      "loss": 0.1015,
      "step": 351
    },
    {
      "epoch": 0.37626937466595406,
      "grad_norm": 0.17506876587867737,
      "learning_rate": 0.00010446100862583459,
      "loss": 0.1406,
      "step": 352
    },
    {
      "epoch": 0.37733832175307325,
      "grad_norm": 0.15572670102119446,
      "learning_rate": 0.00010401516098338271,
      "loss": 0.1031,
      "step": 353
    },
    {
      "epoch": 0.37840726884019243,
      "grad_norm": 0.17110490798950195,
      "learning_rate": 0.00010356923338389806,
      "loss": 0.1194,
      "step": 354
    },
    {
      "epoch": 0.3794762159273116,
      "grad_norm": 0.20592443645000458,
      "learning_rate": 0.00010312323470748478,
      "loss": 0.1529,
      "step": 355
    },
    {
      "epoch": 0.3805451630144308,
      "grad_norm": 0.18145787715911865,
      "learning_rate": 0.00010267717383566246,
      "loss": 0.1253,
      "step": 356
    },
    {
      "epoch": 0.38161411010155,
      "grad_norm": 0.15736672282218933,
      "learning_rate": 0.00010223105965118913,
      "loss": 0.1104,
      "step": 357
    },
    {
      "epoch": 0.3826830571886692,
      "grad_norm": 0.17708028852939606,
      "learning_rate": 0.0001017849010378846,
      "loss": 0.1253,
      "step": 358
    },
    {
      "epoch": 0.38375200427578837,
      "grad_norm": 0.20931871235370636,
      "learning_rate": 0.0001013387068804534,
      "loss": 0.1364,
      "step": 359
    },
    {
      "epoch": 0.38482095136290756,
      "grad_norm": 0.200309619307518,
      "learning_rate": 0.00010089248606430775,
      "loss": 0.1235,
      "step": 360
    },
    {
      "epoch": 0.38588989845002675,
      "grad_norm": 0.19272620975971222,
      "learning_rate": 0.00010044624747539094,
      "loss": 0.1662,
      "step": 361
    },
    {
      "epoch": 0.38695884553714593,
      "grad_norm": 0.1405670940876007,
|
"learning_rate": 0.0001, |
|
"loss": 0.1104, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.3880277926242651, |
|
"grad_norm": 0.15820738673210144, |
|
"learning_rate": 9.95537525246091e-05, |
|
"loss": 0.1158, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.3890967397113843, |
|
"grad_norm": 0.17021127045154572, |
|
"learning_rate": 9.910751393569229e-05, |
|
"loss": 0.1366, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.3901656867985035, |
|
"grad_norm": 0.19209320843219757, |
|
"learning_rate": 9.866129311954664e-05, |
|
"loss": 0.1362, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.3912346338856227, |
|
"grad_norm": 0.23325751721858978, |
|
"learning_rate": 9.821509896211539e-05, |
|
"loss": 0.1575, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.39230358097274187, |
|
"grad_norm": 0.1879902184009552, |
|
"learning_rate": 9.776894034881089e-05, |
|
"loss": 0.153, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.39337252805986106, |
|
"grad_norm": 0.15710167586803436, |
|
"learning_rate": 9.732282616433756e-05, |
|
"loss": 0.1192, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.39444147514698025, |
|
"grad_norm": 0.13521148264408112, |
|
"learning_rate": 9.687676529251526e-05, |
|
"loss": 0.1033, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.39551042223409943, |
|
"grad_norm": 0.16447614133358002, |
|
"learning_rate": 9.643076661610196e-05, |
|
"loss": 0.1361, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.3965793693212186, |
|
"grad_norm": 0.17937707901000977, |
|
"learning_rate": 9.598483901661731e-05, |
|
"loss": 0.0982, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.3976483164083378, |
|
"grad_norm": 0.16494877636432648, |
|
"learning_rate": 9.553899137416545e-05, |
|
"loss": 0.1331, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.398717263495457, |
|
"grad_norm": 0.12207219749689102, |
|
"learning_rate": 9.509323256725821e-05, |
|
"loss": 0.0951, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.3997862105825762, |
|
"grad_norm": 0.15613038837909698, |
|
"learning_rate": 9.464757147263849e-05, |
|
"loss": 0.1296, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.40085515766969537, |
|
"grad_norm": 0.15942755341529846, |
|
"learning_rate": 9.420201696510332e-05, |
|
"loss": 0.1099, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.40192410475681456, |
|
"grad_norm": 0.13835251331329346, |
|
"learning_rate": 9.37565779173271e-05, |
|
"loss": 0.0892, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.40299305184393375, |
|
"grad_norm": 0.17169111967086792, |
|
"learning_rate": 9.331126319968514e-05, |
|
"loss": 0.1262, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.40406199893105293, |
|
"grad_norm": 0.16025681793689728, |
|
"learning_rate": 9.286608168007678e-05, |
|
"loss": 0.1191, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.4051309460181721, |
|
"grad_norm": 0.14073841273784637, |
|
"learning_rate": 9.242104222374886e-05, |
|
"loss": 0.0932, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.4061998931052913, |
|
"grad_norm": 0.15486131608486176, |
|
"learning_rate": 9.197615369311925e-05, |
|
"loss": 0.0997, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.4072688401924105, |
|
"grad_norm": 0.1486068218946457, |
|
"learning_rate": 9.153142494760028e-05, |
|
"loss": 0.1164, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.4083377872795297, |
|
"grad_norm": 0.14497411251068115, |
|
"learning_rate": 9.108686484342241e-05, |
|
"loss": 0.0991, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.4094067343666489, |
|
"grad_norm": 0.18235650658607483, |
|
"learning_rate": 9.06424822334578e-05, |
|
"loss": 0.1332, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.41047568145376806, |
|
"grad_norm": 0.17887920141220093, |
|
"learning_rate": 9.019828596704394e-05, |
|
"loss": 0.1429, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.41154462854088725, |
|
"grad_norm": 0.16443820297718048, |
|
"learning_rate": 8.975428488980765e-05, |
|
"loss": 0.1039, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.41261357562800643, |
|
"grad_norm": 0.17346209287643433, |
|
"learning_rate": 8.931048784348875e-05, |
|
"loss": 0.1193, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.4136825227151256, |
|
"grad_norm": 0.14269988238811493, |
|
"learning_rate": 8.886690366576394e-05, |
|
"loss": 0.0907, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.4147514698022448, |
|
"grad_norm": 0.18494229018688202, |
|
"learning_rate": 8.842354119007099e-05, |
|
"loss": 0.1241, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.415820416889364, |
|
"grad_norm": 0.14606322348117828, |
|
"learning_rate": 8.798040924543281e-05, |
|
"loss": 0.0939, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.4168893639764832, |
|
"grad_norm": 0.1913435161113739, |
|
"learning_rate": 8.753751665628141e-05, |
|
"loss": 0.1224, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.4179583110636024, |
|
"grad_norm": 0.16582347452640533, |
|
"learning_rate": 8.709487224228246e-05, |
|
"loss": 0.1047, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.41902725815072156, |
|
"grad_norm": 0.1860746145248413, |
|
"learning_rate": 8.665248481815941e-05, |
|
"loss": 0.1451, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.42009620523784075, |
|
"grad_norm": 0.18656174838542938, |
|
"learning_rate": 8.621036319351814e-05, |
|
"loss": 0.1295, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.42116515232495993, |
|
"grad_norm": 0.16289152204990387, |
|
"learning_rate": 8.57685161726715e-05, |
|
"loss": 0.1104, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.4222340994120791, |
|
"grad_norm": 0.1636841893196106, |
|
"learning_rate": 8.532695255446383e-05, |
|
"loss": 0.1181, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.4233030464991983, |
|
"grad_norm": 0.1686917096376419, |
|
"learning_rate": 8.488568113209593e-05, |
|
"loss": 0.1181, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.4243719935863175, |
|
"grad_norm": 0.14516602456569672, |
|
"learning_rate": 8.444471069294988e-05, |
|
"loss": 0.0933, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.4254409406734367, |
|
"grad_norm": 0.16165108978748322, |
|
"learning_rate": 8.400405001841399e-05, |
|
"loss": 0.1376, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.4265098877605559, |
|
"grad_norm": 0.2126784473657608, |
|
"learning_rate": 8.3563707883708e-05, |
|
"loss": 0.1541, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.42757883484767506, |
|
"grad_norm": 0.2085818201303482, |
|
"learning_rate": 8.312369305770843e-05, |
|
"loss": 0.1493, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.42757883484767506, |
|
"eval_loss": 0.1267417073249817, |
|
"eval_runtime": 288.9107, |
|
"eval_samples_per_second": 4.32, |
|
"eval_steps_per_second": 1.08, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.42864778193479425, |
|
"grad_norm": 0.15687406063079834, |
|
"learning_rate": 8.268401430277362e-05, |
|
"loss": 0.1135, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.42971672902191344, |
|
"grad_norm": 0.233999103307724, |
|
"learning_rate": 8.224468037456969e-05, |
|
"loss": 0.1453, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.4307856761090326, |
|
"grad_norm": 0.18340054154396057, |
|
"learning_rate": 8.180570002189583e-05, |
|
"loss": 0.1268, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.4318546231961518, |
|
"grad_norm": 0.16533297300338745, |
|
"learning_rate": 8.136708198651021e-05, |
|
"loss": 0.1216, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.432923570283271, |
|
"grad_norm": 0.1564950942993164, |
|
"learning_rate": 8.0928835002956e-05, |
|
"loss": 0.1088, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.4339925173703902, |
|
"grad_norm": 0.17598888278007507, |
|
"learning_rate": 8.049096779838719e-05, |
|
"loss": 0.1117, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.4350614644575094, |
|
"grad_norm": 0.16911205649375916, |
|
"learning_rate": 8.005348909239499e-05, |
|
"loss": 0.1226, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.43613041154462856, |
|
"grad_norm": 0.17374524474143982, |
|
"learning_rate": 7.961640759683416e-05, |
|
"loss": 0.1104, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.43719935863174775, |
|
"grad_norm": 0.14513778686523438, |
|
"learning_rate": 7.917973201564943e-05, |
|
"loss": 0.0912, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.43826830571886694, |
|
"grad_norm": 0.16398054361343384, |
|
"learning_rate": 7.874347104470234e-05, |
|
"loss": 0.1062, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.4393372528059861, |
|
"grad_norm": 0.17156217992305756, |
|
"learning_rate": 7.830763337159789e-05, |
|
"loss": 0.1221, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.4404061998931053, |
|
"grad_norm": 0.15586666762828827, |
|
"learning_rate": 7.787222767551164e-05, |
|
"loss": 0.1188, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.4414751469802245, |
|
"grad_norm": 0.20987504720687866, |
|
"learning_rate": 7.743726262701693e-05, |
|
"loss": 0.1527, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.4425440940673437, |
|
"grad_norm": 0.18425814807415009, |
|
"learning_rate": 7.700274688791196e-05, |
|
"loss": 0.1495, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.4436130411544629, |
|
"grad_norm": 0.1313970386981964, |
|
"learning_rate": 7.656868911104766e-05, |
|
"loss": 0.0865, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.44468198824158206, |
|
"grad_norm": 0.12412843853235245, |
|
"learning_rate": 7.613509794015517e-05, |
|
"loss": 0.0849, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.44575093532870125, |
|
"grad_norm": 0.18094252049922943, |
|
"learning_rate": 7.570198200967362e-05, |
|
"loss": 0.1186, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.44681988241582044, |
|
"grad_norm": 0.1528427004814148, |
|
"learning_rate": 7.526934994457844e-05, |
|
"loss": 0.127, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.4478888295029396, |
|
"grad_norm": 0.16211071610450745, |
|
"learning_rate": 7.483721036020948e-05, |
|
"loss": 0.1268, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.4489577765900588, |
|
"grad_norm": 0.1652361899614334, |
|
"learning_rate": 7.440557186209926e-05, |
|
"loss": 0.11, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.450026723677178, |
|
"grad_norm": 0.17334988713264465, |
|
"learning_rate": 7.3974443045802e-05, |
|
"loss": 0.1316, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.4510956707642972, |
|
"grad_norm": 0.1573891043663025, |
|
"learning_rate": 7.354383249672212e-05, |
|
"loss": 0.129, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.4521646178514164, |
|
"grad_norm": 0.17436981201171875, |
|
"learning_rate": 7.311374878994334e-05, |
|
"loss": 0.1294, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.45323356493853556, |
|
"grad_norm": 0.22541721165180206, |
|
"learning_rate": 7.268420049005807e-05, |
|
"loss": 0.1435, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.45430251202565475, |
|
"grad_norm": 0.19348768889904022, |
|
"learning_rate": 7.225519615099659e-05, |
|
"loss": 0.117, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.45537145911277394, |
|
"grad_norm": 0.1548422873020172, |
|
"learning_rate": 7.182674431585704e-05, |
|
"loss": 0.1094, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.4564404061998931, |
|
"grad_norm": 0.17469367384910583, |
|
"learning_rate": 7.139885351673503e-05, |
|
"loss": 0.1235, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.4575093532870123, |
|
"grad_norm": 0.20104283094406128, |
|
"learning_rate": 7.097153227455379e-05, |
|
"loss": 0.1051, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.4585783003741315, |
|
"grad_norm": 0.15787599980831146, |
|
"learning_rate": 7.054478909889455e-05, |
|
"loss": 0.1212, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.4596472474612507, |
|
"grad_norm": 0.1746998131275177, |
|
"learning_rate": 7.011863248782711e-05, |
|
"loss": 0.1163, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.4607161945483699, |
|
"grad_norm": 0.15664790570735931, |
|
"learning_rate": 6.969307092774047e-05, |
|
"loss": 0.1072, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.46178514163548906, |
|
"grad_norm": 0.18459376692771912, |
|
"learning_rate": 6.92681128931739e-05, |
|
"loss": 0.1225, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.46285408872260825, |
|
"grad_norm": 0.18352118134498596, |
|
"learning_rate": 6.884376684664832e-05, |
|
"loss": 0.125, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.46392303580972744, |
|
"grad_norm": 0.16230523586273193, |
|
"learning_rate": 6.842004123849752e-05, |
|
"loss": 0.1244, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.4649919828968466, |
|
"grad_norm": 0.15808574855327606, |
|
"learning_rate": 6.799694450670008e-05, |
|
"loss": 0.1047, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.4660609299839658, |
|
"grad_norm": 0.1523110568523407, |
|
"learning_rate": 6.757448507671128e-05, |
|
"loss": 0.1243, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.467129877071085, |
|
"grad_norm": 0.15416131913661957, |
|
"learning_rate": 6.715267136129525e-05, |
|
"loss": 0.0946, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.4681988241582042, |
|
"grad_norm": 0.170943483710289, |
|
"learning_rate": 6.673151176035762e-05, |
|
"loss": 0.1148, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.4692677712453234, |
|
"grad_norm": 0.13973015546798706, |
|
"learning_rate": 6.6311014660778e-05, |
|
"loss": 0.0978, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.47033671833244256, |
|
"grad_norm": 0.1479904055595398, |
|
"learning_rate": 6.589118843624315e-05, |
|
"loss": 0.1199, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.47140566541956175, |
|
"grad_norm": 0.14838270843029022, |
|
"learning_rate": 6.547204144708025e-05, |
|
"loss": 0.1009, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.47247461250668094, |
|
"grad_norm": 0.18144488334655762, |
|
"learning_rate": 6.505358204009017e-05, |
|
"loss": 0.0992, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.4735435595938001, |
|
"grad_norm": 0.15086080133914948, |
|
"learning_rate": 6.463581854838152e-05, |
|
"loss": 0.0959, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.4746125066809193, |
|
"grad_norm": 0.15263791382312775, |
|
"learning_rate": 6.421875929120469e-05, |
|
"loss": 0.1061, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.4756814537680385, |
|
"grad_norm": 0.18711231648921967, |
|
"learning_rate": 6.380241257378585e-05, |
|
"loss": 0.1395, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.4767504008551577, |
|
"grad_norm": 0.16735321283340454, |
|
"learning_rate": 6.338678668716209e-05, |
|
"loss": 0.1099, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.4778193479422769, |
|
"grad_norm": 0.17839953303337097, |
|
"learning_rate": 6.297188990801584e-05, |
|
"loss": 0.1316, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.47888829502939606, |
|
"grad_norm": 0.14725211262702942, |
|
"learning_rate": 6.25577304985103e-05, |
|
"loss": 0.1034, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.47995724211651525, |
|
"grad_norm": 0.1480267494916916, |
|
"learning_rate": 6.214431670612494e-05, |
|
"loss": 0.109, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.48102618920363444, |
|
"grad_norm": 0.17610524594783783, |
|
"learning_rate": 6.173165676349103e-05, |
|
"loss": 0.1159, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.4820951362907536, |
|
"grad_norm": 0.17769086360931396, |
|
"learning_rate": 6.131975888822793e-05, |
|
"loss": 0.1002, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.4831640833778728, |
|
"grad_norm": 0.1791263222694397, |
|
"learning_rate": 6.090863128277938e-05, |
|
"loss": 0.1291, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.484233030464992, |
|
"grad_norm": 0.20171315968036652, |
|
"learning_rate": 6.049828213425002e-05, |
|
"loss": 0.1383, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.4853019775521112, |
|
"grad_norm": 0.182616725564003, |
|
"learning_rate": 6.008871961424258e-05, |
|
"loss": 0.1145, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.4863709246392304, |
|
"grad_norm": 0.21832366287708282, |
|
"learning_rate": 5.967995187869505e-05, |
|
"loss": 0.1562, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.48743987172634956, |
|
"grad_norm": 0.1896723210811615, |
|
"learning_rate": 5.9271987067718125e-05, |
|
"loss": 0.1156, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.48850881881346875, |
|
"grad_norm": 0.1710599958896637, |
|
"learning_rate": 5.8864833305433457e-05, |
|
"loss": 0.1077, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.48957776590058794, |
|
"grad_norm": 0.17611974477767944, |
|
"learning_rate": 5.845849869981137e-05, |
|
"loss": 0.0891, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.4906467129877071, |
|
"grad_norm": 0.20914289355278015, |
|
"learning_rate": 5.805299134250991e-05, |
|
"loss": 0.1424, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.4917156600748263, |
|
"grad_norm": 0.17185574769973755, |
|
"learning_rate": 5.7648319308713464e-05, |
|
"loss": 0.1043, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.4927846071619455, |
|
"grad_norm": 0.17580777406692505, |
|
"learning_rate": 5.7244490656971815e-05, |
|
"loss": 0.0928, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.4938535542490647, |
|
"grad_norm": 0.13630133867263794, |
|
"learning_rate": 5.684151342903992e-05, |
|
"loss": 0.096, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.4949225013361839, |
|
"grad_norm": 0.13813966512680054, |
|
"learning_rate": 5.643939564971779e-05, |
|
"loss": 0.0951, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.49599144842330306, |
|
"grad_norm": 0.15008163452148438, |
|
"learning_rate": 5.603814532669032e-05, |
|
"loss": 0.0785, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.49706039551042225, |
|
"grad_norm": 0.17430590093135834, |
|
"learning_rate": 5.5637770450368186e-05, |
|
"loss": 0.0941, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.49812934259754144, |
|
"grad_norm": 0.131693497300148, |
|
"learning_rate": 5.5238278993728756e-05, |
|
"loss": 0.0869, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.4991982896846606, |
|
"grad_norm": 0.14627690613269806, |
|
"learning_rate": 5.483967891215691e-05, |
|
"loss": 0.1071, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.5002672367717798, |
|
"grad_norm": 0.20751060545444489, |
|
"learning_rate": 5.4441978143287066e-05, |
|
"loss": 0.113, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.501336183858899, |
|
"grad_norm": 0.16088207066059113, |
|
"learning_rate": 5.4045184606844834e-05, |
|
"loss": 0.1283, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.5024051309460181, |
|
"grad_norm": 0.15302957594394684, |
|
"learning_rate": 5.364930620448946e-05, |
|
"loss": 0.1173, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.5034740780331374, |
|
"grad_norm": 0.16366977989673615, |
|
"learning_rate": 5.3254350819656374e-05, |
|
"loss": 0.1184, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.5045430251202565, |
|
"grad_norm": 0.18926464021205902, |
|
"learning_rate": 5.286032631740023e-05, |
|
"loss": 0.1403, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.5056119722073757, |
|
"grad_norm": 0.21518391370773315, |
|
"learning_rate": 5.246724054423834e-05, |
|
"loss": 0.1348, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.5066809192944949, |
|
"grad_norm": 0.19751004874706268, |
|
"learning_rate": 5.207510132799436e-05, |
|
"loss": 0.127, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.5077498663816141, |
|
"grad_norm": 0.23470665514469147, |
|
"learning_rate": 5.168391647764227e-05, |
|
"loss": 0.139, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.5088188134687333, |
|
"grad_norm": 0.18137036263942719, |
|
"learning_rate": 5.1293693783151275e-05, |
|
"loss": 0.1213, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.5098877605558525, |
|
"grad_norm": 0.16304810345172882, |
|
"learning_rate": 5.090444101533029e-05, |
|
"loss": 0.1206, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.5109567076429716, |
|
"grad_norm": 0.16015806794166565, |
|
"learning_rate": 5.051616592567323e-05, |
|
"loss": 0.114, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.5120256547300909, |
|
"grad_norm": 0.18064187467098236, |
|
"learning_rate": 5.012887624620496e-05, |
|
"loss": 0.1103, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.51309460181721, |
|
"grad_norm": 0.18429364264011383, |
|
"learning_rate": 4.9742579689326874e-05, |
|
"loss": 0.1344, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.5141635489043292, |
|
"grad_norm": 0.13880470395088196, |
|
"learning_rate": 4.9357283947663676e-05, |
|
"loss": 0.097, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.5152324959914484, |
|
"grad_norm": 0.15282510221004486, |
|
"learning_rate": 4.8972996693910054e-05, |
|
"loss": 0.0952, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.5163014430785676, |
|
"grad_norm": 0.1321898102760315, |
|
"learning_rate": 4.8589725580677835e-05, |
|
"loss": 0.0896, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.5173703901656868, |
|
"grad_norm": 0.15336064994335175, |
|
"learning_rate": 4.82074782403437e-05, |
|
"loss": 0.1185, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.518439337252806, |
|
"grad_norm": 0.14170211553573608, |
|
"learning_rate": 4.7826262284897095e-05, |
|
"loss": 0.1037, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.5195082843399251, |
|
"grad_norm": 0.16489258408546448, |
|
"learning_rate": 4.7446085305788725e-05, |
|
"loss": 0.1238, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.5205772314270444, |
|
"grad_norm": 0.16994819045066833, |
|
"learning_rate": 4.706695487377932e-05, |
|
"loss": 0.1086, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.5216461785141635, |
|
"grad_norm": 0.17084138095378876, |
|
"learning_rate": 4.668887853878896e-05, |
|
"loss": 0.1088, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.5227151256012827, |
|
"grad_norm": 0.169681578874588, |
|
"learning_rate": 4.6311863829746596e-05, |
|
"loss": 0.1055, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.5237840726884019, |
|
"grad_norm": 0.16820192337036133, |
|
"learning_rate": 4.593591825444028e-05, |
|
"loss": 0.0947, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.5248530197755211, |
|
"grad_norm": 0.1566527783870697, |
|
"learning_rate": 4.55610492993674e-05, |
|
"loss": 0.0913, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.5259219668626403, |
|
"grad_norm": 0.20646369457244873, |
|
"learning_rate": 4.518726442958599e-05, |
|
"loss": 0.1707, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.5269909139497595, |
|
"grad_norm": 0.17909252643585205, |
|
"learning_rate": 4.4814571088565735e-05, |
|
"loss": 0.1444, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.5280598610368786, |
|
"grad_norm": 0.15825694799423218, |
|
"learning_rate": 4.444297669803981e-05, |
|
"loss": 0.1026, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.5291288081239979, |
|
"grad_norm": 0.16258923709392548, |
|
"learning_rate": 4.407248865785716e-05, |
|
"loss": 0.1118, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.530197755211117, |
|
"grad_norm": 0.3853148818016052, |
|
"learning_rate": 4.370311434583525e-05, |
|
"loss": 0.1833, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.5312667022982362, |
|
"grad_norm": 0.1683734655380249, |
|
"learning_rate": 4.333486111761279e-05, |
|
"loss": 0.1154, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.5323356493853554, |
|
"grad_norm": 0.12579625844955444, |
|
"learning_rate": 4.296773630650358e-05, |
|
"loss": 0.0924, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.5334045964724746, |
|
"grad_norm": 0.15746884047985077, |
|
"learning_rate": 4.260174722335046e-05, |
|
"loss": 0.1052, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.5344735435595938, |
|
"grad_norm": 0.15484918653964996, |
|
"learning_rate": 4.223690115637944e-05, |
|
"loss": 0.1023, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.5344735435595938, |
|
"eval_loss": 0.1166808158159256, |
|
"eval_runtime": 288.9178, |
|
"eval_samples_per_second": 4.32, |
|
"eval_steps_per_second": 1.08, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.535542490646713, |
|
"grad_norm": 0.16423402726650238, |
|
"learning_rate": 4.187320537105489e-05, |
|
"loss": 0.1262, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.5366114377338321, |
|
"grad_norm": 0.19593016803264618, |
|
"learning_rate": 4.15106671099347e-05, |
|
"loss": 0.1401, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.5376803848209514, |
|
"grad_norm": 0.13396374881267548, |
|
"learning_rate": 4.114929359252606e-05, |
|
"loss": 0.1002, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.5387493319080705, |
|
"grad_norm": 0.18291138112545013, |
|
"learning_rate": 4.0789092015141714e-05, |
|
"loss": 0.1332, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.5398182789951897, |
|
"grad_norm": 0.16943904757499695, |
|
"learning_rate": 4.0430069550756665e-05, |
|
"loss": 0.1018, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.5408872260823089, |
|
"grad_norm": 0.1741015464067459, |
|
"learning_rate": 4.007223334886531e-05, |
|
"loss": 0.1224, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.5419561731694281, |
|
"grad_norm": 0.13967184722423553, |
|
"learning_rate": 3.971559053533906e-05, |
|
"loss": 0.105, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.5430251202565473, |
|
"grad_norm": 0.1619994342327118, |
|
"learning_rate": 3.9360148212284475e-05, |
|
"loss": 0.0997, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.5440940673436665, |
|
"grad_norm": 0.1519555002450943, |
|
"learning_rate": 3.900591345790181e-05, |
|
"loss": 0.1045, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.5451630144307856, |
|
"grad_norm": 0.16195788979530334, |
|
"learning_rate": 3.865289332634407e-05, |
|
"loss": 0.1041, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.5462319615179049, |
|
"grad_norm": 0.1591130644083023, |
|
"learning_rate": 3.8301094847576446e-05, |
|
"loss": 0.1128, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.547300908605024, |
|
"grad_norm": 0.15072457492351532, |
|
"learning_rate": 3.7950525027236585e-05, |
|
"loss": 0.1148, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.5483698556921432, |
|
"grad_norm": 0.16123512387275696, |
|
"learning_rate": 3.760119084649468e-05, |
|
"loss": 0.1088, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.5494388027792624, |
|
"grad_norm": 0.152877539396286, |
|
"learning_rate": 3.725309926191479e-05, |
|
"loss": 0.1065, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.5505077498663816, |
|
"grad_norm": 0.23673762381076813, |
|
"learning_rate": 3.690625720531627e-05, |
|
"loss": 0.1095, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.5515766969535008, |
|
"grad_norm": 0.13103598356246948, |
|
"learning_rate": 3.6560671583635467e-05, |
|
"loss": 0.095, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.55264564404062, |
|
"grad_norm": 0.1740058958530426, |
|
"learning_rate": 3.621634927878846e-05, |
|
"loss": 0.1247, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.5537145911277391, |
|
"grad_norm": 0.20583245158195496, |
|
"learning_rate": 3.5873297147533915e-05, |
|
"loss": 0.1513, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.5547835382148584, |
|
"grad_norm": 0.1914791762828827, |
|
"learning_rate": 3.553152202133651e-05, |
|
"loss": 0.1267, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.5558524853019775, |
|
"grad_norm": 0.2231132984161377, |
|
"learning_rate": 3.519103070623096e-05, |
|
"loss": 0.148, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.5569214323890967, |
|
"grad_norm": 0.14965134859085083, |
|
"learning_rate": 3.485182998268642e-05, |
|
"loss": 0.1011, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.5579903794762159, |
|
"grad_norm": 0.1701858639717102, |
|
"learning_rate": 3.45139266054715e-05, |
|
"loss": 0.1145, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.5590593265633351, |
|
"grad_norm": 0.17237675189971924, |
|
"learning_rate": 3.4177327303519765e-05, |
|
"loss": 0.1403, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.5601282736504543, |
|
"grad_norm": 0.17186829447746277, |
|
"learning_rate": 3.384203877979559e-05, |
|
"loss": 0.1114, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.5611972207375735, |
|
"grad_norm": 0.17262551188468933, |
|
"learning_rate": 3.3508067711161004e-05, |
|
"loss": 0.0962, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.5622661678246926, |
|
"grad_norm": 0.1815585047006607, |
|
"learning_rate": 3.3175420748242406e-05, |
|
"loss": 0.1222, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.5633351149118119, |
|
"grad_norm": 0.20134767889976501, |
|
"learning_rate": 3.2844104515298155e-05, |
|
"loss": 0.1498, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.564404061998931, |
|
"grad_norm": 0.12754414975643158, |
|
"learning_rate": 3.2514125610086955e-05, |
|
"loss": 0.0829, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.5654730090860502, |
|
"grad_norm": 0.15976329147815704, |
|
"learning_rate": 3.218549060373615e-05, |
|
"loss": 0.0966, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.5665419561731694, |
|
"grad_norm": 0.17927618324756622, |
|
"learning_rate": 3.185820604061088e-05, |
|
"loss": 0.1234, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.5676109032602886, |
|
"grad_norm": 0.14203406870365143, |
|
"learning_rate": 3.153227843818405e-05, |
|
"loss": 0.0785, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.5686798503474078, |
|
"grad_norm": 0.18028829991817474, |
|
"learning_rate": 3.1207714286906256e-05, |
|
"loss": 0.1068, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.569748797434527, |
|
"grad_norm": 0.1877748668193817, |
|
"learning_rate": 3.088452005007656e-05, |
|
"loss": 0.1479, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.5708177445216461, |
|
"grad_norm": 0.20719939470291138, |
|
"learning_rate": 3.056270216371395e-05, |
|
"loss": 0.164, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.5718866916087654, |
|
"grad_norm": 0.16265811026096344, |
|
"learning_rate": 3.0242267036429027e-05, |
|
"loss": 0.0998, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.5729556386958845, |
|
"grad_norm": 0.18794025480747223, |
|
"learning_rate": 2.9923221049296446e-05, |
|
"loss": 0.142, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.5740245857830037, |
|
"grad_norm": 0.142957404255867, |
|
"learning_rate": 2.9605570555727835e-05, |
|
"loss": 0.1258, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.5750935328701229, |
|
"grad_norm": 0.16433711349964142, |
|
"learning_rate": 2.9289321881345254e-05, |
|
"loss": 0.1155, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.5761624799572421, |
|
"grad_norm": 0.16358301043510437, |
|
"learning_rate": 2.897448132385525e-05, |
|
"loss": 0.1072, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.5772314270443613, |
|
"grad_norm": 0.14673899114131927, |
|
"learning_rate": 2.8661055152923456e-05, |
|
"loss": 0.1165, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.5783003741314805, |
|
"grad_norm": 0.15978941321372986, |
|
"learning_rate": 2.8349049610049683e-05, |
|
"loss": 0.0882, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.5793693212185996, |
|
"grad_norm": 0.16689325869083405, |
|
"learning_rate": 2.8038470908443714e-05, |
|
"loss": 0.1183, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.5804382683057189, |
|
"grad_norm": 0.19823519885540009, |
|
"learning_rate": 2.7729325232901494e-05, |
|
"loss": 0.1188, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.581507215392838, |
|
"grad_norm": 0.14368073642253876, |
|
"learning_rate": 2.742161873968202e-05, |
|
"loss": 0.0981, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.5825761624799572, |
|
"grad_norm": 0.16314393281936646, |
|
"learning_rate": 2.7115357556384756e-05, |
|
"loss": 0.0958, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.5836451095670764, |
|
"grad_norm": 0.15624821186065674, |
|
"learning_rate": 2.681054778182748e-05, |
|
"loss": 0.1096, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.5847140566541956, |
|
"grad_norm": 0.1835729330778122, |
|
"learning_rate": 2.650719548592502e-05, |
|
"loss": 0.1118, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.5857830037413148, |
|
"grad_norm": 0.18949463963508606, |
|
"learning_rate": 2.6205306709568354e-05, |
|
"loss": 0.1127, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.586851950828434, |
|
"grad_norm": 0.18184354901313782, |
|
"learning_rate": 2.5904887464504114e-05, |
|
"loss": 0.0934, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.5879208979155531, |
|
"grad_norm": 0.2154887169599533, |
|
"learning_rate": 2.5605943733215042e-05, |
|
"loss": 0.1444, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.5889898450026724, |
|
"grad_norm": 0.14278879761695862, |
|
"learning_rate": 2.530848146880097e-05, |
|
"loss": 0.1012, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.5900587920897915, |
|
"grad_norm": 0.15597015619277954, |
|
"learning_rate": 2.501250659485992e-05, |
|
"loss": 0.1025, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.5911277391769107, |
|
"grad_norm": 0.1862352341413498, |
|
"learning_rate": 2.4718025005370414e-05, |
|
"loss": 0.1163, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.5921966862640299, |
|
"grad_norm": 0.15955589711666107, |
|
"learning_rate": 2.4425042564574184e-05, |
|
"loss": 0.1337, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.5932656333511491, |
|
"grad_norm": 0.1371564120054245, |
|
"learning_rate": 2.4133565106859003e-05, |
|
"loss": 0.0946, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.5943345804382683, |
|
"grad_norm": 0.13667546212673187, |
|
"learning_rate": 2.38435984366429e-05, |
|
"loss": 0.0941, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.5954035275253875, |
|
"grad_norm": 0.12957549095153809, |
|
"learning_rate": 2.3555148328258368e-05, |
|
"loss": 0.0998, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.5964724746125066, |
|
"grad_norm": 0.1276206374168396, |
|
"learning_rate": 2.3268220525837437e-05, |
|
"loss": 0.0895, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.5975414216996259, |
|
"grad_norm": 0.15202417969703674, |
|
"learning_rate": 2.2982820743197243e-05, |
|
"loss": 0.0923, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.598610368786745, |
|
"grad_norm": 0.17828796803951263, |
|
"learning_rate": 2.26989546637263e-05, |
|
"loss": 0.12, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.5996793158738642, |
|
"grad_norm": 0.17945897579193115, |
|
"learning_rate": 2.2416627940271297e-05, |
|
"loss": 0.1285, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.6007482629609834, |
|
"grad_norm": 0.18568158149719238, |
|
"learning_rate": 2.2135846195024513e-05, |
|
"loss": 0.15, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.6018172100481026, |
|
"grad_norm": 0.1976700872182846, |
|
"learning_rate": 2.1856615019411797e-05, |
|
"loss": 0.1337, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.6028861571352218, |
|
"grad_norm": 0.20092736184597015, |
|
"learning_rate": 2.157893997398146e-05, |
|
"loss": 0.1435, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.603955104222341, |
|
"grad_norm": 0.16839931905269623, |
|
"learning_rate": 2.130282658829328e-05, |
|
"loss": 0.0981, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.6050240513094601, |
|
"grad_norm": 0.16764996945858002, |
|
"learning_rate": 2.1028280360808407e-05, |
|
"loss": 0.0963, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.6060929983965794, |
|
"grad_norm": 0.13916774094104767, |
|
"learning_rate": 2.075530675878016e-05, |
|
"loss": 0.0937, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.6071619454836985, |
|
"grad_norm": 0.1933029145002365, |
|
"learning_rate": 2.0483911218144715e-05, |
|
"loss": 0.1366, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.6082308925708177, |
|
"grad_norm": 0.15825247764587402, |
|
"learning_rate": 2.0214099143413212e-05, |
|
"loss": 0.0996, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.6092998396579369, |
|
"grad_norm": 0.1718830019235611, |
|
"learning_rate": 1.994587590756397e-05, |
|
"loss": 0.102, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.6103687867450561, |
|
"grad_norm": 0.18970075249671936, |
|
"learning_rate": 1.967924685193552e-05, |
|
"loss": 0.1178, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.6114377338321753, |
|
"grad_norm": 0.16414959728717804, |
|
"learning_rate": 1.941421728612023e-05, |
|
"loss": 0.1052, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.6125066809192945, |
|
"grad_norm": 0.1575910896062851, |
|
"learning_rate": 1.915079248785858e-05, |
|
"loss": 0.1141, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.6135756280064136, |
|
"grad_norm": 0.18138492107391357, |
|
"learning_rate": 1.8888977702934085e-05, |
|
"loss": 0.1054, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.6146445750935329, |
|
"grad_norm": 0.15937894582748413, |
|
"learning_rate": 1.862877814506878e-05, |
|
"loss": 0.104, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.615713522180652, |
|
"grad_norm": 0.2073594182729721, |
|
"learning_rate": 1.837019899581943e-05, |
|
"loss": 0.1216, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.6167824692677712, |
|
"grad_norm": 0.14365796744823456, |
|
"learning_rate": 1.8113245404474354e-05, |
|
"loss": 0.0961, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.6178514163548904, |
|
"grad_norm": 0.18027973175048828, |
|
"learning_rate": 1.7857922487950874e-05, |
|
"loss": 0.1319, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.6189203634420096, |
|
"grad_norm": 0.18247497081756592, |
|
"learning_rate": 1.760423533069332e-05, |
|
"loss": 0.1246, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.6199893105291288, |
|
"grad_norm": 0.21188899874687195, |
|
"learning_rate": 1.7352188984572026e-05, |
|
"loss": 0.1493, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.621058257616248, |
|
"grad_norm": 0.17170637845993042, |
|
"learning_rate": 1.710178846878251e-05, |
|
"loss": 0.0808, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.6221272047033671, |
|
"grad_norm": 0.13367417454719543, |
|
"learning_rate": 1.6853038769745467e-05, |
|
"loss": 0.0853, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.6231961517904864, |
|
"grad_norm": 0.15833276510238647, |
|
"learning_rate": 1.6605944841007802e-05, |
|
"loss": 0.1133, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.6242650988776055, |
|
"grad_norm": 0.14467398822307587, |
|
"learning_rate": 1.6360511603143648e-05, |
|
"loss": 0.0959, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.6253340459647247, |
|
"grad_norm": 0.15642966330051422, |
|
"learning_rate": 1.6116743943656488e-05, |
|
"loss": 0.1015, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.6264029930518439, |
|
"grad_norm": 0.18850955367088318, |
|
"learning_rate": 1.587464671688187e-05, |
|
"loss": 0.1258, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.6274719401389631, |
|
"grad_norm": 0.16557101905345917, |
|
"learning_rate": 1.56342247438908e-05, |
|
"loss": 0.119, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.6285408872260823, |
|
"grad_norm": 0.13995720446109772, |
|
"learning_rate": 1.5395482812393514e-05, |
|
"loss": 0.1172, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.6296098343132015, |
|
"grad_norm": 0.21847642958164215, |
|
"learning_rate": 1.5158425676644329e-05, |
|
"loss": 0.1504, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.6306787814003206, |
|
"grad_norm": 0.14314381778240204, |
|
"learning_rate": 1.4923058057346929e-05, |
|
"loss": 0.0987, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.6317477284874399, |
|
"grad_norm": 0.17072826623916626, |
|
"learning_rate": 1.4689384641560311e-05, |
|
"loss": 0.1182, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.632816675574559, |
|
"grad_norm": 0.18338800966739655, |
|
"learning_rate": 1.4457410082605483e-05, |
|
"loss": 0.1188, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.6338856226616782, |
|
"grad_norm": 0.13059154152870178, |
|
"learning_rate": 1.42271389999728e-05, |
|
"loss": 0.0729, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.6349545697487974, |
|
"grad_norm": 0.17279797792434692, |
|
"learning_rate": 1.3998575979229944e-05, |
|
"loss": 0.0968, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.6360235168359166, |
|
"grad_norm": 0.17134664952754974, |
|
"learning_rate": 1.3771725571930672e-05, |
|
"loss": 0.1149, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.6370924639230358, |
|
"grad_norm": 0.3472153842449188, |
|
"learning_rate": 1.3546592295524074e-05, |
|
"loss": 0.1259, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.638161411010155, |
|
"grad_norm": 0.1609698385000229, |
|
"learning_rate": 1.332318063326471e-05, |
|
"loss": 0.1091, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.6392303580972741, |
|
"grad_norm": 0.16347070038318634, |
|
"learning_rate": 1.3101495034123313e-05, |
|
"loss": 0.121, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.6402993051843934, |
|
"grad_norm": 0.14996081590652466, |
|
"learning_rate": 1.2881539912698082e-05, |
|
"loss": 0.0921, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.6413682522715125, |
|
"grad_norm": 0.15875977277755737, |
|
"learning_rate": 1.2663319649127026e-05, |
|
"loss": 0.1015, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.6413682522715125, |
|
"eval_loss": 0.11237961053848267, |
|
"eval_runtime": 288.8569, |
|
"eval_samples_per_second": 4.32, |
|
"eval_steps_per_second": 1.08, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.6424371993586317, |
|
"grad_norm": 0.19052091240882874, |
|
"learning_rate": 1.2446838589000397e-05, |
|
"loss": 0.1107, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.6435061464457509, |
|
"grad_norm": 0.16737191379070282, |
|
"learning_rate": 1.2232101043274436e-05, |
|
"loss": 0.1163, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.6445750935328701, |
|
"grad_norm": 0.14812670648097992, |
|
"learning_rate": 1.2019111288185437e-05, |
|
"loss": 0.0909, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.6456440406199893, |
|
"grad_norm": 0.143129363656044, |
|
"learning_rate": 1.1807873565164506e-05, |
|
"loss": 0.0955, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.6467129877071085, |
|
"grad_norm": 0.1692836433649063, |
|
"learning_rate": 1.1598392080753206e-05, |
|
"loss": 0.1016, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.6477819347942276, |
|
"grad_norm": 0.13897959887981415, |
|
"learning_rate": 1.139067100651976e-05, |
|
"loss": 0.0999, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.6488508818813469, |
|
"grad_norm": 0.18241965770721436, |
|
"learning_rate": 1.1184714478975934e-05, |
|
"loss": 0.1129, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.649919828968466, |
|
"grad_norm": 0.1324148029088974, |
|
"learning_rate": 1.0980526599494733e-05, |
|
"loss": 0.0854, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.6509887760555852, |
|
"grad_norm": 0.17263776063919067, |
|
"learning_rate": 1.0778111434228689e-05, |
|
"loss": 0.1312, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.6520577231427044, |
|
"grad_norm": 0.14975489675998688, |
|
"learning_rate": 1.057747301402887e-05, |
|
"loss": 0.1075, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.6531266702298236, |
|
"grad_norm": 0.13988271355628967, |
|
"learning_rate": 1.0378615334364694e-05, |
|
"loss": 0.0856, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.6541956173169428, |
|
"grad_norm": 0.2114977240562439, |
|
"learning_rate": 1.0181542355244167e-05, |
|
"loss": 0.1277, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.655264564404062, |
|
"grad_norm": 0.1697227954864502, |
|
"learning_rate": 9.98625800113534e-06, |
|
"loss": 0.1142, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.6563335114911811, |
|
"grad_norm": 0.17489199340343475, |
|
"learning_rate": 9.792766160887868e-06, |
|
"loss": 0.1246, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.6574024585783004, |
|
"grad_norm": 0.17671118676662445, |
|
"learning_rate": 9.601070687655667e-06, |
|
"loss": 0.1258, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.6584714056654195, |
|
"grad_norm": 0.15085704624652863, |
|
"learning_rate": 9.411175398820271e-06, |
|
"loss": 0.1179, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.6595403527525387, |
|
"grad_norm": 0.18436650931835175, |
|
"learning_rate": 9.223084075914712e-06, |
|
"loss": 0.1252, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.6606092998396579, |
|
"grad_norm": 0.16538633406162262, |
|
"learning_rate": 9.036800464548157e-06, |
|
"loss": 0.1066, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.6616782469267771, |
|
"grad_norm": 0.14048168063163757, |
|
"learning_rate": 8.852328274331545e-06, |
|
"loss": 0.0866, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.6627471940138963, |
|
"grad_norm": 0.22049780189990997, |
|
"learning_rate": 8.669671178803485e-06, |
|
"loss": 0.1782, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.6638161411010155, |
|
"grad_norm": 0.15474286675453186, |
|
"learning_rate": 8.488832815357173e-06, |
|
"loss": 0.1086, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.6648850881881346, |
|
"grad_norm": 0.14860907196998596, |
|
"learning_rate": 8.309816785168034e-06, |
|
"loss": 0.0887, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.6659540352752539, |
|
"grad_norm": 0.1767965406179428, |
|
"learning_rate": 8.132626653121912e-06, |
|
"loss": 0.1028, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.6670229823623731, |
|
"grad_norm": 0.1610213965177536, |
|
"learning_rate": 7.95726594774413e-06, |
|
"loss": 0.1179, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.6680919294494923, |
|
"grad_norm": 0.1768268495798111, |
|
"learning_rate": 7.783738161129206e-06, |
|
"loss": 0.1308, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.6691608765366115, |
|
"grad_norm": 0.1573677510023117, |
|
"learning_rate": 7.612046748871327e-06, |
|
"loss": 0.1099, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.6702298236237306, |
|
"grad_norm": 0.1720394343137741, |
|
"learning_rate": 7.44219512999551e-06, |
|
"loss": 0.1102, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.6712987707108499, |
|
"grad_norm": 0.13623464107513428, |
|
"learning_rate": 7.2741866868895395e-06, |
|
"loss": 0.0867, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.672367717797969, |
|
"grad_norm": 0.18556751310825348, |
|
"learning_rate": 7.108024765236588e-06, |
|
"loss": 0.0872, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.6734366648850882, |
|
"grad_norm": 0.1814941018819809, |
|
"learning_rate": 6.943712673948644e-06, |
|
"loss": 0.1179, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.6745056119722074, |
|
"grad_norm": 0.1475355327129364, |
|
"learning_rate": 6.78125368510053e-06, |
|
"loss": 0.0925, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.6755745590593266, |
|
"grad_norm": 0.15053223073482513, |
|
"learning_rate": 6.620651033864844e-06, |
|
"loss": 0.1029, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.6766435061464458, |
|
"grad_norm": 0.17404505610466003, |
|
"learning_rate": 6.461907918447463e-06, |
|
"loss": 0.1339, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.677712453233565, |
|
"grad_norm": 0.165951207280159, |
|
"learning_rate": 6.3050275000238414e-06, |
|
"loss": 0.12, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.6787814003206841, |
|
"grad_norm": 0.1419910490512848, |
|
"learning_rate": 6.150012902676139e-06, |
|
"loss": 0.0969, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.6798503474078034, |
|
"grad_norm": 0.17707513272762299, |
|
"learning_rate": 5.996867213330992e-06, |
|
"loss": 0.1325, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.6809192944949225, |
|
"grad_norm": 0.15850454568862915, |
|
"learning_rate": 5.8455934816979305e-06, |
|
"loss": 0.0944, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.6819882415820417, |
|
"grad_norm": 0.17115013301372528, |
|
"learning_rate": 5.696194720208792e-06, |
|
"loss": 0.1229, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.6830571886691609, |
|
"grad_norm": 0.1440781056880951, |
|
"learning_rate": 5.54867390395768e-06, |
|
"loss": 0.093, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.6841261357562801, |
|
"grad_norm": 0.18968208134174347, |
|
"learning_rate": 5.403033970641647e-06, |
|
"loss": 0.1386, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.6851950828433993, |
|
"grad_norm": 0.14848604798316956, |
|
"learning_rate": 5.259277820502306e-06, |
|
"loss": 0.0903, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.6862640299305185, |
|
"grad_norm": 0.16736701130867004, |
|
"learning_rate": 5.1174083162680465e-06, |
|
"loss": 0.1156, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.6873329770176376, |
|
"grad_norm": 0.15795965492725372, |
|
"learning_rate": 4.9774282830969296e-06, |
|
"loss": 0.1229, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.6884019241047569, |
|
"grad_norm": 0.16274042427539825, |
|
"learning_rate": 4.839340508520562e-06, |
|
"loss": 0.1149, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.689470871191876, |
|
"grad_norm": 0.14476804435253143, |
|
"learning_rate": 4.703147742388536e-06, |
|
"loss": 0.0914, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.6905398182789952, |
|
"grad_norm": 0.15442495048046112, |
|
"learning_rate": 4.568852696813619e-06, |
|
"loss": 0.0897, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.6916087653661144, |
|
"grad_norm": 0.15678077936172485, |
|
"learning_rate": 4.436458046117831e-06, |
|
"loss": 0.0956, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.6926777124532336, |
|
"grad_norm": 0.15000145137310028, |
|
"learning_rate": 4.305966426779118e-06, |
|
"loss": 0.1062, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.6937466595403528, |
|
"grad_norm": 0.1456059366464615, |
|
"learning_rate": 4.177380437378886e-06, |
|
"loss": 0.0894, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.694815606627472, |
|
"grad_norm": 0.14858050644397736, |
|
"learning_rate": 4.050702638550275e-06, |
|
"loss": 0.1073, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.6958845537145911, |
|
"grad_norm": 0.12687133252620697, |
|
"learning_rate": 3.9259355529270625e-06, |
|
"loss": 0.1037, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.6969535008017104, |
|
"grad_norm": 0.16253836452960968, |
|
"learning_rate": 3.8030816650935776e-06, |
|
"loss": 0.1209, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.6980224478888295, |
|
"grad_norm": 0.13301688432693481, |
|
"learning_rate": 3.682143421535156e-06, |
|
"loss": 0.094, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.6990913949759487, |
|
"grad_norm": 0.15833823382854462, |
|
"learning_rate": 3.5631232305893046e-06, |
|
"loss": 0.1151, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.7001603420630679, |
|
"grad_norm": 0.1909790337085724, |
|
"learning_rate": 3.446023462398018e-06, |
|
"loss": 0.1443, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.7012292891501871, |
|
"grad_norm": 0.14769120514392853, |
|
"learning_rate": 3.330846448860259e-06, |
|
"loss": 0.1063, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.7022982362373063, |
|
"grad_norm": 0.14119353890419006, |
|
"learning_rate": 3.2175944835857617e-06, |
|
"loss": 0.0852, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.7033671833244255, |
|
"grad_norm": 0.16330832242965698, |
|
"learning_rate": 3.1062698218492724e-06, |
|
"loss": 0.1177, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.7044361304115446, |
|
"grad_norm": 0.15180334448814392, |
|
"learning_rate": 2.996874680545603e-06, |
|
"loss": 0.1117, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.7055050774986639, |
|
"grad_norm": 0.13674339652061462, |
|
"learning_rate": 2.889411238145545e-06, |
|
"loss": 0.0722, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.706574024585783, |
|
"grad_norm": 0.15879607200622559, |
|
"learning_rate": 2.7838816346524634e-06, |
|
"loss": 0.0888, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.7076429716729022, |
|
"grad_norm": 0.1543118804693222, |
|
"learning_rate": 2.6802879715596585e-06, |
|
"loss": 0.1114, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.7087119187600214, |
|
"grad_norm": 0.17156855762004852, |
|
"learning_rate": 2.5786323118085596e-06, |
|
"loss": 0.1223, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.7097808658471406, |
|
"grad_norm": 0.18760466575622559, |
|
"learning_rate": 2.4789166797476228e-06, |
|
"loss": 0.1395, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.7108498129342598, |
|
"grad_norm": 0.18010297417640686, |
|
"learning_rate": 2.381143061092006e-06, |
|
"loss": 0.1258, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.711918760021379, |
|
"grad_norm": 0.18845847249031067, |
|
"learning_rate": 2.2853134028840594e-06, |
|
"loss": 0.1218, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.7129877071084981, |
|
"grad_norm": 0.1728360801935196, |
|
"learning_rate": 2.191429613454499e-06, |
|
"loss": 0.1337, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.7140566541956174, |
|
"grad_norm": 0.11702633649110794, |
|
"learning_rate": 2.0994935623844692e-06, |
|
"loss": 0.084, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.7151256012827365, |
|
"grad_norm": 0.14172986149787903, |
|
"learning_rate": 2.009507080468298e-06, |
|
"loss": 0.0913, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.7161945483698557, |
|
"grad_norm": 0.17234764993190765, |
|
"learning_rate": 1.921471959676957e-06, |
|
"loss": 0.12, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.7172634954569749, |
|
"grad_norm": 0.17920809984207153, |
|
"learning_rate": 1.8353899531224906e-06, |
|
"loss": 0.1267, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.7183324425440941, |
|
"grad_norm": 0.14638297259807587, |
|
"learning_rate": 1.751262775023077e-06, |
|
"loss": 0.111, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.7194013896312133, |
|
"grad_norm": 0.13949543237686157, |
|
"learning_rate": 1.6690921006687899e-06, |
|
"loss": 0.0887, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.7204703367183325, |
|
"grad_norm": 0.19536994397640228, |
|
"learning_rate": 1.5888795663883904e-06, |
|
"loss": 0.1467, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.7215392838054516, |
|
"grad_norm": 0.15846571326255798, |
|
"learning_rate": 1.5106267695166543e-06, |
|
"loss": 0.1128, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.7226082308925709, |
|
"grad_norm": 0.13756738603115082, |
|
"learning_rate": 1.4343352683625411e-06, |
|
"loss": 0.0804, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.72367717797969, |
|
"grad_norm": 0.1360546052455902, |
|
"learning_rate": 1.3600065821782526e-06, |
|
"loss": 0.0979, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.7247461250668092, |
|
"grad_norm": 0.15744271874427795, |
|
"learning_rate": 1.2876421911288905e-06, |
|
"loss": 0.1013, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.7258150721539284, |
|
"grad_norm": 0.13687777519226074, |
|
"learning_rate": 1.217243536263013e-06, |
|
"loss": 0.0861, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.7268840192410476, |
|
"grad_norm": 0.1810121387243271, |
|
"learning_rate": 1.148812019483958e-06, |
|
"loss": 0.1093, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.7279529663281668, |
|
"grad_norm": 0.1584986299276352, |
|
"learning_rate": 1.0823490035218987e-06, |
|
"loss": 0.1093, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.729021913415286, |
|
"grad_norm": 0.15906885266304016, |
|
"learning_rate": 1.0178558119067315e-06, |
|
"loss": 0.1173, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.7300908605024051, |
|
"grad_norm": 0.18237455189228058, |
|
"learning_rate": 9.553337289416653e-07, |
|
"loss": 0.1355, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.7311598075895244, |
|
"grad_norm": 0.12682956457138062, |
|
"learning_rate": 8.947839996777285e-07, |
|
"loss": 0.0717, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.7322287546766435, |
|
"grad_norm": 0.160029336810112, |
|
"learning_rate": 8.362078298889021e-07, |
|
"loss": 0.1009, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.7332977017637627, |
|
"grad_norm": 0.15527096390724182, |
|
"learning_rate": 7.796063860481595e-07, |
|
"loss": 0.115, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.7343666488508819, |
|
"grad_norm": 0.15923753380775452, |
|
"learning_rate": 7.249807953041865e-07, |
|
"loss": 0.0966, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.7354355959380011, |
|
"grad_norm": 0.17644928395748138, |
|
"learning_rate": 6.723321454590092e-07, |
|
"loss": 0.1159, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.7365045430251203, |
|
"grad_norm": 0.1382478028535843, |
|
"learning_rate": 6.216614849462343e-07, |
|
"loss": 0.1078, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.7375734901122395, |
|
"grad_norm": 0.17270506918430328, |
|
"learning_rate": 5.729698228102653e-07, |
|
"loss": 0.0972, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.7386424371993586, |
|
"grad_norm": 0.14153295755386353, |
|
"learning_rate": 5.262581286861634e-07, |
|
"loss": 0.0839, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.7397113842864779, |
|
"grad_norm": 0.13959170877933502, |
|
"learning_rate": 4.815273327803182e-07, |
|
"loss": 0.0961, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.740780331373597, |
|
"grad_norm": 0.17043890058994293, |
|
"learning_rate": 4.387783258519518e-07, |
|
"loss": 0.1042, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.7418492784607162, |
|
"grad_norm": 0.1593395173549652, |
|
"learning_rate": 3.9801195919541014e-07, |
|
"loss": 0.1103, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.7429182255478354, |
|
"grad_norm": 0.14094416797161102, |
|
"learning_rate": 3.5922904462314387e-07, |
|
"loss": 0.1123, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.7439871726349546, |
|
"grad_norm": 0.15978014469146729, |
|
"learning_rate": 3.224303544495766e-07, |
|
"loss": 0.1089, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.7450561197220738, |
|
"grad_norm": 0.14494763314723969, |
|
"learning_rate": 2.876166214757392e-07, |
|
"loss": 0.0909, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.746125066809193, |
|
"grad_norm": 0.14322949945926666, |
|
"learning_rate": 2.547885389746485e-07, |
|
"loss": 0.0928, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.7471940138963121, |
|
"grad_norm": 0.13338381052017212, |
|
"learning_rate": 2.2394676067752918e-07, |
|
"loss": 0.0905, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.7482629609834314, |
|
"grad_norm": 0.1393570601940155, |
|
"learning_rate": 1.9509190076074657e-07, |
|
"loss": 0.1039, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.7482629609834314, |
|
"eval_loss": 0.11092506349086761, |
|
"eval_runtime": 289.0283, |
|
"eval_samples_per_second": 4.318, |
|
"eval_steps_per_second": 1.079, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.7493319080705505, |
|
"grad_norm": 0.16759201884269714, |
|
"learning_rate": 1.6822453383367186e-07, |
|
"loss": 0.1136, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.7504008551576697, |
|
"grad_norm": 0.149668887257576, |
|
"learning_rate": 1.4334519492711362e-07, |
|
"loss": 0.1026, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.7514698022447889, |
|
"grad_norm": 0.14984771609306335, |
|
"learning_rate": 1.204543794827595e-07, |
|
"loss": 0.0942, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.7525387493319081, |
|
"grad_norm": 0.15921737253665924, |
|
"learning_rate": 9.955254334328423e-08, |
|
"loss": 0.1044, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.7536076964190273, |
|
"grad_norm": 0.14484407007694244, |
|
"learning_rate": 8.064010274324573e-08, |
|
"loss": 0.0849, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.7546766435061465, |
|
"grad_norm": 0.1964549571275711, |
|
"learning_rate": 6.37174343008251e-08, |
|
"loss": 0.125, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.7557455905932656, |
|
"grad_norm": 0.1271902620792389, |
|
"learning_rate": 4.878487501033258e-08, |
|
"loss": 0.0783, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.7568145376803849, |
|
"grad_norm": 0.155805766582489, |
|
"learning_rate": 3.584272223546847e-08, |
|
"loss": 0.0972, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.757883484767504, |
|
"grad_norm": 0.15836969017982483, |
|
"learning_rate": 2.4891233703394634e-08, |
|
"loss": 0.095, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.7589524318546232, |
|
"grad_norm": 0.1573915183544159, |
|
"learning_rate": 1.593062749967178e-08, |
|
"loss": 0.1093, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.7600213789417424, |
|
"grad_norm": 0.15478433668613434, |
|
"learning_rate": 8.961082063829729e-09, |
|
"loss": 0.0977, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.7610903260288616, |
|
"grad_norm": 0.1593605875968933, |
|
"learning_rate": 3.982736185859093e-09, |
|
"loss": 0.1136, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.7621592731159808, |
|
"grad_norm": 0.1565905064344406, |
|
"learning_rate": 9.956890034468202e-10, |
|
"loss": 0.0978, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.7632282202031, |
|
"grad_norm": 0.1467771828174591, |
|
"learning_rate": 0.0, |
|
"loss": 0.1067, |
|
"step": 714 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 714, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 2, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 3.6866682847966003e+18, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
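A minimal sketch of how a state dump like the one above can be inspected, assuming it is saved under the conventional name trainer_state.json in the working directory and that matplotlib is installed (both assumptions; neither is recorded in the file). Train entries in log_history carry a "loss" key, while eval entries carry "eval_loss" instead, so the two series can be split by key.

import json

import matplotlib.pyplot as plt

# Load the state dumped by transformers.Trainer.
# "trainer_state.json" is the conventional filename; adjust the path if needed.
with open("trainer_state.json") as f:
    state = json.load(f)

# Split log_history into train-loss and eval-loss entries by key.
train = [e for e in state["log_history"] if "loss" in e]
evals = [e for e in state["log_history"] if "eval_loss" in e]

# Plot train loss per step, with eval loss overlaid at the eval steps
# (here, every 100 steps per eval_steps).
plt.plot([e["step"] for e in train], [e["loss"] for e in train], label="train loss")
plt.scatter([e["step"] for e in evals], [e["eval_loss"] for e in evals],
            color="red", zorder=3, label="eval loss")
plt.xlabel("step")
plt.ylabel("loss")
plt.legend()
plt.show()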