{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1316,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0007598784194528875, "grad_norm": 5.392373980748419, "learning_rate": 5.000000000000001e-07, "loss": 1.1593, "step": 1},
    {"epoch": 0.001519756838905775, "grad_norm": 5.615475781504103, "learning_rate": 1.0000000000000002e-06, "loss": 1.1588, "step": 2},
    {"epoch": 0.0022796352583586625, "grad_norm": 4.914069653695411, "learning_rate": 1.5e-06, "loss": 1.1712, "step": 3},
    {"epoch": 0.00303951367781155, "grad_norm": 4.069148923342371, "learning_rate": 2.0000000000000003e-06, "loss": 1.1347, "step": 4},
    {"epoch": 0.003799392097264438, "grad_norm": 4.589236750514864, "learning_rate": 2.5e-06, "loss": 1.1706, "step": 5},
    {"epoch": 0.004559270516717325, "grad_norm": 4.9434297504777005, "learning_rate": 3e-06, "loss": 1.1772, "step": 6},
    {"epoch": 0.005319148936170213, "grad_norm": 3.4416977119500243, "learning_rate": 3.5e-06, "loss": 1.0584, "step": 7},
    {"epoch": 0.0060790273556231, "grad_norm": 4.070486113809039, "learning_rate": 4.000000000000001e-06, "loss": 1.1407, "step": 8},
    {"epoch": 0.006838905775075988, "grad_norm": 2.7096606367740073, "learning_rate": 4.5e-06, "loss": 1.088, "step": 9},
    {"epoch": 0.007598784194528876, "grad_norm": 2.889884611574226, "learning_rate": 5e-06, "loss": 1.0615, "step": 10},
    {"epoch": 0.008358662613981762, "grad_norm": 3.0319033761268064, "learning_rate": 5.500000000000001e-06, "loss": 1.0594, "step": 11},
    {"epoch": 0.00911854103343465, "grad_norm": 3.1212228696636144, "learning_rate": 6e-06, "loss": 0.954, "step": 12},
    {"epoch": 0.009878419452887538, "grad_norm": 3.711991792308233, "learning_rate": 6.5000000000000004e-06, "loss": 1.142, "step": 13},
    {"epoch": 0.010638297872340425, "grad_norm": 2.439421741044695, "learning_rate": 7e-06, "loss": 0.9602, "step": 14},
    {"epoch": 0.011398176291793313, "grad_norm": 1.9628371660327635, "learning_rate": 7.500000000000001e-06, "loss": 0.9027, "step": 15},
    {"epoch": 0.0121580547112462, "grad_norm": 3.257417507004413, "learning_rate": 8.000000000000001e-06, "loss": 1.039, "step": 16},
    {"epoch": 0.012917933130699088, "grad_norm": 2.4146003969998846, "learning_rate": 8.5e-06, "loss": 1.0201, "step": 17},
    {"epoch": 0.013677811550151976, "grad_norm": 3.09862075736547, "learning_rate": 9e-06, "loss": 1.057, "step": 18},
    {"epoch": 0.014437689969604863, "grad_norm": 2.7183441077347377, "learning_rate": 9.5e-06, "loss": 0.9938, "step": 19},
    {"epoch": 0.015197568389057751, "grad_norm": 1.9806218870322116, "learning_rate": 1e-05, "loss": 0.9274, "step": 20},
    {"epoch": 0.015957446808510637, "grad_norm": 2.213785716942658, "learning_rate": 1.0500000000000001e-05, "loss": 0.9291, "step": 21},
    {"epoch": 0.016717325227963525, "grad_norm": 2.2031307140835095, "learning_rate": 1.1000000000000001e-05, "loss": 0.9623, "step": 22},
    {"epoch": 0.017477203647416412, "grad_norm": 2.143276503422077, "learning_rate": 1.15e-05, "loss": 0.9017, "step": 23},
    {"epoch": 0.0182370820668693, "grad_norm": 2.5934685165822136, "learning_rate": 1.2e-05, "loss": 1.0336, "step": 24},
    {"epoch": 0.018996960486322188, "grad_norm": 1.7265298518991723, "learning_rate": 1.25e-05, "loss": 0.826, "step": 25},
    {"epoch": 0.019756838905775075, "grad_norm": 1.9637629397975633, "learning_rate": 1.3000000000000001e-05, "loss": 1.0266, "step": 26},
    {"epoch": 0.020516717325227963, "grad_norm": 1.6548259502003555, "learning_rate": 1.3500000000000001e-05, "loss": 0.8499, "step": 27},
    {"epoch": 0.02127659574468085, "grad_norm": 1.496950294252413, "learning_rate": 1.4e-05, "loss": 0.9429, "step": 28},
    {"epoch": 0.022036474164133738, "grad_norm": 1.9111993504389688, "learning_rate": 1.45e-05, "loss": 1.0238, "step": 29},
    {"epoch": 0.022796352583586626, "grad_norm": 2.118572184185085, "learning_rate": 1.5000000000000002e-05, "loss": 0.8923, "step": 30},
    {"epoch": 0.023556231003039513, "grad_norm": 1.7079943693934316, "learning_rate": 1.55e-05, "loss": 0.986, "step": 31},
    {"epoch": 0.0243161094224924, "grad_norm": 1.4156759204322658, "learning_rate": 1.6000000000000003e-05, "loss": 0.9132, "step": 32},
    {"epoch": 0.02507598784194529, "grad_norm": 1.8467608530833703, "learning_rate": 1.65e-05, "loss": 0.9626, "step": 33},
    {"epoch": 0.025835866261398176, "grad_norm": 2.292348819555964, "learning_rate": 1.7e-05, "loss": 0.9339, "step": 34},
    {"epoch": 0.026595744680851064, "grad_norm": 2.025279283460441, "learning_rate": 1.7500000000000002e-05, "loss": 0.9731, "step": 35},
    {"epoch": 0.02735562310030395, "grad_norm": 1.7197081377527985, "learning_rate": 1.8e-05, "loss": 0.8812, "step": 36},
    {"epoch": 0.02811550151975684, "grad_norm": 1.4784991973389618, "learning_rate": 1.8500000000000002e-05, "loss": 0.8295, "step": 37},
    {"epoch": 0.028875379939209727, "grad_norm": 1.7999946297063045, "learning_rate": 1.9e-05, "loss": 0.942, "step": 38},
    {"epoch": 0.029635258358662615, "grad_norm": 1.57035411083628, "learning_rate": 1.95e-05, "loss": 0.9143, "step": 39},
    {"epoch": 0.030395136778115502, "grad_norm": 1.4742382818502906, "learning_rate": 2e-05, "loss": 0.8261, "step": 40},
    {"epoch": 0.03115501519756839, "grad_norm": 1.685470546563188, "learning_rate": 1.9999969691239106e-05, "loss": 0.9266, "step": 41},
    {"epoch": 0.031914893617021274, "grad_norm": 1.819891227503006, "learning_rate": 1.999987876514015e-05, "loss": 0.7966, "step": 42},
    {"epoch": 0.03267477203647416, "grad_norm": 1.6403215187550444, "learning_rate": 1.9999727222254298e-05, "loss": 0.8766, "step": 43},
    {"epoch": 0.03343465045592705, "grad_norm": 1.661111203316323, "learning_rate": 1.999951506350017e-05, "loss": 0.8838, "step": 44},
    {"epoch": 0.03419452887537994, "grad_norm": 1.8665661460800034, "learning_rate": 1.999924229016382e-05, "loss": 0.9154, "step": 45},
    {"epoch": 0.034954407294832825, "grad_norm": 1.2156637252590854, "learning_rate": 1.999890890389873e-05, "loss": 0.8792, "step": 46},
    {"epoch": 0.03571428571428571, "grad_norm": 1.5973421574876991, "learning_rate": 1.9998514906725805e-05, "loss": 1.0, "step": 47},
    {"epoch": 0.0364741641337386, "grad_norm": 1.8290400076895916, "learning_rate": 1.9998060301033363e-05, "loss": 0.8365, "step": 48},
    {"epoch": 0.03723404255319149, "grad_norm": 1.5044749699898898, "learning_rate": 1.9997545089577105e-05, "loss": 0.8659, "step": 49},
    {"epoch": 0.037993920972644375, "grad_norm": 1.5176053017436755, "learning_rate": 1.9996969275480116e-05, "loss": 0.828, "step": 50},
    {"epoch": 0.03875379939209726, "grad_norm": 1.6475722779572208, "learning_rate": 1.9996332862232843e-05, "loss": 0.7738, "step": 51},
    {"epoch": 0.03951367781155015, "grad_norm": 1.3692033978549916, "learning_rate": 1.9995635853693057e-05, "loss": 0.7984, "step": 52},
    {"epoch": 0.04027355623100304, "grad_norm": 1.5086570356724653, "learning_rate": 1.999487825408586e-05, "loss": 0.8643, "step": 53},
    {"epoch": 0.041033434650455926, "grad_norm": 1.7307732470201098, "learning_rate": 1.999406006800363e-05, "loss": 0.9023, "step": 54},
    {"epoch": 0.04179331306990881, "grad_norm": 1.4814422822505877, "learning_rate": 1.9993181300406006e-05, "loss": 0.9016, "step": 55},
    {"epoch": 0.0425531914893617, "grad_norm": 1.649565494890097, "learning_rate": 1.999224195661986e-05, "loss": 0.9152, "step": 56},
    {"epoch": 0.04331306990881459, "grad_norm": 1.428811652134738, "learning_rate": 1.9991242042339265e-05, "loss": 0.8827, "step": 57},
    {"epoch": 0.044072948328267476, "grad_norm": 1.6088153906886453, "learning_rate": 1.999018156362545e-05, "loss": 0.8855, "step": 58},
    {"epoch": 0.044832826747720364, "grad_norm": 1.21757428725773, "learning_rate": 1.998906052690677e-05, "loss": 0.8445, "step": 59},
    {"epoch": 0.04559270516717325, "grad_norm": 1.4622543628469789, "learning_rate": 1.9987878938978684e-05, "loss": 0.8945, "step": 60},
    {"epoch": 0.04635258358662614, "grad_norm": 1.3459144100217668, "learning_rate": 1.9986636807003676e-05, "loss": 0.7925, "step": 61},
    {"epoch": 0.04711246200607903, "grad_norm": 1.8084721790905864, "learning_rate": 1.998533413851124e-05, "loss": 0.9144, "step": 62},
    {"epoch": 0.047872340425531915, "grad_norm": 1.563168210948487, "learning_rate": 1.9983970941397837e-05, "loss": 0.9009, "step": 63},
    {"epoch": 0.0486322188449848, "grad_norm": 1.2514060627145838, "learning_rate": 1.9982547223926826e-05, "loss": 0.8268, "step": 64},
    {"epoch": 0.04939209726443769, "grad_norm": 1.5896045682936804, "learning_rate": 1.998106299472843e-05, "loss": 0.8506, "step": 65},
    {"epoch": 0.05015197568389058, "grad_norm": 1.4741152211547537, "learning_rate": 1.997951826279968e-05, "loss": 0.86, "step": 66},
    {"epoch": 0.050911854103343465, "grad_norm": 1.70832273064182, "learning_rate": 1.9977913037504355e-05, "loss": 0.9067, "step": 67},
    {"epoch": 0.05167173252279635, "grad_norm": 1.3751246290228405, "learning_rate": 1.997624732857294e-05, "loss": 0.8035, "step": 68},
    {"epoch": 0.05243161094224924, "grad_norm": 1.34252014451877, "learning_rate": 1.9974521146102535e-05, "loss": 0.8675, "step": 69},
    {"epoch": 0.05319148936170213, "grad_norm": 1.4143421357527768, "learning_rate": 1.9972734500556847e-05, "loss": 0.8564, "step": 70},
    {"epoch": 0.053951367781155016, "grad_norm": 1.6403313489416589, "learning_rate": 1.997088740276607e-05, "loss": 0.7951, "step": 71},
    {"epoch": 0.0547112462006079, "grad_norm": 1.219845844216479, "learning_rate": 1.9968979863926857e-05, "loss": 0.8174, "step": 72},
    {"epoch": 0.05547112462006079, "grad_norm": 1.59139585827731, "learning_rate": 1.996701189560223e-05, "loss": 0.8493, "step": 73},
    {"epoch": 0.05623100303951368, "grad_norm": 1.4699277770542065, "learning_rate": 1.9964983509721527e-05, "loss": 0.9078, "step": 74},
    {"epoch": 0.056990881458966566, "grad_norm": 1.5574253270254241, "learning_rate": 1.9962894718580325e-05, "loss": 0.7197, "step": 75},
    {"epoch": 0.057750759878419454, "grad_norm": 1.5509163193218476, "learning_rate": 1.9960745534840357e-05, "loss": 0.8237, "step": 76},
    {"epoch": 0.05851063829787234, "grad_norm": 1.4408780150628306, "learning_rate": 1.9958535971529434e-05, "loss": 1.0039, "step": 77},
    {"epoch": 0.05927051671732523, "grad_norm": 1.453487595280164, "learning_rate": 1.9956266042041394e-05, "loss": 0.8981, "step": 78},
    {"epoch": 0.06003039513677812, "grad_norm": 1.4539780740287995, "learning_rate": 1.995393576013598e-05, "loss": 0.849, "step": 79},
    {"epoch": 0.060790273556231005, "grad_norm": 1.4456479361644199, "learning_rate": 1.995154513993878e-05, "loss": 0.818, "step": 80},
    {"epoch": 0.06155015197568389, "grad_norm": 1.5015246396319477, "learning_rate": 1.9949094195941152e-05, "loss": 0.8152, "step": 81},
    {"epoch": 0.06231003039513678, "grad_norm": 2.184036159077668, "learning_rate": 1.99465829430001e-05, "loss": 0.7642, "step": 82},
    {"epoch": 0.06306990881458967, "grad_norm": 1.4495582363572672, "learning_rate": 1.9944011396338223e-05, "loss": 0.8664, "step": 83},
    {"epoch": 0.06382978723404255, "grad_norm": 1.5529791956246375, "learning_rate": 1.9941379571543597e-05, "loss": 0.815, "step": 84},
    {"epoch": 0.06458966565349544, "grad_norm": 1.543607432288304, "learning_rate": 1.9938687484569694e-05, "loss": 0.7809, "step": 85},
    {"epoch": 0.06534954407294832, "grad_norm": 1.4084163251743675, "learning_rate": 1.993593515173528e-05, "loss": 0.8651, "step": 86},
    {"epoch": 0.06610942249240122, "grad_norm": 1.5092848953424813, "learning_rate": 1.99331225897243e-05, "loss": 0.8116, "step": 87},
    {"epoch": 0.0668693009118541, "grad_norm": 1.198520037496096, "learning_rate": 1.993024981558583e-05, "loss": 0.7682, "step": 88},
    {"epoch": 0.067629179331307, "grad_norm": 1.338492147841421, "learning_rate": 1.9927316846733902e-05, "loss": 0.8698, "step": 89},
    {"epoch": 0.06838905775075987, "grad_norm": 1.21422692756782, "learning_rate": 1.9924323700947446e-05, "loss": 0.8362, "step": 90},
    {"epoch": 0.06914893617021277, "grad_norm": 1.4137231652492608, "learning_rate": 1.9921270396370175e-05, "loss": 0.9347, "step": 91},
    {"epoch": 0.06990881458966565, "grad_norm": 1.4021530103978004, "learning_rate": 1.991815695151046e-05, "loss": 0.8722, "step": 92},
    {"epoch": 0.07066869300911854, "grad_norm": 1.595914265986467, "learning_rate": 1.9914983385241235e-05, "loss": 0.8869, "step": 93},
    {"epoch": 0.07142857142857142, "grad_norm": 1.343571323940674, "learning_rate": 1.991174971679987e-05, "loss": 0.7631, "step": 94},
    {"epoch": 0.07218844984802432, "grad_norm": 1.4708678977572762, "learning_rate": 1.990845596578807e-05, "loss": 0.8463, "step": 95},
    {"epoch": 0.0729483282674772, "grad_norm": 1.332039881729776, "learning_rate": 1.9905102152171728e-05, "loss": 0.9065, "step": 96},
    {"epoch": 0.0737082066869301, "grad_norm": 1.2277713334137743, "learning_rate": 1.990168829628083e-05, "loss": 0.752, "step": 97},
    {"epoch": 0.07446808510638298, "grad_norm": 1.6099144553470797, "learning_rate": 1.989821441880933e-05, "loss": 0.9118, "step": 98},
    {"epoch": 0.07522796352583587, "grad_norm": 1.5664820017750818, "learning_rate": 1.989468054081501e-05, "loss": 0.8291, "step": 99},
    {"epoch": 0.07598784194528875, "grad_norm": 1.4140769704353136, "learning_rate": 1.9891086683719362e-05, "loss": 0.9577, "step": 100},
    {"epoch": 0.07674772036474165, "grad_norm": 1.3864767137337772, "learning_rate": 1.988743286930746e-05, "loss": 0.8285, "step": 101},
    {"epoch": 0.07750759878419453, "grad_norm": 1.5455729545188828, "learning_rate": 1.988371911972782e-05, "loss": 0.8088, "step": 102},
    {"epoch": 0.07826747720364742, "grad_norm": 1.3171862271945194, "learning_rate": 1.987994545749227e-05, "loss": 0.7693, "step": 103},
    {"epoch": 0.0790273556231003, "grad_norm": 1.3893324250233694, "learning_rate": 1.9876111905475816e-05, "loss": 0.7947, "step": 104},
    {"epoch": 0.0797872340425532, "grad_norm": 1.4024118979965372, "learning_rate": 1.98722184869165e-05, "loss": 0.8308, "step": 105},
    {"epoch": 0.08054711246200608, "grad_norm": 2.012851667754775, "learning_rate": 1.9868265225415263e-05, "loss": 0.8298, "step": 106},
    {"epoch": 0.08130699088145897, "grad_norm": 1.3482414109433463, "learning_rate": 1.9864252144935795e-05, "loss": 0.7672, "step": 107},
    {"epoch": 0.08206686930091185, "grad_norm": 1.2317674879997527, "learning_rate": 1.9860179269804394e-05, "loss": 0.7817, "step": 108},
    {"epoch": 0.08282674772036475, "grad_norm": 1.3981203786062606, "learning_rate": 1.985604662470982e-05, "loss": 0.6889, "step": 109},
    {"epoch": 0.08358662613981763, "grad_norm": 1.303454367915867, "learning_rate": 1.9851854234703146e-05, "loss": 0.8761, "step": 110},
    {"epoch": 0.08434650455927052, "grad_norm": 1.3103465623740185, "learning_rate": 1.9847602125197597e-05, "loss": 0.882, "step": 111},
    {"epoch": 0.0851063829787234, "grad_norm": 1.2873199820635277, "learning_rate": 1.984329032196841e-05, "loss": 0.8828, "step": 112},
    {"epoch": 0.0858662613981763, "grad_norm": 1.3713946170441742, "learning_rate": 1.9838918851152668e-05, "loss": 0.811, "step": 113},
    {"epoch": 0.08662613981762918, "grad_norm": 1.2812852657951437, "learning_rate": 1.9834487739249146e-05, "loss": 0.7949, "step": 114},
    {"epoch": 0.08738601823708207, "grad_norm": 1.5623819005523776, "learning_rate": 1.982999701311814e-05, "loss": 0.8683, "step": 115},
    {"epoch": 0.08814589665653495, "grad_norm": 1.6172726857032396, "learning_rate": 1.982544669998132e-05, "loss": 0.847, "step": 116},
    {"epoch": 0.08890577507598785, "grad_norm": 1.3213604777791341, "learning_rate": 1.982083682742156e-05, "loss": 0.798, "step": 117},
    {"epoch": 0.08966565349544073, "grad_norm": 1.3084947029055982, "learning_rate": 1.9816167423382766e-05, "loss": 0.8813, "step": 118},
    {"epoch": 0.09042553191489362, "grad_norm": 1.523391465498402, "learning_rate": 1.9811438516169703e-05, "loss": 0.9239, "step": 119},
    {"epoch": 0.0911854103343465, "grad_norm": 1.238259855411423, "learning_rate": 1.9806650134447837e-05, "loss": 0.8683, "step": 120},
    {"epoch": 0.0919452887537994, "grad_norm": 1.3192009377288854, "learning_rate": 1.9801802307243153e-05, "loss": 0.7316, "step": 121},
    {"epoch": 0.09270516717325228, "grad_norm": 2.0096495892585815, "learning_rate": 1.9796895063941978e-05, "loss": 0.7898, "step": 122},
    {"epoch": 0.09346504559270517, "grad_norm": 1.3969255987787645, "learning_rate": 1.97919284342908e-05, "loss": 0.8277, "step": 123},
    {"epoch": 0.09422492401215805, "grad_norm": 1.397460771205679, "learning_rate": 1.9786902448396102e-05, "loss": 0.9012, "step": 124},
    {"epoch": 0.09498480243161095, "grad_norm": 1.4257091875606565, "learning_rate": 1.9781817136724166e-05, "loss": 0.8008, "step": 125},
    {"epoch": 0.09574468085106383, "grad_norm": 1.244569600044323, "learning_rate": 1.9776672530100886e-05, "loss": 0.8717, "step": 126},
    {"epoch": 0.09650455927051672, "grad_norm": 1.188946081032126, "learning_rate": 1.9771468659711595e-05, "loss": 0.7327, "step": 127},
    {"epoch": 0.0972644376899696, "grad_norm": 1.554362846503631, "learning_rate": 1.976620555710087e-05, "loss": 0.8527, "step": 128},
    {"epoch": 0.0980243161094225, "grad_norm": 1.555728719202439, "learning_rate": 1.9760883254172327e-05, "loss": 0.7737, "step": 129},
    {"epoch": 0.09878419452887538, "grad_norm": 1.503153607995042, "learning_rate": 1.975550178318845e-05, "loss": 0.8309, "step": 130},
    {"epoch": 0.09954407294832827, "grad_norm": 1.3172064729258408, "learning_rate": 1.9750061176770385e-05, "loss": 0.7211, "step": 131},
    {"epoch": 0.10030395136778116, "grad_norm": 1.3853806174052035, "learning_rate": 1.9744561467897735e-05, "loss": 0.7606, "step": 132},
    {"epoch": 0.10106382978723404, "grad_norm": 1.2585010964278525, "learning_rate": 1.9739002689908377e-05, "loss": 0.8745, "step": 133},
    {"epoch": 0.10182370820668693, "grad_norm": 1.3911881806666273, "learning_rate": 1.9733384876498248e-05, "loss": 0.7501, "step": 134},
    {"epoch": 0.10258358662613981, "grad_norm": 1.251211848009401, "learning_rate": 1.9727708061721132e-05, "loss": 0.8233, "step": 135},
    {"epoch": 0.1033434650455927, "grad_norm": 1.2337054183810516, "learning_rate": 1.972197227998848e-05, "loss": 0.826, "step": 136},
    {"epoch": 0.10410334346504559, "grad_norm": 1.5041090979198204, "learning_rate": 1.9716177566069174e-05, "loss": 0.8783, "step": 137},
    {"epoch": 0.10486322188449848, "grad_norm": 1.5101046814770216, "learning_rate": 1.9710323955089343e-05, "loss": 0.7804, "step": 138},
    {"epoch": 0.10562310030395136, "grad_norm": 1.5072660387561263, "learning_rate": 1.9704411482532116e-05, "loss": 0.734, "step": 139},
    {"epoch": 0.10638297872340426, "grad_norm": 1.2921081964890746, "learning_rate": 1.9698440184237442e-05, "loss": 0.849, "step": 140},
    {"epoch": 0.10714285714285714, "grad_norm": 1.541853671980931, "learning_rate": 1.9692410096401852e-05, "loss": 0.8224, "step": 141},
    {"epoch": 0.10790273556231003, "grad_norm": 1.3071587615374576, "learning_rate": 1.968632125557824e-05, "loss": 0.7587, "step": 142},
    {"epoch": 0.10866261398176291, "grad_norm": 1.1838862172669808, "learning_rate": 1.968017369867565e-05, "loss": 0.7568, "step": 143},
    {"epoch": 0.1094224924012158, "grad_norm": 1.4366564156184205, "learning_rate": 1.9673967462959052e-05, "loss": 0.7987, "step": 144},
    {"epoch": 0.11018237082066869, "grad_norm": 1.3671489418655876, "learning_rate": 1.966770258604911e-05, "loss": 0.7297, "step": 145},
    {"epoch": 0.11094224924012158, "grad_norm": 1.4206705119179035, "learning_rate": 1.9661379105921948e-05, "loss": 0.8031, "step": 146},
    {"epoch": 0.11170212765957446, "grad_norm": 1.2580241072105247, "learning_rate": 1.9654997060908946e-05, "loss": 0.8033, "step": 147},
    {"epoch": 0.11246200607902736, "grad_norm": 1.4646612072754077, "learning_rate": 1.9648556489696472e-05, "loss": 0.7896, "step": 148},
    {"epoch": 0.11322188449848024, "grad_norm": 1.6275151468853166, "learning_rate": 1.9642057431325675e-05, "loss": 0.8652, "step": 149},
    {"epoch": 0.11398176291793313, "grad_norm": 1.3685803210010863, "learning_rate": 1.963549992519223e-05, "loss": 0.7809, "step": 150},
    {"epoch": 0.11474164133738601, "grad_norm": 1.5354586417497063, "learning_rate": 1.9628884011046123e-05, "loss": 0.7323, "step": 151},
    {"epoch": 0.11550151975683891, "grad_norm": 1.3889822903157472, "learning_rate": 1.9622209728991382e-05, "loss": 0.8355, "step": 152},
    {"epoch": 0.11626139817629179, "grad_norm": 1.2631071084505394, "learning_rate": 1.9615477119485855e-05, "loss": 0.7515, "step": 153},
    {"epoch": 0.11702127659574468, "grad_norm": 1.4221865919993402, "learning_rate": 1.9608686223340944e-05, "loss": 0.8088, "step": 154},
    {"epoch": 0.11778115501519756, "grad_norm": 1.273295713297955, "learning_rate": 1.9601837081721387e-05, "loss": 0.766, "step": 155},
    {"epoch": 0.11854103343465046, "grad_norm": 1.2893316371044268, "learning_rate": 1.9594929736144978e-05, "loss": 0.8533, "step": 156},
    {"epoch": 0.11930091185410334, "grad_norm": 1.3268146271180419, "learning_rate": 1.958796422848233e-05, "loss": 0.6719, "step": 157},
    {"epoch": 0.12006079027355623, "grad_norm": 1.1960561447896023, "learning_rate": 1.9580940600956636e-05, "loss": 0.7584, "step": 158},
    {"epoch": 0.12082066869300911, "grad_norm": 1.4411873524033865, "learning_rate": 1.9573858896143376e-05, "loss": 0.8278, "step": 159},
    {"epoch": 0.12158054711246201, "grad_norm": 1.3416181531983082, "learning_rate": 1.9566719156970095e-05, "loss": 0.8561, "step": 160},
    {"epoch": 0.12234042553191489, "grad_norm": 1.2036183203443838, "learning_rate": 1.955952142671612e-05, "loss": 0.7858, "step": 161},
    {"epoch": 0.12310030395136778, "grad_norm": 1.1870046808780073, "learning_rate": 1.9552265749012306e-05, "loss": 0.7806, "step": 162},
    {"epoch": 0.12386018237082067, "grad_norm": 1.222302205896466, "learning_rate": 1.9544952167840777e-05, "loss": 0.8627, "step": 163},
    {"epoch": 0.12462006079027356, "grad_norm": 1.5806660739206777, "learning_rate": 1.9537580727534643e-05, "loss": 0.783, "step": 164},
    {"epoch": 0.12537993920972645, "grad_norm": 1.3781176998478113, "learning_rate": 1.953015147277776e-05, "loss": 0.8269, "step": 165},
    {"epoch": 0.12613981762917933, "grad_norm": 1.4195510675367895, "learning_rate": 1.9522664448604417e-05, "loss": 0.8748, "step": 166},
    {"epoch": 0.12689969604863222, "grad_norm": 1.5174887968385815, "learning_rate": 1.9515119700399107e-05, "loss": 0.8833, "step": 167},
    {"epoch": 0.1276595744680851, "grad_norm": 1.3438248097416066, "learning_rate": 1.9507517273896224e-05, "loss": 0.8357, "step": 168},
    {"epoch": 0.128419452887538, "grad_norm": 1.3500264760925325, "learning_rate": 1.9499857215179788e-05, "loss": 0.7527, "step": 169},
    {"epoch": 0.12917933130699089, "grad_norm": 1.2983392832133898, "learning_rate": 1.949213957068318e-05, "loss": 0.8242, "step": 170},
    {"epoch": 0.12993920972644377, "grad_norm": 1.6869508051753024, "learning_rate": 1.9484364387188848e-05, "loss": 0.8743, "step": 171},
    {"epoch": 0.13069908814589665, "grad_norm": 1.4958875336688886, "learning_rate": 1.9476531711828027e-05, "loss": 0.8884, "step": 172},
    {"epoch": 0.13145896656534956, "grad_norm": 1.339278362169919, "learning_rate": 1.9468641592080452e-05, "loss": 0.7361, "step": 173},
    {"epoch": 0.13221884498480244, "grad_norm": 1.2137954227618424, "learning_rate": 1.9460694075774082e-05, "loss": 0.8389, "step": 174},
    {"epoch": 0.13297872340425532, "grad_norm": 1.251947457987828, "learning_rate": 1.9452689211084775e-05, "loss": 0.7373, "step": 175},
    {"epoch": 0.1337386018237082, "grad_norm": 1.4159733752838533, "learning_rate": 1.9444627046536055e-05, "loss": 0.8107, "step": 176},
    {"epoch": 0.1344984802431611, "grad_norm": 1.308294027207144, "learning_rate": 1.9436507630998758e-05, "loss": 0.7178, "step": 177},
    {"epoch": 0.135258358662614, "grad_norm": 1.502145995616604, "learning_rate": 1.9428331013690763e-05, "loss": 0.8856, "step": 178},
    {"epoch": 0.13601823708206687, "grad_norm": 1.2598852554569113, "learning_rate": 1.9420097244176708e-05, "loss": 0.8292, "step": 179},
    {"epoch": 0.13677811550151975, "grad_norm": 1.6076315002668338, "learning_rate": 1.9411806372367656e-05, "loss": 0.8087, "step": 180},
    {"epoch": 0.13753799392097266, "grad_norm": 1.124290885756185, "learning_rate": 1.940345844852082e-05, "loss": 0.7432, "step": 181},
    {"epoch": 0.13829787234042554, "grad_norm": 1.0686912126196735, "learning_rate": 1.9395053523239243e-05, "loss": 0.7699, "step": 182},
    {"epoch": 0.13905775075987842, "grad_norm": 1.1573699691152508, "learning_rate": 1.9386591647471508e-05, "loss": 0.7677, "step": 183},
    {"epoch": 0.1398176291793313, "grad_norm": 1.3648977894265473, "learning_rate": 1.9378072872511397e-05, "loss": 0.8221, "step": 184},
    {"epoch": 0.1405775075987842, "grad_norm": 1.1364664694163902, "learning_rate": 1.936949724999762e-05, "loss": 0.8384, "step": 185},
    {"epoch": 0.1413373860182371, "grad_norm": 1.3416024694427788, "learning_rate": 1.936086483191347e-05, "loss": 0.7966, "step": 186},
    {"epoch": 0.14209726443768997, "grad_norm": 1.4957047699486865, "learning_rate": 1.9352175670586534e-05, "loss": 0.7893, "step": 187},
    {"epoch": 0.14285714285714285, "grad_norm": 1.244185236914744, "learning_rate": 1.934342981868835e-05, "loss": 0.8186, "step": 188},
    {"epoch": 0.14361702127659576, "grad_norm": 1.0898922714772983, "learning_rate": 1.93346273292341e-05, "loss": 0.7498, "step": 189},
    {"epoch": 0.14437689969604864, "grad_norm": 1.5550980224719488, "learning_rate": 1.93257682555823e-05, "loss": 0.8653, "step": 190},
    {"epoch": 0.14513677811550152, "grad_norm": 1.0239577576099583, "learning_rate": 1.9316852651434463e-05, "loss": 0.7954, "step": 191},
    {"epoch": 0.1458966565349544, "grad_norm": 1.249219097874476, "learning_rate": 1.9307880570834762e-05, "loss": 0.8113, "step": 192},
    {"epoch": 0.1466565349544073, "grad_norm": 1.2536737063563324, "learning_rate": 1.929885206816973e-05, "loss": 0.8924, "step": 193},
    {"epoch": 0.1474164133738602, "grad_norm": 1.248144446624885, "learning_rate": 1.9289767198167918e-05, "loss": 0.8558, "step": 194},
    {"epoch": 0.14817629179331307, "grad_norm": 1.3218071038847665, "learning_rate": 1.9280626015899548e-05, "loss": 0.6789, "step": 195},
    {"epoch": 0.14893617021276595, "grad_norm": 1.3457663209794053, "learning_rate": 1.9271428576776206e-05, "loss": 0.909, "step": 196},
    {"epoch": 0.14969604863221886, "grad_norm": 1.2355616152753617, "learning_rate": 1.9262174936550485e-05, "loss": 0.8809, "step": 197},
    {"epoch": 0.15045592705167174, "grad_norm": 1.2568018009001072, "learning_rate": 1.9252865151315667e-05, "loss": 0.8291, "step": 198},
    {"epoch": 0.15121580547112462, "grad_norm": 1.3065339012403299, "learning_rate": 1.9243499277505355e-05, "loss": 0.7787, "step": 199},
    {"epoch": 0.1519756838905775, "grad_norm": 1.1262100235788826, "learning_rate": 1.9234077371893156e-05, "loss": 0.8359, "step": 200},
    {"epoch": 0.15273556231003038, "grad_norm": 1.1213547156508892, "learning_rate": 1.922459949159233e-05, "loss": 0.7428, "step": 201},
    {"epoch": 0.1534954407294833, "grad_norm": 1.1864132070158617, "learning_rate": 1.921506569405544e-05, "loss": 0.8355, "step": 202},
    {"epoch": 0.15425531914893617, "grad_norm": 1.292709508551478, "learning_rate": 1.9205476037073997e-05, "loss": 0.8431, "step": 203},
    {"epoch": 0.15501519756838905, "grad_norm": 1.1868138734025648, "learning_rate": 1.9195830578778133e-05, "loss": 0.745, "step": 204},
    {"epoch": 0.15577507598784193, "grad_norm": 1.243939822562013, "learning_rate": 1.918612937763622e-05, "loss": 0.8194, "step": 205},
    {"epoch": 0.15653495440729484, "grad_norm": 1.149116048090664, "learning_rate": 1.917637249245454e-05, "loss": 0.8315, "step": 206},
    {"epoch": 0.15729483282674772, "grad_norm": 1.2601717567550066, "learning_rate": 1.9166559982376905e-05, "loss": 0.8913, "step": 207},
    {"epoch": 0.1580547112462006, "grad_norm": 1.328584177588409, "learning_rate": 1.9156691906884327e-05, "loss": 0.8223, "step": 208},
    {"epoch": 0.15881458966565348, "grad_norm": 1.5547321827587175, "learning_rate": 1.914676832579463e-05, "loss": 0.791, "step": 209},
    {"epoch": 0.1595744680851064, "grad_norm": 1.2058407361433814, "learning_rate": 1.913678929926211e-05, "loss": 0.779, "step": 210},
    {"epoch": 0.16033434650455927, "grad_norm": 1.3396307771537412, "learning_rate": 1.912675488777714e-05, "loss": 0.7232, "step": 211},
    {"epoch": 0.16109422492401215, "grad_norm": 1.2998190586100276, "learning_rate": 1.911666515216585e-05, "loss": 0.7749, "step": 212},
    {"epoch": 0.16185410334346503, "grad_norm": 1.1849324726680373, "learning_rate": 1.9106520153589708e-05, "loss": 0.8955, "step": 213},
    {"epoch": 0.16261398176291794, "grad_norm": 1.3790790669413162, "learning_rate": 1.9096319953545186e-05, "loss": 0.9048, "step": 214},
    {"epoch": 0.16337386018237082, "grad_norm": 1.1937403185789235, "learning_rate": 1.9086064613863366e-05, "loss": 0.7869, "step": 215},
    {"epoch": 0.1641337386018237, "grad_norm": 1.3427073302617243, "learning_rate": 1.9075754196709574e-05, "loss": 0.8516, "step": 216},
    {"epoch": 0.16489361702127658, "grad_norm": 1.3609249588495251, "learning_rate": 1.9065388764583003e-05, "loss": 0.8163, "step": 217},
    {"epoch": 0.1656534954407295, "grad_norm": 1.082447919451176, "learning_rate": 1.9054968380316342e-05, "loss": 0.7296, "step": 218},
    {"epoch": 0.16641337386018237, "grad_norm": 1.1720051941365763, "learning_rate": 1.9044493107075367e-05, "loss": 0.7865, "step": 219},
    {"epoch": 0.16717325227963525, "grad_norm": 1.1536113908201957, "learning_rate": 1.90339630083586e-05, "loss": 0.8147, "step": 220},
    {"epoch": 0.16793313069908813, "grad_norm": 1.30860579752053, "learning_rate": 1.902337814799688e-05, "loss": 0.9001, "step": 221},
    {"epoch": 0.16869300911854104, "grad_norm": 1.2957687830149693, "learning_rate": 1.901273859015301e-05, "loss": 0.9007, "step": 222},
    {"epoch": 0.16945288753799392, "grad_norm": 2.6842584865115526, "learning_rate": 1.900204439932136e-05, "loss": 0.8743, "step": 223},
    {"epoch": 0.1702127659574468, "grad_norm": 1.4396478669672228, "learning_rate": 1.899129564032745e-05, "loss": 0.8249, "step": 224},
    {"epoch": 0.17097264437689969, "grad_norm": 1.4422431849699353, "learning_rate": 1.898049237832761e-05, "loss": 0.7921, "step": 225},
    {"epoch": 0.1717325227963526, "grad_norm": 1.2168879309113474, "learning_rate": 1.8969634678808523e-05, "loss": 0.7845, "step": 226},
    {"epoch": 0.17249240121580547, "grad_norm": 1.2456034586319875, "learning_rate": 1.8958722607586883e-05, "loss": 0.7799, "step": 227},
    {"epoch": 0.17325227963525835, "grad_norm": 1.1600154305212664, "learning_rate": 1.8947756230808955e-05, "loss": 0.8335, "step": 228},
    {"epoch": 0.17401215805471124, "grad_norm": 1.494016771059299, "learning_rate": 1.8936735614950196e-05, "loss": 0.8819, "step": 229},
    {"epoch": 0.17477203647416414, "grad_norm": 1.2756174524563888, "learning_rate": 1.8925660826814855e-05, "loss": 0.8888, "step": 230},
    {"epoch": 0.17553191489361702, "grad_norm": 1.1619746990383337, "learning_rate": 1.8914531933535548e-05, "loss": 0.8657, "step": 231},
    {"epoch": 0.1762917933130699, "grad_norm": 1.1424858446652142, "learning_rate": 1.8903349002572873e-05, "loss": 0.8787, "step": 232},
    {"epoch": 0.1770516717325228, "grad_norm": 1.189939887590901, "learning_rate": 1.889211210171498e-05, "loss": 0.8428, "step": 233},
    {"epoch": 0.1778115501519757, "grad_norm": 1.1936796343318596, "learning_rate": 1.8880821299077184e-05, "loss": 0.8588, "step": 234},
    {"epoch": 0.17857142857142858, "grad_norm": 1.4065647340327965, "learning_rate": 1.8869476663101525e-05, "loss": 0.8189, "step": 235},
    {"epoch": 0.17933130699088146, "grad_norm": 1.0408203774913998, "learning_rate": 1.885807826255638e-05, "loss": 0.8276, "step": 236},
    {"epoch": 0.18009118541033434, "grad_norm": 1.081409053735091, "learning_rate": 1.8846626166536027e-05, "loss": 0.8443, "step": 237},
    {"epoch": 0.18085106382978725, "grad_norm": 1.1432310944964015, "learning_rate": 1.883512044446023e-05, "loss": 0.7497, "step": 238},
    {"epoch": 0.18161094224924013, "grad_norm": 1.3470961995619208, "learning_rate": 1.882356116607383e-05, "loss": 0.8103, "step": 239},
    {"epoch": 0.182370820668693, "grad_norm": 1.411957400701631, "learning_rate": 1.8811948401446312e-05, "loss": 0.8736, "step": 240},
    {"epoch": 0.1831306990881459, "grad_norm": 1.2352325761124088, "learning_rate": 1.8800282220971368e-05, "loss": 0.8187, "step": 241},
    {"epoch": 0.1838905775075988, "grad_norm": 1.0889469436250492, "learning_rate": 1.8788562695366495e-05, "loss": 0.8589, "step": 242},
    {"epoch": 0.18465045592705168, "grad_norm": 1.3133401090492782, "learning_rate": 1.8776789895672557e-05, "loss": 0.8244, "step": 243},
    {"epoch": 0.18541033434650456, "grad_norm": 1.5074821242863536, "learning_rate": 1.8764963893253346e-05, "loss": 0.8579, "step": 244},
    {"epoch": 0.18617021276595744, "grad_norm": 1.00865864329584, "learning_rate": 1.875308475979516e-05, "loss": 0.7791, "step": 245},
    {"epoch": 0.18693009118541035, "grad_norm": 1.253557021475991, "learning_rate": 1.8741152567306356e-05, "loss": 0.7365, "step": 246},
    {"epoch": 0.18768996960486323, "grad_norm": 1.3369745721969943, "learning_rate": 1.8729167388116934e-05, "loss": 0.8689, "step": 247},
    {"epoch": 0.1884498480243161, "grad_norm": 1.3071305924038121, "learning_rate": 1.8717129294878075e-05, "loss": 0.8417, "step": 248},
    {"epoch": 0.189209726443769, "grad_norm": 1.1501306153544906, "learning_rate": 1.8705038360561724e-05, "loss": 0.8011, "step": 249},
    {"epoch": 0.1899696048632219, "grad_norm": 1.2045201301924366, "learning_rate": 1.869289465846012e-05, "loss": 0.9313, "step": 250},
    {"epoch": 0.19072948328267478, "grad_norm": 1.2535595856961743, "learning_rate": 1.868069826218538e-05, "loss": 0.8452, "step": 251},
    {"epoch": 0.19148936170212766, "grad_norm": 1.3220176709231053, "learning_rate": 1.866844924566904e-05, "loss": 0.8362, "step": 252},
    {"epoch": 0.19224924012158054, "grad_norm": 1.1019657196787032, "learning_rate": 1.8656147683161594e-05, "loss": 0.8708, "step": 253},
    {"epoch": 0.19300911854103345, "grad_norm": 1.424630197873639, "learning_rate": 1.8643793649232072e-05, "loss": 0.806, "step": 254},
    {"epoch": 0.19376899696048633, "grad_norm": 1.378063597833453, "learning_rate": 1.8631387218767564e-05, "loss": 0.7881, "step": 255},
    {"epoch": 0.1945288753799392, "grad_norm": 1.282933125284444, "learning_rate": 1.8618928466972773e-05, "loss": 0.8304, "step": 256},
    {"epoch": 0.1952887537993921, "grad_norm": 2.4772765829403123, "learning_rate": 1.860641746936957e-05, "loss": 0.8523, "step": 257},
    {"epoch": 0.196048632218845, "grad_norm": 1.418144991348534, "learning_rate": 1.859385430179652e-05, "loss": 0.8348, "step": 258},
    {"epoch": 0.19680851063829788, "grad_norm": 1.213603855886597, "learning_rate": 1.8581239040408433e-05, "loss": 0.758, "step": 259},
    {"epoch": 0.19756838905775076, "grad_norm": 1.153413472883319, "learning_rate": 1.8568571761675893e-05, "loss": 0.7345, "step": 260},
    {"epoch": 0.19832826747720364, "grad_norm": 1.344343857832059, "learning_rate": 1.855585254238481e-05, "loss": 0.685, "step": 261},
    {"epoch": 0.19908814589665655, "grad_norm": 1.4308863366658235, "learning_rate": 1.8543081459635937e-05, "loss": 0.735, "step": 262},
    {"epoch": 0.19984802431610943, "grad_norm": 1.2987140195370532, "learning_rate": 1.853025859084441e-05, "loss": 0.7916, "step": 263},
    {"epoch": 0.2006079027355623, "grad_norm": 2.4292975031700292, "learning_rate": 1.8517384013739287e-05, "loss": 0.7567, "step": 264},
    {"epoch": 0.2013677811550152, "grad_norm": 1.346335767671634, "learning_rate": 1.8504457806363058e-05, "loss": 0.7877, "step": 265},
    {"epoch": 0.20212765957446807, "grad_norm": 1.3311699322400439, "learning_rate": 1.8491480047071192e-05, "loss": 0.8149, "step": 266},
    {"epoch": 0.20288753799392098, "grad_norm": 1.5753723269120146, "learning_rate": 1.847845081453165e-05, "loss": 0.8317, "step": 267},
    {"epoch": 0.20364741641337386, "grad_norm": 1.3240965527913418, "learning_rate": 1.846537018772441e-05, "loss": 0.8982, "step": 268},
    {"epoch": 0.20440729483282674, "grad_norm": 1.5986433343548423, "learning_rate": 1.845223824594099e-05, "loss": 0.9481, "step": 269},
    {"epoch": 0.20516717325227962, "grad_norm": 1.289128583925758, "learning_rate": 1.8439055068783966e-05, "loss": 0.7662, "step": 270},
    {"epoch": 0.20592705167173253, "grad_norm": 1.3158592270793201, "learning_rate": 1.8425820736166492e-05, "loss": 0.8643, "step": 271},
    {"epoch": 0.2066869300911854, "grad_norm": 1.1872514587697873, "learning_rate": 1.8412535328311813e-05, "loss": 0.8401, "step": 272},
    {"epoch": 0.2074468085106383, "grad_norm": 1.8169105863005799, "learning_rate": 1.839919892575278e-05, "loss": 0.817, "step": 273},
    {"epoch": 0.20820668693009117, "grad_norm": 1.2769137841393565, "learning_rate": 1.8385811609331355e-05, "loss": 0.8177, "step": 274},
    {"epoch": 0.20896656534954408, "grad_norm": 1.3546661685046748, "learning_rate": 1.837237346019814e-05, "loss": 0.7926, "step": 275},
    {"epoch": 0.20972644376899696, "grad_norm": 1.4388025793800308, "learning_rate": 1.8358884559811855e-05, "loss": 0.7365, "step": 276},
    {"epoch": 0.21048632218844984, "grad_norm": 1.7306062464308192, "learning_rate": 1.834534498993888e-05, "loss": 0.8317, "step": 277},
    {"epoch": 0.21124620060790272, "grad_norm": 1.442442585820585, "learning_rate": 1.833175483265273e-05, "loss": 0.8254, "step": 278},
    {"epoch": 0.21200607902735563, "grad_norm": 1.4238653530865713, "learning_rate": 1.831811417033357e-05, "loss": 0.8327, "step": 279},
    {"epoch": 0.2127659574468085, "grad_norm": 1.1764715502052638, "learning_rate": 1.8304423085667713e-05, "loss": 0.753, "step": 280},
    {"epoch": 0.2135258358662614, "grad_norm": 1.1855529270479457, "learning_rate": 1.8290681661647124e-05, "loss": 0.7281, "step": 281},
    {"epoch": 0.21428571428571427, "grad_norm": 1.4055428341031602, "learning_rate": 1.827688998156891e-05, "loss": 0.8775, "step": 282},
    {"epoch": 0.21504559270516718, "grad_norm": 1.1278610066021444, "learning_rate": 1.826304812903481e-05, "loss": 0.7605, "step": 283},
    {"epoch": 0.21580547112462006, "grad_norm": 1.230292541864071, "learning_rate": 1.8249156187950717e-05, "loss": 0.8114, "step": 284},
    {"epoch": 0.21656534954407294, "grad_norm": 1.0624171978120978, "learning_rate": 1.8235214242526125e-05, "loss": 0.7305, "step": 285},
    {"epoch": 0.21732522796352582, "grad_norm": 1.248817652679081, "learning_rate": 1.8221222377273656e-05, "loss": 0.703, "step": 286},
    {"epoch": 0.21808510638297873, "grad_norm": 1.339926916421626, "learning_rate": 1.8207180677008528e-05, "loss": 0.8791, "step": 287},
    {"epoch": 0.2188449848024316, "grad_norm": 1.3120865498034557, "learning_rate": 1.819308922684805e-05, "loss": 0.8117, "step": 288},
    {"epoch": 0.2196048632218845, "grad_norm": 1.586444358599728, "learning_rate": 1.8178948112211104e-05, "loss": 0.7686, "step": 289},
    {"epoch": 0.22036474164133737, "grad_norm": 1.1782894592032886, "learning_rate": 1.816475741881761e-05, "loss": 0.7765, "step": 290},
    {"epoch": 0.22112462006079028, "grad_norm": 1.024174827969512, "learning_rate": 1.815051723268805e-05, "loss": 0.7618, "step": 291},
    {"epoch": 0.22188449848024316, "grad_norm": 1.2118229336816007, "learning_rate": 1.8136227640142895e-05, "loss": 0.7632, "step": 292},
    {"epoch": 0.22264437689969604, "grad_norm": 2.831901255089281, "learning_rate": 1.8121888727802113e-05, "loss": 0.8469, "step": 293},
    {"epoch": 0.22340425531914893, "grad_norm": 1.0974630686236104, "learning_rate": 1.8107500582584642e-05, "loss": 0.7237, "step": 294},
    {"epoch": 0.22416413373860183, "grad_norm": 1.1089723309653179, "learning_rate": 1.8093063291707847e-05, "loss": 0.8126, "step": 295},
    {"epoch": 0.22492401215805471, "grad_norm": 1.239233754298523, "learning_rate": 1.807857694268701e-05, "loss": 0.6578, "step": 296},
    {"epoch": 0.2256838905775076, "grad_norm": 1.218648492045814, "learning_rate": 1.806404162333479e-05, "loss": 0.7293, "step": 297},
    {"epoch": 0.22644376899696048, "grad_norm": 1.4388090752273834, "learning_rate": 1.804945742176069e-05, "loss": 0.8582, "step": 298},
    {"epoch": 0.22720364741641338, "grad_norm": 1.1338612730074744, "learning_rate": 1.8034824426370522e-05, "loss": 0.6526, "step": 299},
    {"epoch": 0.22796352583586627, "grad_norm": 1.1095651278513057, "learning_rate": 1.802014272586589e-05, "loss": 0.7955, "step": 300},
    {"epoch": 0.22872340425531915, "grad_norm": 1.0179556574023763, "learning_rate": 1.8005412409243604e-05, "loss": 0.8085, "step": 301},
    {"epoch": 0.22948328267477203, "grad_norm": 1.1342699502336406, "learning_rate": 1.799063356579521e-05, "loss": 0.8013, "step": 302},
    {"epoch": 0.23024316109422494, "grad_norm": 1.3480287218090232, "learning_rate": 1.797580628510639e-05, "loss": 0.7289, "step": 303},
    {"epoch": 0.23100303951367782, "grad_norm": 1.3481284749528273, "learning_rate": 1.796093065705644e-05, "loss": 0.789, "step": 304},
    {"epoch": 0.2317629179331307, "grad_norm": 1.134281183785409, "learning_rate": 1.7946006771817733e-05, "loss": 0.7622, "step": 305},
    {"epoch": 0.23252279635258358, "grad_norm": 1.3034552078744333, "learning_rate": 1.7931034719855166e-05, "loss": 0.7145, "step": 306},
    {"epoch": 0.23328267477203649, "grad_norm": 4.164441591688541, "learning_rate": 1.7916014591925605e-05, "loss": 0.7498, "step": 307},
    {"epoch": 0.23404255319148937, "grad_norm": 1.274502113758588, "learning_rate": 1.7900946479077345e-05, "loss": 0.813, "step": 308},
    {"epoch": 0.23480243161094225, "grad_norm": 1.2561873096870315, "learning_rate": 1.788583047264955e-05, "loss": 0.7454, "step": 309},
    {"epoch": 0.23556231003039513, "grad_norm": 1.3636582841755223, "learning_rate": 1.7870666664271706e-05, "loss": 0.8052, "step": 310},
    {"epoch": 0.23632218844984804, "grad_norm": 1.266779632537306, "learning_rate": 1.7855455145863064e-05, "loss": 0.8781, "step": 311},
    {"epoch": 0.23708206686930092, "grad_norm": 1.2674372090536117, "learning_rate": 1.784019600963207e-05, "loss": 0.671, "step": 312},
    {"epoch": 0.2378419452887538, "grad_norm": 1.2644363201775775, "learning_rate": 1.782488934807584e-05, "loss": 0.708, "step": 313},
    {"epoch": 0.23860182370820668, "grad_norm": 1.3595379806788828, "learning_rate": 1.7809535253979548e-05, "loss": 0.7788, "step": 314},
    {"epoch": 0.2393617021276596, "grad_norm": 1.235124119841333, "learning_rate": 1.7794133820415916e-05, "loss": 0.7672, "step": 315},
    {"epoch": 0.24012158054711247, "grad_norm": 1.2547608529516823, "learning_rate": 1.777868514074462e-05, "loss": 0.6577, "step": 316},
    {"epoch": 0.24088145896656535, "grad_norm": 1.2464188679916115, "learning_rate": 1.776318930861172e-05, "loss": 0.8099, "step": 317},
    {"epoch": 0.24164133738601823, "grad_norm": 1.253977613923761, "learning_rate": 1.7747646417949114e-05, "loss": 0.8013, "step": 318},
    {"epoch": 0.24240121580547114, "grad_norm": 1.2580058097893763, "learning_rate": 1.7732056562973956e-05, "loss": 0.7319, "step": 319},
    {"epoch": 0.24316109422492402, "grad_norm": 1.312382310386678, "learning_rate": 1.771641983818808e-05, "loss": 0.7896, "step": 320},
    {"epoch": 0.2439209726443769, "grad_norm": 1.2391223661498425, "learning_rate": 1.7700736338377435e-05, "loss": 0.7844, "step": 321},
    {"epoch": 0.24468085106382978, "grad_norm": 1.2098548099697453, "learning_rate": 1.7685006158611514e-05, "loss": 0.8045, "step": 322},
    {"epoch": 0.2454407294832827, "grad_norm": 1.3471453672810556, "learning_rate": 1.7669229394242767e-05, "loss": 0.7868, "step": 323},
    {"epoch": 0.24620060790273557, "grad_norm": 1.2136618191577149, "learning_rate": 1.7653406140906027e-05, "loss": 0.7945, "step": 324},
    {"epoch": 0.24696048632218845, "grad_norm": 1.1649325831768262, "learning_rate": 1.763753649451794e-05, "loss": 0.776, "step": 325},
    {"epoch": 0.24772036474164133, "grad_norm": 1.195030183514188, "learning_rate": 1.7621620551276366e-05, "loss": 0.7754, "step": 326},
    {"epoch": 0.24848024316109424, "grad_norm": 1.159257066391459, "learning_rate": 1.760565840765981e-05, "loss": 0.7639, "step": 327},
    {"epoch": 0.24924012158054712, "grad_norm": 1.231598878991431, "learning_rate": 1.758965016042683e-05, "loss": 0.7727, "step": 328},
    {"epoch": 0.25, "grad_norm": 1.1844601831849817, "learning_rate": 1.757359590661545e-05, "loss": 0.7067, "step": 329},
    {"epoch": 0.2507598784194529, "grad_norm": 1.5924474170665193, "learning_rate": 1.7557495743542586e-05, "loss": 0.8584, "step": 330},
    {"epoch": 0.25151975683890576, "grad_norm": 1.1493715177062336, "learning_rate": 1.754134976880343e-05, "loss": 0.7477, "step": 331},
    {"epoch": 0.25227963525835867, "grad_norm": 1.2275942317850876, "learning_rate": 1.752515808027088e-05, "loss": 0.7986, "step": 332},
    {"epoch": 0.2530395136778115, "grad_norm": 1.2516077599858326, "learning_rate": 1.7508920776094943e-05, "loss": 0.7934, "step": 333},
    {"epoch": 0.25379939209726443, "grad_norm": 1.1819855497843523, "learning_rate": 1.749263795470213e-05, "loss": 0.744, "step": 334},
    {"epoch": 0.25455927051671734, "grad_norm": 1.2143224653512694, "learning_rate": 1.7476309714794874e-05, "loss": 0.7713, "step": 335},
    {"epoch": 0.2553191489361702, "grad_norm": 1.2321509393176535, "learning_rate": 1.7459936155350908e-05, "loss": 0.8308, "step": 336},
    {"epoch": 0.2560790273556231, "grad_norm": 1.2988231987462813, "learning_rate": 1.7443517375622706e-05, "loss": 0.7681, "step": 337},
    {"epoch": 0.256838905775076, "grad_norm": 1.1910359567755102, "learning_rate": 1.742705347513683e-05, "loss": 0.7908, "step": 338},
    {"epoch": 0.25759878419452886, "grad_norm": 1.2710111666197443, "learning_rate": 1.7410544553693368e-05, "loss": 0.8822, "step": 339},
    {"epoch": 0.25835866261398177, "grad_norm": 1.1028923587629835, "learning_rate": 1.7393990711365312e-05, "loss": 0.8332, "step": 340},
    {"epoch": 0.2591185410334346, "grad_norm": 1.0426416469688176, "learning_rate": 1.7377392048497954e-05, "loss": 0.7304, "step": 341},
    {"epoch": 0.25987841945288753, "grad_norm": 1.2949586899018366, "learning_rate": 1.7360748665708268e-05, "loss": 0.7674, "step": 342},
    {"epoch": 0.26063829787234044, "grad_norm": 1.1027637940420714, "learning_rate": 1.7344060663884325e-05, "loss": 0.7404, "step": 343},
    {"epoch": 0.2613981762917933, "grad_norm": 1.3927957909651685, "learning_rate": 1.7327328144184648e-05, "loss": 0.7895, "step": 344},
    {"epoch": 0.2621580547112462, "grad_norm": 1.1621493894342037, "learning_rate": 1.7310551208037627e-05, "loss": 0.8042, "step": 345},
    {"epoch": 0.2629179331306991, "grad_norm": 1.2081606306645722, "learning_rate": 1.729372995714089e-05, "loss": 0.6491, "step": 346},
    {"epoch": 0.26367781155015196, "grad_norm": 1.219598890320066, "learning_rate": 1.7276864493460702e-05, "loss": 0.8626, "step": 347},
    {"epoch": 0.26443768996960487, "grad_norm": 1.2033463802362787, "learning_rate": 1.725995491923131e-05, "loss": 0.7272, "step": 348},
    {"epoch": 0.2651975683890577, "grad_norm": 1.346822838996504, "learning_rate": 1.724300133695437e-05, "loss": 0.7944, "step": 349},
    {"epoch": 0.26595744680851063, "grad_norm": 1.4462444891707886, "learning_rate": 1.7226003849398292e-05, "loss": 0.8156, "step": 350},
    {"epoch": 0.26671732522796354, "grad_norm": 1.2005302941250175, "learning_rate": 1.720896255959764e-05, "loss": 0.8301, "step": 351},
    {"epoch": 0.2674772036474164, "grad_norm": 1.0894697360010115, "learning_rate": 1.7191877570852482e-05, "loss": 0.7737, "step": 352},
    {"epoch": 0.2682370820668693, "grad_norm": 1.0806869356296902, "learning_rate": 1.717474898672779e-05, "loss": 0.72, "step": 353},
    {"epoch": 0.2689969604863222, "grad_norm": 1.3169214061141539, "learning_rate": 1.7157576911052796e-05, "loss": 0.8269, "step": 354},
    {"epoch": 0.26975683890577506, "grad_norm": 1.2420921150436637, "learning_rate": 1.7140361447920363e-05, "loss": 0.805, "step": 355},
    {"epoch": 0.270516717325228, "grad_norm": 1.0641952583078091, "learning_rate": 1.712310270168637e-05, "loss": 0.6925, "step": 356},
    {"epoch": 0.2712765957446808, "grad_norm": 1.08072912866875, "learning_rate": 1.7105800776969053e-05, "loss": 0.7469, "step": 357},
    {"epoch": 0.27203647416413373, "grad_norm": 1.077651076838975, "learning_rate": 1.7088455778648397e-05, "loss": 0.8513, "step": 358},
    {"epoch": 0.27279635258358664, "grad_norm": 1.115403034371285, "learning_rate": 1.7071067811865477e-05, "loss": 0.7293, "step": 359},
    {"epoch": 0.2735562310030395, "grad_norm": 1.0448107198849983, "learning_rate": 1.7053636982021844e-05, "loss": 0.7339, "step": 360},
    {"epoch": 0.2743161094224924, "grad_norm": 1.1040024265383643, "learning_rate": 1.7036163394778865e-05, "loss": 0.7801, "step": 361},
    {"epoch": 0.2750759878419453, "grad_norm": 1.1830956591962893, "learning_rate": 1.7018647156057095e-05, "loss": 0.7069, "step": 362},
    {"epoch": 0.27583586626139817, "grad_norm": 1.221649805199723, "learning_rate": 1.7001088372035637e-05, "loss": 0.7232, "step": 363},
    {"epoch": 0.2765957446808511, "grad_norm": 1.2331709557416881, "learning_rate": 1.6983487149151486e-05, "loss": 0.7239, "step": 364},
    {"epoch": 0.2773556231003039, "grad_norm": 1.2190524746498974, "learning_rate": 1.696584359409889e-05, "loss": 0.6422, "step": 365},
    {"epoch": 0.27811550151975684, "grad_norm": 1.3212175401909012, "learning_rate": 1.6948157813828718e-05, "loss": 0.7876, "step": 366},
    {"epoch": 0.27887537993920974, "grad_norm": 1.340964092097885, "learning_rate": 1.693042991554777e-05, "loss": 0.8319, "step": 367},
    {"epoch": 0.2796352583586626, "grad_norm": 1.4400012740896617, "learning_rate": 1.6912660006718186e-05, "loss": 0.8326, "step": 368},
    {"epoch": 0.2803951367781155, "grad_norm": 1.3071365110935433, "learning_rate": 1.6894848195056747e-05, "loss": 0.802, "step": 369},
    {"epoch": 0.2811550151975684, "grad_norm": 1.143992366688308, "learning_rate": 1.6876994588534234e-05, "loss": 0.8713, "step": 370},
|
{ |
|
"epoch": 0.28191489361702127, |
|
"grad_norm": 1.421416338312035, |
|
"learning_rate": 1.685909929537479e-05, |
|
"loss": 0.6818, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.2826747720364742, |
|
"grad_norm": 1.1835459613627775, |
|
"learning_rate": 1.684116242405525e-05, |
|
"loss": 0.7498, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.28343465045592703, |
|
"grad_norm": 1.322714460411963, |
|
"learning_rate": 1.6823184083304482e-05, |
|
"loss": 0.7272, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.28419452887537994, |
|
"grad_norm": 1.21680776076326, |
|
"learning_rate": 1.680516438210273e-05, |
|
"loss": 0.8131, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.28495440729483285, |
|
"grad_norm": 1.0478828789518506, |
|
"learning_rate": 1.6787103429680955e-05, |
|
"loss": 0.7681, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.2857142857142857, |
|
"grad_norm": 1.1967472445752216, |
|
"learning_rate": 1.676900133552018e-05, |
|
"loss": 0.7526, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.2864741641337386, |
|
"grad_norm": 1.0345058357987296, |
|
"learning_rate": 1.6750858209350808e-05, |
|
"loss": 0.8153, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.2872340425531915, |
|
"grad_norm": 1.208809639529449, |
|
"learning_rate": 1.673267416115198e-05, |
|
"loss": 0.8176, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.28799392097264437, |
|
"grad_norm": 1.1712691389380345, |
|
"learning_rate": 1.6714449301150883e-05, |
|
"loss": 0.7528, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.2887537993920973, |
|
"grad_norm": 1.1839785201033064, |
|
"learning_rate": 1.6696183739822108e-05, |
|
"loss": 0.8387, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.28951367781155013, |
|
"grad_norm": 1.2516534759009132, |
|
"learning_rate": 1.6677877587886956e-05, |
|
"loss": 0.7668, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.29027355623100304, |
|
"grad_norm": 1.410017906394451, |
|
"learning_rate": 1.665953095631279e-05, |
|
"loss": 0.7434, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.29103343465045595, |
|
"grad_norm": 1.2800424937579746, |
|
"learning_rate": 1.6641143956312337e-05, |
|
"loss": 0.8265, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.2917933130699088, |
|
"grad_norm": 1.5058975032467392, |
|
"learning_rate": 1.6622716699343032e-05, |
|
"loss": 0.8508, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.2925531914893617, |
|
"grad_norm": 1.303773721114821, |
|
"learning_rate": 1.660424929710635e-05, |
|
"loss": 0.7675, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.2933130699088146, |
|
"grad_norm": 1.4036528322023598, |
|
"learning_rate": 1.6585741861547102e-05, |
|
"loss": 0.7774, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.29407294832826747, |
|
"grad_norm": 1.081350837248616, |
|
"learning_rate": 1.6567194504852778e-05, |
|
"loss": 0.7436, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.2948328267477204, |
|
"grad_norm": 1.3699282034155558, |
|
"learning_rate": 1.6548607339452853e-05, |
|
"loss": 0.7488, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.29559270516717323, |
|
"grad_norm": 1.3175813961312064, |
|
"learning_rate": 1.652998047801812e-05, |
|
"loss": 0.8234, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.29635258358662614, |
|
"grad_norm": 1.4991870376005667, |
|
"learning_rate": 1.6511314033459994e-05, |
|
"loss": 0.8478, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.29711246200607905, |
|
"grad_norm": 1.274575767033035, |
|
"learning_rate": 1.649260811892984e-05, |
|
"loss": 0.9165, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.2978723404255319, |
|
"grad_norm": 1.095438448087222, |
|
"learning_rate": 1.647386284781828e-05, |
|
"loss": 0.7289, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.2986322188449848, |
|
"grad_norm": 1.2565901625292208, |
|
"learning_rate": 1.645507833375449e-05, |
|
"loss": 0.7155, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.2993920972644377, |
|
"grad_norm": 1.1346280159229207, |
|
"learning_rate": 1.643625469060555e-05, |
|
"loss": 0.8248, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.30015197568389057, |
|
"grad_norm": 1.1990888274207598, |
|
"learning_rate": 1.6417392032475715e-05, |
|
"loss": 0.8198, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.3009118541033435, |
|
"grad_norm": 1.0215794971897059, |
|
"learning_rate": 1.6398490473705742e-05, |
|
"loss": 0.8245, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.30167173252279633, |
|
"grad_norm": 1.1115585693979149, |
|
"learning_rate": 1.6379550128872202e-05, |
|
"loss": 0.7206, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.30243161094224924, |
|
"grad_norm": 1.004768858656318, |
|
"learning_rate": 1.6360571112786768e-05, |
|
"loss": 0.7819, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.30319148936170215, |
|
"grad_norm": 1.1508323939293101, |
|
"learning_rate": 1.6341553540495533e-05, |
|
"loss": 0.7656, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.303951367781155, |
|
"grad_norm": 1.3202901349614393, |
|
"learning_rate": 1.6322497527278308e-05, |
|
"loss": 0.7249, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.3047112462006079, |
|
"grad_norm": 1.085179473978901, |
|
"learning_rate": 1.6303403188647914e-05, |
|
"loss": 0.7614, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.30547112462006076, |
|
"grad_norm": 1.3733912801023669, |
|
"learning_rate": 1.6284270640349516e-05, |
|
"loss": 0.7673, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.30623100303951367, |
|
"grad_norm": 1.005601823506343, |
|
"learning_rate": 1.6265099998359868e-05, |
|
"loss": 0.7739, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.3069908814589666, |
|
"grad_norm": 1.2084167262300816, |
|
"learning_rate": 1.6245891378886655e-05, |
|
"loss": 0.7411, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.30775075987841943, |
|
"grad_norm": 1.026803819956731, |
|
"learning_rate": 1.6226644898367767e-05, |
|
"loss": 0.8073, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.30851063829787234, |
|
"grad_norm": 1.174796304680086, |
|
"learning_rate": 1.62073606734706e-05, |
|
"loss": 0.749, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.30927051671732525, |
|
"grad_norm": 1.0463661252192882, |
|
"learning_rate": 1.6188038821091346e-05, |
|
"loss": 0.7496, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.3100303951367781, |
|
"grad_norm": 1.171960934171062, |
|
"learning_rate": 1.6168679458354284e-05, |
|
"loss": 0.747, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.310790273556231, |
|
"grad_norm": 1.3147049412489755, |
|
"learning_rate": 1.6149282702611077e-05, |
|
"loss": 0.7915, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.31155015197568386, |
|
"grad_norm": 1.3350489898674418, |
|
"learning_rate": 1.6129848671440047e-05, |
|
"loss": 0.6741, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.3123100303951368, |
|
"grad_norm": 1.103528249440529, |
|
"learning_rate": 1.611037748264548e-05, |
|
"loss": 0.8204, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.3130699088145897, |
|
"grad_norm": 1.3771958067762746, |
|
"learning_rate": 1.6090869254256892e-05, |
|
"loss": 0.6846, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.31382978723404253, |
|
"grad_norm": 1.3058389924401281, |
|
"learning_rate": 1.6071324104528333e-05, |
|
"loss": 0.8816, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.31458966565349544, |
|
"grad_norm": 1.1591394916763322, |
|
"learning_rate": 1.6051742151937655e-05, |
|
"loss": 0.7428, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.31534954407294835, |
|
"grad_norm": 1.1638239136948474, |
|
"learning_rate": 1.60321235151858e-05, |
|
"loss": 0.8085, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.3161094224924012, |
|
"grad_norm": 1.1336620717950094, |
|
"learning_rate": 1.6012468313196086e-05, |
|
"loss": 0.8214, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.3168693009118541, |
|
"grad_norm": 1.1940294495807557, |
|
"learning_rate": 1.599277666511347e-05, |
|
"loss": 0.8215, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.31762917933130697, |
|
"grad_norm": 1.1802830931942, |
|
"learning_rate": 1.5973048690303848e-05, |
|
"loss": 0.7415, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.3183890577507599, |
|
"grad_norm": 1.1182604763564765, |
|
"learning_rate": 1.5953284508353316e-05, |
|
"loss": 0.7352, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.3191489361702128, |
|
"grad_norm": 1.2492278687757044, |
|
"learning_rate": 1.593348423906745e-05, |
|
"loss": 0.776, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.31990881458966564, |
|
"grad_norm": 1.1954141273737178, |
|
"learning_rate": 1.5913648002470562e-05, |
|
"loss": 0.7464, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.32066869300911854, |
|
"grad_norm": 1.3015246134897502, |
|
"learning_rate": 1.589377591880501e-05, |
|
"loss": 0.7622, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.32142857142857145, |
|
"grad_norm": 1.1224692664097269, |
|
"learning_rate": 1.5873868108530443e-05, |
|
"loss": 0.6181, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.3221884498480243, |
|
"grad_norm": 1.2533483748063843, |
|
"learning_rate": 1.585392469232307e-05, |
|
"loss": 0.858, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.3229483282674772, |
|
"grad_norm": 1.122961911873335, |
|
"learning_rate": 1.5833945791074943e-05, |
|
"loss": 0.7809, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.32370820668693007, |
|
"grad_norm": 1.3135799173833802, |
|
"learning_rate": 1.58139315258932e-05, |
|
"loss": 0.7554, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.324468085106383, |
|
"grad_norm": 1.273584627487933, |
|
"learning_rate": 1.5793882018099365e-05, |
|
"loss": 0.8533, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.3252279635258359, |
|
"grad_norm": 1.2874066666555242, |
|
"learning_rate": 1.5773797389228583e-05, |
|
"loss": 0.8293, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.32598784194528874, |
|
"grad_norm": 1.1526970687848226, |
|
"learning_rate": 1.5753677761028896e-05, |
|
"loss": 0.7106, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.32674772036474165, |
|
"grad_norm": 1.194532089923422, |
|
"learning_rate": 1.5733523255460506e-05, |
|
"loss": 0.6916, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.32750759878419455, |
|
"grad_norm": 1.1191253614825432, |
|
"learning_rate": 1.571333399469503e-05, |
|
"loss": 0.6864, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.3282674772036474, |
|
"grad_norm": 1.002599107035822, |
|
"learning_rate": 1.5693110101114763e-05, |
|
"loss": 0.6757, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.3290273556231003, |
|
"grad_norm": 1.1606653068590027, |
|
"learning_rate": 1.5672851697311935e-05, |
|
"loss": 0.8724, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.32978723404255317, |
|
"grad_norm": 1.0461255227947164, |
|
"learning_rate": 1.565255890608797e-05, |
|
"loss": 0.7232, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.3305471124620061, |
|
"grad_norm": 1.1337841433196596, |
|
"learning_rate": 1.5632231850452745e-05, |
|
"loss": 0.7884, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.331306990881459, |
|
"grad_norm": 1.2481479032019933, |
|
"learning_rate": 1.5611870653623826e-05, |
|
"loss": 0.7777, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.33206686930091184, |
|
"grad_norm": 1.1538715959835062, |
|
"learning_rate": 1.5591475439025745e-05, |
|
"loss": 0.8474, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.33282674772036475, |
|
"grad_norm": 1.1278764402856465, |
|
"learning_rate": 1.557104633028924e-05, |
|
"loss": 0.7918, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.33358662613981765, |
|
"grad_norm": 1.2177653458117224, |
|
"learning_rate": 1.5550583451250504e-05, |
|
"loss": 0.7433, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.3343465045592705, |
|
"grad_norm": 1.2816295962098154, |
|
"learning_rate": 1.5530086925950435e-05, |
|
"loss": 0.7341, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.3351063829787234, |
|
"grad_norm": 1.2855058438606246, |
|
"learning_rate": 1.5509556878633894e-05, |
|
"loss": 0.7964, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.33586626139817627, |
|
"grad_norm": 1.210793739638994, |
|
"learning_rate": 1.5488993433748944e-05, |
|
"loss": 0.7667, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.3366261398176292, |
|
"grad_norm": 1.2354508105613038, |
|
"learning_rate": 1.5468396715946082e-05, |
|
"loss": 0.8007, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.3373860182370821, |
|
"grad_norm": 1.2157655449115916, |
|
"learning_rate": 1.5447766850077517e-05, |
|
"loss": 0.7962, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.33814589665653494, |
|
"grad_norm": 1.1814788790840558, |
|
"learning_rate": 1.5427103961196378e-05, |
|
"loss": 0.6983, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.33890577507598785, |
|
"grad_norm": 1.0656501038586574, |
|
"learning_rate": 1.5406408174555978e-05, |
|
"loss": 0.7435, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.33966565349544076, |
|
"grad_norm": 1.05447829161781, |
|
"learning_rate": 1.5385679615609045e-05, |
|
"loss": 0.8181, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.3404255319148936, |
|
"grad_norm": 1.3259139309112116, |
|
"learning_rate": 1.5364918410006967e-05, |
|
"loss": 0.8196, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.3411854103343465, |
|
"grad_norm": 1.3179288092906976, |
|
"learning_rate": 1.534412468359903e-05, |
|
"loss": 0.7695, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.34194528875379937, |
|
"grad_norm": 1.294894096237413, |
|
"learning_rate": 1.5323298562431646e-05, |
|
"loss": 0.8215, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.3427051671732523, |
|
"grad_norm": 1.0755513853730156, |
|
"learning_rate": 1.5302440172747606e-05, |
|
"loss": 0.7272, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.3434650455927052, |
|
"grad_norm": 1.282859085633421, |
|
"learning_rate": 1.5281549640985295e-05, |
|
"loss": 0.6895, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.34422492401215804, |
|
"grad_norm": 1.1930661243488856, |
|
"learning_rate": 1.5260627093777936e-05, |
|
"loss": 0.8268, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.34498480243161095, |
|
"grad_norm": 1.0866451187615458, |
|
"learning_rate": 1.5239672657952833e-05, |
|
"loss": 0.7809, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.34574468085106386, |
|
"grad_norm": 1.220607386293379, |
|
"learning_rate": 1.5218686460530579e-05, |
|
"loss": 0.7719, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.3465045592705167, |
|
"grad_norm": 1.3393480631399062, |
|
"learning_rate": 1.5197668628724302e-05, |
|
"loss": 0.7731, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.3472644376899696, |
|
"grad_norm": 1.290488709701274, |
|
"learning_rate": 1.517661928993889e-05, |
|
"loss": 0.8544, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.34802431610942247, |
|
"grad_norm": 1.2186437415720965, |
|
"learning_rate": 1.515553857177022e-05, |
|
"loss": 0.7633, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.3487841945288754, |
|
"grad_norm": 1.0632730996984814, |
|
"learning_rate": 1.5134426602004378e-05, |
|
"loss": 0.7913, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.3495440729483283, |
|
"grad_norm": 1.3885119597502555, |
|
"learning_rate": 1.5113283508616895e-05, |
|
"loss": 0.728, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.35030395136778114, |
|
"grad_norm": 1.1049399011437813, |
|
"learning_rate": 1.5092109419771962e-05, |
|
"loss": 0.688, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.35106382978723405, |
|
"grad_norm": 1.1804058487766822, |
|
"learning_rate": 1.5070904463821658e-05, |
|
"loss": 0.8329, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.3518237082066869, |
|
"grad_norm": 1.1751007297581517, |
|
"learning_rate": 1.5049668769305172e-05, |
|
"loss": 0.7934, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.3525835866261398, |
|
"grad_norm": 1.0871412331260804, |
|
"learning_rate": 1.5028402464948023e-05, |
|
"loss": 0.7124, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.3533434650455927, |
|
"grad_norm": 1.1814869569600126, |
|
"learning_rate": 1.5007105679661276e-05, |
|
"loss": 0.7608, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.3541033434650456, |
|
"grad_norm": 1.2768595272009262, |
|
"learning_rate": 1.4985778542540764e-05, |
|
"loss": 0.7963, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.3548632218844985, |
|
"grad_norm": 1.2637062865213613, |
|
"learning_rate": 1.4964421182866312e-05, |
|
"loss": 0.8799, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.3556231003039514, |
|
"grad_norm": 1.095234475660764, |
|
"learning_rate": 1.4943033730100936e-05, |
|
"loss": 0.6945, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.35638297872340424, |
|
"grad_norm": 1.2023691776289789, |
|
"learning_rate": 1.4921616313890073e-05, |
|
"loss": 0.6206, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.35714285714285715, |
|
"grad_norm": 0.9692133018570475, |
|
"learning_rate": 1.4900169064060804e-05, |
|
"loss": 0.7085, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.35790273556231, |
|
"grad_norm": 1.2970972735015418, |
|
"learning_rate": 1.4878692110621028e-05, |
|
"loss": 0.7751, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.3586626139817629, |
|
"grad_norm": 1.192748655045959, |
|
"learning_rate": 1.4857185583758722e-05, |
|
"loss": 0.7444, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.3594224924012158, |
|
"grad_norm": 1.3135419687207899, |
|
"learning_rate": 1.483564961384112e-05, |
|
"loss": 0.7526, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.3601823708206687, |
|
"grad_norm": 1.2715561806372109, |
|
"learning_rate": 1.4814084331413938e-05, |
|
"loss": 0.801, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.3609422492401216, |
|
"grad_norm": 1.1055960137648684, |
|
"learning_rate": 1.479248986720057e-05, |
|
"loss": 0.7801, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.3617021276595745, |
|
"grad_norm": 1.2252469713673722, |
|
"learning_rate": 1.4770866352101308e-05, |
|
"loss": 0.7273, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.36246200607902734, |
|
"grad_norm": 1.1487823254499623, |
|
"learning_rate": 1.474921391719254e-05, |
|
"loss": 0.8097, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.36322188449848025, |
|
"grad_norm": 1.1386182161632215, |
|
"learning_rate": 1.472753269372596e-05, |
|
"loss": 0.7327, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.3639817629179331, |
|
"grad_norm": 1.1552140594901108, |
|
"learning_rate": 1.4705822813127776e-05, |
|
"loss": 0.818, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.364741641337386, |
|
"grad_norm": 0.9898996357312346, |
|
"learning_rate": 1.4684084406997903e-05, |
|
"loss": 0.7945, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.3655015197568389, |
|
"grad_norm": 0.9887730384363455, |
|
"learning_rate": 1.466231760710917e-05, |
|
"loss": 0.7552, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.3662613981762918, |
|
"grad_norm": 1.3676239745133492, |
|
"learning_rate": 1.4640522545406519e-05, |
|
"loss": 0.7643, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.3670212765957447, |
|
"grad_norm": 1.256157177775327, |
|
"learning_rate": 1.4618699354006223e-05, |
|
"loss": 0.8437, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.3677811550151976, |
|
"grad_norm": 1.1539469195117926, |
|
"learning_rate": 1.4596848165195052e-05, |
|
"loss": 0.6623, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.36854103343465044, |
|
"grad_norm": 1.2507320110146378, |
|
"learning_rate": 1.45749691114295e-05, |
|
"loss": 0.7909, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.36930091185410335, |
|
"grad_norm": 1.1367719149570354, |
|
"learning_rate": 1.4553062325334968e-05, |
|
"loss": 0.8016, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.3700607902735562, |
|
"grad_norm": 1.0817239050227132, |
|
"learning_rate": 1.4531127939704965e-05, |
|
"loss": 0.7795, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.3708206686930091, |
|
"grad_norm": 1.1319940927957448, |
|
"learning_rate": 1.4509166087500305e-05, |
|
"loss": 0.8259, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.371580547112462, |
|
"grad_norm": 1.111174747008594, |
|
"learning_rate": 1.4487176901848285e-05, |
|
"loss": 0.7436, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.3723404255319149, |
|
"grad_norm": 1.2088816855900237, |
|
"learning_rate": 1.4465160516041905e-05, |
|
"loss": 0.7708, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.3731003039513678, |
|
"grad_norm": 1.225764934403788, |
|
"learning_rate": 1.4443117063539039e-05, |
|
"loss": 0.7472, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.3738601823708207, |
|
"grad_norm": 1.1162502285286449, |
|
"learning_rate": 1.4421046677961627e-05, |
|
"loss": 0.8919, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.37462006079027355, |
|
"grad_norm": 1.1158602272833609, |
|
"learning_rate": 1.439894949309489e-05, |
|
"loss": 0.6754, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.37537993920972645, |
|
"grad_norm": 1.0891222481558076, |
|
"learning_rate": 1.4376825642886473e-05, |
|
"loss": 0.835, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.3761398176291793, |
|
"grad_norm": 1.0774289009642644, |
|
"learning_rate": 1.435467526144568e-05, |
|
"loss": 0.7633, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.3768996960486322, |
|
"grad_norm": 1.1010053587410074, |
|
"learning_rate": 1.4332498483042639e-05, |
|
"loss": 0.7572, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.3776595744680851, |
|
"grad_norm": 1.3042486518362346, |
|
"learning_rate": 1.4310295442107472e-05, |
|
"loss": 0.7659, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.378419452887538, |
|
"grad_norm": 1.1632619767172343, |
|
"learning_rate": 1.428806627322952e-05, |
|
"loss": 0.8763, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.3791793313069909, |
|
"grad_norm": 1.2766467060051423, |
|
"learning_rate": 1.4265811111156491e-05, |
|
"loss": 0.7718, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.3799392097264438, |
|
"grad_norm": 1.1013403801657626, |
|
"learning_rate": 1.4243530090793667e-05, |
|
"loss": 0.7466, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.38069908814589665, |
|
"grad_norm": 1.1081380726596666, |
|
"learning_rate": 1.4221223347203067e-05, |
|
"loss": 0.7945, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.38145896656534956, |
|
"grad_norm": 1.1600362541283282, |
|
"learning_rate": 1.4198891015602648e-05, |
|
"loss": 0.8559, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.3822188449848024, |
|
"grad_norm": 1.1679070483079377, |
|
"learning_rate": 1.4176533231365463e-05, |
|
"loss": 0.7827, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.3829787234042553, |
|
"grad_norm": 1.087576877411926, |
|
"learning_rate": 1.4154150130018867e-05, |
|
"loss": 0.6864, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.3837386018237082, |
|
"grad_norm": 1.1756331296030478, |
|
"learning_rate": 1.4131741847243665e-05, |
|
"loss": 0.8091, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.3844984802431611, |
|
"grad_norm": 1.2645845492058057, |
|
"learning_rate": 1.4109308518873321e-05, |
|
"loss": 0.8335, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.385258358662614, |
|
"grad_norm": 1.163925232622094, |
|
"learning_rate": 1.4086850280893107e-05, |
|
"loss": 0.6869, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.3860182370820669, |
|
"grad_norm": 1.3704424537243407, |
|
"learning_rate": 1.40643672694393e-05, |
|
"loss": 0.7459, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.38677811550151975, |
|
"grad_norm": 1.14432417961867, |
|
"learning_rate": 1.4041859620798341e-05, |
|
"loss": 0.7432, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.38753799392097266, |
|
"grad_norm": 1.223881973008391, |
|
"learning_rate": 1.4019327471406021e-05, |
|
"loss": 0.7779, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.3882978723404255, |
|
"grad_norm": 1.1692544458251006, |
|
"learning_rate": 1.3996770957846643e-05, |
|
"loss": 0.7751, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.3890577507598784, |
|
"grad_norm": 1.166975919514324, |
|
"learning_rate": 1.3974190216852203e-05, |
|
"loss": 0.7696, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.3898176291793313, |
|
"grad_norm": 1.1562713426590367, |
|
"learning_rate": 1.3951585385301557e-05, |
|
"loss": 0.7419, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.3905775075987842, |
|
"grad_norm": 1.187327520978288, |
|
"learning_rate": 1.3928956600219593e-05, |
|
"loss": 0.7498, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.3913373860182371, |
|
"grad_norm": 1.2230763170723078, |
|
"learning_rate": 1.3906303998776392e-05, |
|
"loss": 0.6194, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.39209726443769, |
|
"grad_norm": 1.3399677206766483, |
|
"learning_rate": 1.388362771828642e-05, |
|
"loss": 0.826, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.39285714285714285, |
|
"grad_norm": 1.2045222889734593, |
|
"learning_rate": 1.3860927896207665e-05, |
|
"loss": 0.6885, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.39361702127659576, |
|
"grad_norm": 1.249508900751444, |
|
"learning_rate": 1.383820467014082e-05, |
|
"loss": 0.826, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.3943768996960486, |
|
"grad_norm": 1.463197283717251, |
|
"learning_rate": 1.3815458177828455e-05, |
|
"loss": 0.7469, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.3951367781155015, |
|
"grad_norm": 1.4240981666509096, |
|
"learning_rate": 1.3792688557154166e-05, |
|
"loss": 0.7172, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.3958966565349544, |
|
"grad_norm": 1.037596684194348, |
|
"learning_rate": 1.3769895946141753e-05, |
|
"loss": 0.7861, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.3966565349544073, |
|
"grad_norm": 1.2627813756704651, |
|
"learning_rate": 1.3747080482954378e-05, |
|
"loss": 0.8146, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.3974164133738602, |
|
"grad_norm": 1.196949297922996, |
|
"learning_rate": 1.3724242305893716e-05, |
|
"loss": 0.8127, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.3981762917933131, |
|
"grad_norm": 1.1349164147949475, |
|
"learning_rate": 1.3701381553399147e-05, |
|
"loss": 0.699, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.39893617021276595, |
|
"grad_norm": 1.049413976516623, |
|
"learning_rate": 1.3678498364046877e-05, |
|
"loss": 0.7486, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.39969604863221886, |
|
"grad_norm": 1.0959358568221518, |
|
"learning_rate": 1.3655592876549135e-05, |
|
"loss": 0.7678, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.4004559270516717, |
|
"grad_norm": 1.0203803122275903, |
|
"learning_rate": 1.363266522975331e-05, |
|
"loss": 0.725, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.4012158054711246, |
|
"grad_norm": 1.2044356250068233, |
|
"learning_rate": 1.3609715562641116e-05, |
|
"loss": 0.811, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.40197568389057753, |
|
"grad_norm": 1.3222295856000086, |
|
"learning_rate": 1.358674401432774e-05, |
|
"loss": 0.5911, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.4027355623100304, |
|
"grad_norm": 1.0913691366241578, |
|
"learning_rate": 1.3563750724061025e-05, |
|
"loss": 0.7614, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.4034954407294833, |
|
"grad_norm": 1.1746171382577624, |
|
"learning_rate": 1.354073583122059e-05, |
|
"loss": 0.7411, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.40425531914893614, |
|
"grad_norm": 1.4158235294830364, |
|
"learning_rate": 1.3517699475317016e-05, |
|
"loss": 0.8055, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.40501519756838905, |
|
"grad_norm": 1.1926144941953758, |
|
"learning_rate": 1.3494641795990986e-05, |
|
"loss": 0.7008, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.40577507598784196, |
|
"grad_norm": 1.1977355387652078, |
|
"learning_rate": 1.3471562933012432e-05, |
|
"loss": 0.7389, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.4065349544072948, |
|
"grad_norm": 1.1074293443823684, |
|
"learning_rate": 1.3448463026279706e-05, |
|
"loss": 0.6826, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.4072948328267477, |
|
"grad_norm": 1.1579000196463276, |
|
"learning_rate": 1.3425342215818718e-05, |
|
"loss": 0.8204, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.40805471124620063, |
|
"grad_norm": 1.1320408734153626, |
|
"learning_rate": 1.3402200641782089e-05, |
|
"loss": 0.666, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.4088145896656535, |
|
"grad_norm": 1.102357738478694, |
|
"learning_rate": 1.3379038444448307e-05, |
|
"loss": 0.7923, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.4095744680851064, |
|
"grad_norm": 1.115969398834189, |
|
"learning_rate": 1.335585576422087e-05, |
|
"loss": 0.795, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.41033434650455924, |
|
"grad_norm": 1.1177016135967561, |
|
"learning_rate": 1.3332652741627445e-05, |
|
"loss": 0.8207, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.41109422492401215, |
|
"grad_norm": 1.0183563617545865, |
|
"learning_rate": 1.3309429517318999e-05, |
|
"loss": 0.6787, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.41185410334346506, |
|
"grad_norm": 1.1849766766962344, |
|
"learning_rate": 1.3286186232068972e-05, |
|
"loss": 0.7712, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.4126139817629179, |
|
"grad_norm": 1.0649504403154333, |
|
"learning_rate": 1.326292302677239e-05, |
|
"loss": 0.776, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.4133738601823708, |
|
"grad_norm": 1.1103778255621153, |
|
"learning_rate": 1.3239640042445037e-05, |
|
"loss": 0.7236, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.41413373860182373, |
|
"grad_norm": 1.0781850790804375, |
|
"learning_rate": 1.3216337420222602e-05, |
|
"loss": 0.6777, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.4148936170212766, |
|
"grad_norm": 1.1779486641454955, |
|
"learning_rate": 1.31930153013598e-05, |
|
"loss": 0.8779, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.4156534954407295, |
|
"grad_norm": 1.1121183142586448, |
|
"learning_rate": 1.3169673827229539e-05, |
|
"loss": 0.7979, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.41641337386018235, |
|
"grad_norm": 1.0452578616803079, |
|
"learning_rate": 1.3146313139322051e-05, |
|
"loss": 0.8233, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.41717325227963525, |
|
"grad_norm": 1.1305161368686159, |
|
"learning_rate": 1.3122933379244036e-05, |
|
"loss": 0.7697, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.41793313069908816, |
|
"grad_norm": 1.1332904441347997, |
|
"learning_rate": 1.3099534688717806e-05, |
|
"loss": 0.7204, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.418693009118541, |
|
"grad_norm": 1.0544281101793018, |
|
"learning_rate": 1.3076117209580418e-05, |
|
"loss": 0.7957, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.4194528875379939, |
|
"grad_norm": 1.2097653291564578, |
|
"learning_rate": 1.3052681083782837e-05, |
|
"loss": 0.6848, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.42021276595744683, |
|
"grad_norm": 1.0942027920905422, |
|
"learning_rate": 1.3029226453389044e-05, |
|
"loss": 0.9204, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.4209726443768997, |
|
"grad_norm": 1.1629671487499098, |
|
"learning_rate": 1.3005753460575195e-05, |
|
"loss": 0.7929, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.4217325227963526, |
|
"grad_norm": 1.1615017757832076, |
|
"learning_rate": 1.2982262247628758e-05, |
|
"loss": 0.6554, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.42249240121580545, |
|
"grad_norm": 1.2350464342641283, |
|
"learning_rate": 1.2958752956947645e-05, |
|
"loss": 0.6981, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.42325227963525835, |
|
"grad_norm": 1.074491759854292, |
|
"learning_rate": 1.2935225731039349e-05, |
|
"loss": 0.8622, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.42401215805471126, |
|
"grad_norm": 1.1647847863807008, |
|
"learning_rate": 1.2911680712520082e-05, |
|
"loss": 0.8068, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.4247720364741641, |
|
"grad_norm": 1.1447247309035253, |
|
"learning_rate": 1.2888118044113913e-05, |
|
"loss": 0.7515, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.425531914893617, |
|
"grad_norm": 1.0904101769686227, |
|
"learning_rate": 1.2864537868651894e-05, |
|
"loss": 0.7788, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.42629179331306993, |
|
"grad_norm": 1.1684433478464358, |
|
"learning_rate": 1.2840940329071213e-05, |
|
"loss": 0.671, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.4270516717325228, |
|
"grad_norm": 1.2017550465747733, |
|
"learning_rate": 1.2817325568414299e-05, |
|
"loss": 0.7427, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.4278115501519757, |
|
"grad_norm": 1.0420135459940278, |
|
"learning_rate": 1.2793693729827984e-05, |
|
"loss": 0.7451, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.42857142857142855, |
|
"grad_norm": 1.0449468298970457, |
|
"learning_rate": 1.2770044956562613e-05, |
|
"loss": 0.8562, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.42933130699088146, |
|
"grad_norm": 1.2136363762203919, |
|
"learning_rate": 1.2746379391971191e-05, |
|
"loss": 0.6584, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.43009118541033436, |
|
"grad_norm": 1.2362341757280944, |
|
"learning_rate": 1.272269717950851e-05, |
|
"loss": 0.6923, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.4308510638297872, |
|
"grad_norm": 1.2038813073156636, |
|
"learning_rate": 1.2698998462730265e-05, |
|
"loss": 0.7222, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.4316109422492401, |
|
"grad_norm": 1.1402926150117914, |
|
"learning_rate": 1.2675283385292212e-05, |
|
"loss": 0.7143, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.43237082066869303, |
|
"grad_norm": 1.277234664355934, |
|
"learning_rate": 1.2651552090949265e-05, |
|
"loss": 0.7828, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.4331306990881459, |
|
"grad_norm": 1.083121761869766, |
|
"learning_rate": 1.2627804723554653e-05, |
|
"loss": 0.8248, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.4338905775075988, |
|
"grad_norm": 1.1046276920654474, |
|
"learning_rate": 1.2604041427059037e-05, |
|
"loss": 0.8345, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.43465045592705165, |
|
"grad_norm": 1.1001442593016568, |
|
"learning_rate": 1.2580262345509621e-05, |
|
"loss": 0.7206, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.43541033434650456, |
|
"grad_norm": 1.14236207055973, |
|
"learning_rate": 1.2556467623049313e-05, |
|
"loss": 0.7629, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.43617021276595747, |
|
"grad_norm": 1.1605887190397732, |
|
"learning_rate": 1.2532657403915821e-05, |
|
"loss": 0.747, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.4369300911854103, |
|
"grad_norm": 1.117710187766665, |
|
"learning_rate": 1.2508831832440795e-05, |
|
"loss": 0.78, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.4376899696048632, |
|
"grad_norm": 1.2482054789186818, |
|
"learning_rate": 1.248499105304894e-05, |
|
"loss": 0.7959, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.43844984802431614, |
|
"grad_norm": 1.2256788050979497, |
|
"learning_rate": 1.2461135210257156e-05, |
|
"loss": 0.5836, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.439209726443769, |
|
"grad_norm": 1.4775730013813828, |
|
"learning_rate": 1.2437264448673647e-05, |
|
"loss": 0.7441, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.4399696048632219, |
|
"grad_norm": 1.0844347032086228, |
|
"learning_rate": 1.2413378912997058e-05, |
|
"loss": 0.7404, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.44072948328267475, |
|
"grad_norm": 1.220072154238479, |
|
"learning_rate": 1.2389478748015584e-05, |
|
"loss": 0.7549, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.44148936170212766, |
|
"grad_norm": 1.3797855512370152, |
|
"learning_rate": 1.2365564098606103e-05, |
|
"loss": 0.702, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.44224924012158057, |
|
"grad_norm": 1.1048282390434343, |
|
"learning_rate": 1.2341635109733292e-05, |
|
"loss": 0.681, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.4430091185410334, |
|
"grad_norm": 1.2410407226194968, |
|
"learning_rate": 1.2317691926448753e-05, |
|
"loss": 0.6772, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.44376899696048633, |
|
"grad_norm": 1.1242367894543606, |
|
"learning_rate": 1.2293734693890131e-05, |
|
"loss": 0.6518, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.44452887537993924, |
|
"grad_norm": 0.9905778948996463, |
|
"learning_rate": 1.226976355728023e-05, |
|
"loss": 0.6832, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.4452887537993921, |
|
"grad_norm": 1.1201973075655094, |
|
"learning_rate": 1.2245778661926138e-05, |
|
"loss": 0.7694, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.446048632218845, |
|
"grad_norm": 1.2210274246473196, |
|
"learning_rate": 1.222178015321835e-05, |
|
"loss": 0.7827, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.44680851063829785, |
|
"grad_norm": 1.0076097698137845, |
|
"learning_rate": 1.2197768176629876e-05, |
|
"loss": 0.762, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.44756838905775076, |
|
"grad_norm": 1.0624166505363433, |
|
"learning_rate": 1.2173742877715374e-05, |
|
"loss": 0.7607, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.44832826747720367, |
|
"grad_norm": 1.2321772853254775, |
|
"learning_rate": 1.2149704402110243e-05, |
|
"loss": 0.7371, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.4490881458966565, |
|
"grad_norm": 1.2595966240381076, |
|
"learning_rate": 1.2125652895529766e-05, |
|
"loss": 0.7215, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.44984802431610943, |
|
"grad_norm": 1.1213688750304858, |
|
"learning_rate": 1.2101588503768224e-05, |
|
"loss": 0.7963, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.4506079027355623, |
|
"grad_norm": 0.9769789823292284, |
|
"learning_rate": 1.2077511372697986e-05, |
|
"loss": 0.6596, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.4513677811550152, |
|
"grad_norm": 1.3705621757358848, |
|
"learning_rate": 1.2053421648268661e-05, |
|
"loss": 0.7226, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.4521276595744681, |
|
"grad_norm": 1.0877206767535184, |
|
"learning_rate": 1.2029319476506183e-05, |
|
"loss": 0.8156, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.45288753799392095, |
|
"grad_norm": 1.1559961003190877, |
|
"learning_rate": 1.2005205003511948e-05, |
|
"loss": 0.6944, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.45364741641337386, |
|
"grad_norm": 1.055306325871312, |
|
"learning_rate": 1.1981078375461917e-05, |
|
"loss": 0.7877, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.45440729483282677, |
|
"grad_norm": 1.2138338408638794, |
|
"learning_rate": 1.1956939738605722e-05, |
|
"loss": 0.8352, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.4551671732522796, |
|
"grad_norm": 1.2164380288185532, |
|
"learning_rate": 1.1932789239265803e-05, |
|
"loss": 0.7387, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.45592705167173253, |
|
"grad_norm": 1.0622016134428793, |
|
"learning_rate": 1.1908627023836504e-05, |
|
"loss": 0.7813, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.4566869300911854, |
|
"grad_norm": 1.0354380350319183, |
|
"learning_rate": 1.1884453238783185e-05, |
|
"loss": 0.6817, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.4574468085106383, |
|
"grad_norm": 1.1146324749938001, |
|
"learning_rate": 1.1860268030641337e-05, |
|
"loss": 0.6585, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.4582066869300912, |
|
"grad_norm": 1.1733953270814297, |
|
"learning_rate": 1.1836071546015704e-05, |
|
"loss": 0.7152, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.45896656534954405, |
|
"grad_norm": 1.1063408683598057, |
|
"learning_rate": 1.1811863931579377e-05, |
|
"loss": 0.7577, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.45972644376899696, |
|
"grad_norm": 1.2502742143674057, |
|
"learning_rate": 1.1787645334072912e-05, |
|
"loss": 0.6801, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.46048632218844987, |
|
"grad_norm": 1.0856442473471655, |
|
"learning_rate": 1.176341590030345e-05, |
|
"loss": 0.7897, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.4612462006079027, |
|
"grad_norm": 1.269493236892582, |
|
"learning_rate": 1.1739175777143813e-05, |
|
"loss": 0.7531, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.46200607902735563, |
|
"grad_norm": 1.1160454691982469, |
|
"learning_rate": 1.1714925111531619e-05, |
|
"loss": 0.653, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.4627659574468085, |
|
"grad_norm": 1.1847312086049953, |
|
"learning_rate": 1.169066405046839e-05, |
|
"loss": 0.6859, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.4635258358662614, |
|
"grad_norm": 1.230192197807551, |
|
"learning_rate": 1.1666392741018675e-05, |
|
"loss": 0.8168, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.4642857142857143, |
|
"grad_norm": 1.1096867804048502, |
|
"learning_rate": 1.1642111330309129e-05, |
|
"loss": 0.7538, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.46504559270516715, |
|
"grad_norm": 1.0792534800316143, |
|
"learning_rate": 1.161781996552765e-05, |
|
"loss": 0.7887, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.46580547112462006, |
|
"grad_norm": 1.4139217480179833, |
|
"learning_rate": 1.159351879392247e-05, |
|
"loss": 0.7883, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.46656534954407297, |
|
"grad_norm": 1.2339602383114001, |
|
"learning_rate": 1.1569207962801264e-05, |
|
"loss": 0.7791, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.4673252279635258, |
|
"grad_norm": 1.0168116006915144, |
|
"learning_rate": 1.1544887619530277e-05, |
|
"loss": 0.7971, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.46808510638297873, |
|
"grad_norm": 0.9641437311687417, |
|
"learning_rate": 1.152055791153339e-05, |
|
"loss": 0.7587, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.4688449848024316, |
|
"grad_norm": 1.2384311319102208, |
|
"learning_rate": 1.1496218986291274e-05, |
|
"loss": 0.6966, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.4696048632218845, |
|
"grad_norm": 1.123483046561224, |
|
"learning_rate": 1.1471870991340459e-05, |
|
"loss": 0.7111, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.4703647416413374, |
|
"grad_norm": 1.3323715236633211, |
|
"learning_rate": 1.1447514074272452e-05, |
|
"loss": 0.9058, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.47112462006079026, |
|
"grad_norm": 1.0458444755972274, |
|
"learning_rate": 1.1423148382732854e-05, |
|
"loss": 0.6511, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.47188449848024316, |
|
"grad_norm": 1.1212928070334884, |
|
"learning_rate": 1.1398774064420444e-05, |
|
"loss": 0.7476, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.4726443768996961, |
|
"grad_norm": 1.0909705721586769, |
|
"learning_rate": 1.1374391267086301e-05, |
|
"loss": 0.7604, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.4734042553191489, |
|
"grad_norm": 1.1751938977778684, |
|
"learning_rate": 1.1350000138532902e-05, |
|
"loss": 0.8169, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.47416413373860183, |
|
"grad_norm": 1.2461151672781425, |
|
"learning_rate": 1.1325600826613221e-05, |
|
"loss": 0.7262, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.4749240121580547, |
|
"grad_norm": 0.9967543669312325, |
|
"learning_rate": 1.1301193479229842e-05, |
|
"loss": 0.7393, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.4756838905775076, |
|
"grad_norm": 1.1946765867292415, |
|
"learning_rate": 1.1276778244334055e-05, |
|
"loss": 0.8378, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.4764437689969605, |
|
"grad_norm": 1.1800912079687036, |
|
"learning_rate": 1.1252355269924965e-05, |
|
"loss": 0.7834, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.47720364741641336, |
|
"grad_norm": 1.0682660612153005, |
|
"learning_rate": 1.1227924704048585e-05, |
|
"loss": 0.6977, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.47796352583586627, |
|
"grad_norm": 1.2082657620713166, |
|
"learning_rate": 1.1203486694796956e-05, |
|
"loss": 0.7893, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.4787234042553192, |
|
"grad_norm": 1.5186537845384005, |
|
"learning_rate": 1.1179041390307235e-05, |
|
"loss": 0.8235, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.479483282674772, |
|
"grad_norm": 1.1171118006243392, |
|
"learning_rate": 1.1154588938760795e-05, |
|
"loss": 0.8361, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.48024316109422494, |
|
"grad_norm": 1.0720592098179131, |
|
"learning_rate": 1.1130129488382341e-05, |
|
"loss": 0.8329, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.4810030395136778, |
|
"grad_norm": 1.149089667943674, |
|
"learning_rate": 1.1105663187438997e-05, |
|
"loss": 0.7355, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.4817629179331307, |
|
"grad_norm": 1.0824070043425529, |
|
"learning_rate": 1.1081190184239418e-05, |
|
"loss": 0.6931, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.4825227963525836, |
|
"grad_norm": 1.1997002322685555, |
|
"learning_rate": 1.1056710627132885e-05, |
|
"loss": 0.6972, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.48328267477203646, |
|
"grad_norm": 1.1943832275144262, |
|
"learning_rate": 1.1032224664508406e-05, |
|
"loss": 0.6955, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.48404255319148937, |
|
"grad_norm": 1.478006634154453, |
|
"learning_rate": 1.1007732444793815e-05, |
|
"loss": 0.7545, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.4848024316109423, |
|
"grad_norm": 1.3061954095680135, |
|
"learning_rate": 1.0983234116454885e-05, |
|
"loss": 0.7204, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.48556231003039513, |
|
"grad_norm": 1.2531293584528425, |
|
"learning_rate": 1.0958729827994406e-05, |
|
"loss": 0.844, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.48632218844984804, |
|
"grad_norm": 1.0687070140958945, |
|
"learning_rate": 1.09342197279513e-05, |
|
"loss": 0.7528, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.4870820668693009, |
|
"grad_norm": 1.2505257621824397, |
|
"learning_rate": 1.090970396489973e-05, |
|
"loss": 0.7184, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.4878419452887538, |
|
"grad_norm": 1.2507111589246462, |
|
"learning_rate": 1.0885182687448162e-05, |
|
"loss": 0.7329, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.4886018237082067, |
|
"grad_norm": 1.080529498499154, |
|
"learning_rate": 1.086065604423851e-05, |
|
"loss": 0.7596, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.48936170212765956, |
|
"grad_norm": 1.0020667948655044, |
|
"learning_rate": 1.0836124183945209e-05, |
|
"loss": 0.7774, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.49012158054711247, |
|
"grad_norm": 1.2598415490818535, |
|
"learning_rate": 1.0811587255274313e-05, |
|
"loss": 0.7406, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.4908814589665654, |
|
"grad_norm": 1.2288655437787905, |
|
"learning_rate": 1.0787045406962605e-05, |
|
"loss": 0.71, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.49164133738601823, |
|
"grad_norm": 1.190316068046688, |
|
"learning_rate": 1.0762498787776688e-05, |
|
"loss": 0.7821, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.49240121580547114, |
|
"grad_norm": 1.1505791158862277, |
|
"learning_rate": 1.073794754651208e-05, |
|
"loss": 0.7304, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.493161094224924, |
|
"grad_norm": 1.0576484726630266, |
|
"learning_rate": 1.0713391831992324e-05, |
|
"loss": 0.7889, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.4939209726443769, |
|
"grad_norm": 1.377761448021965, |
|
"learning_rate": 1.0688831793068078e-05, |
|
"loss": 0.7526, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.4946808510638298, |
|
"grad_norm": 1.248091367107788, |
|
"learning_rate": 1.0664267578616208e-05, |
|
"loss": 0.6932, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.49544072948328266, |
|
"grad_norm": 1.1413076054917706, |
|
"learning_rate": 1.0639699337538897e-05, |
|
"loss": 0.6946, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.49620060790273557, |
|
"grad_norm": 1.1391480731885173, |
|
"learning_rate": 1.0615127218762733e-05, |
|
"loss": 0.6469, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.4969604863221885, |
|
"grad_norm": 1.2298577878062031, |
|
"learning_rate": 1.059055137123781e-05, |
|
"loss": 0.6232, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.49772036474164133, |
|
"grad_norm": 1.1016633016463047, |
|
"learning_rate": 1.0565971943936826e-05, |
|
"loss": 0.6501, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.49848024316109424, |
|
"grad_norm": 1.13210944329585, |
|
"learning_rate": 1.0541389085854177e-05, |
|
"loss": 0.7832, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.4992401215805471, |
|
"grad_norm": 1.06302593615317, |
|
"learning_rate": 1.0516802946005059e-05, |
|
"loss": 0.7029, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.120242254263207, |
|
"learning_rate": 1.0492213673424554e-05, |
|
"loss": 0.7361, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.5007598784194529, |
|
"grad_norm": 1.1330827914748247, |
|
"learning_rate": 1.0467621417166744e-05, |
|
"loss": 0.8563, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.5015197568389058, |
|
"grad_norm": 1.3511630849170222, |
|
"learning_rate": 1.0443026326303789e-05, |
|
"loss": 0.7762, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.5022796352583586, |
|
"grad_norm": 1.1291585501546435, |
|
"learning_rate": 1.0418428549925033e-05, |
|
"loss": 0.6979, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.5030395136778115, |
|
"grad_norm": 1.0884622705560356, |
|
"learning_rate": 1.0393828237136108e-05, |
|
"loss": 0.7682, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.5037993920972644, |
|
"grad_norm": 1.2429830639549164, |
|
"learning_rate": 1.0369225537058004e-05, |
|
"loss": 0.8714, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.5045592705167173, |
|
"grad_norm": 1.0576093612851172, |
|
"learning_rate": 1.0344620598826199e-05, |
|
"loss": 0.6366, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.5053191489361702, |
|
"grad_norm": 1.236175622011617, |
|
"learning_rate": 1.0320013571589727e-05, |
|
"loss": 0.7086, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.506079027355623, |
|
"grad_norm": 1.5006644344885889, |
|
"learning_rate": 1.0295404604510286e-05, |
|
"loss": 0.7595, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.506838905775076, |
|
"grad_norm": 1.2767719535087934, |
|
"learning_rate": 1.0270793846761347e-05, |
|
"loss": 0.7759, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.5075987841945289, |
|
"grad_norm": 1.1155010522074496, |
|
"learning_rate": 1.0246181447527213e-05, |
|
"loss": 0.7257, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.5083586626139818, |
|
"grad_norm": 1.1552502485248597, |
|
"learning_rate": 1.0221567556002154e-05, |
|
"loss": 0.8051, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.5091185410334347, |
|
"grad_norm": 1.0214004297340544, |
|
"learning_rate": 1.0196952321389482e-05, |
|
"loss": 0.7516, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.5098784194528876, |
|
"grad_norm": 1.0781221550155105, |
|
"learning_rate": 1.0172335892900645e-05, |
|
"loss": 0.7074, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.5106382978723404, |
|
"grad_norm": 1.2210785973855254, |
|
"learning_rate": 1.0147718419754335e-05, |
|
"loss": 0.7237, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.5113981762917933, |
|
"grad_norm": 1.171152830006125, |
|
"learning_rate": 1.0123100051175567e-05, |
|
"loss": 0.7524, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.5121580547112462, |
|
"grad_norm": 1.2062323077371615, |
|
"learning_rate": 1.0098480936394801e-05, |
|
"loss": 0.6973, |
|
"step": 674 |
},
{
"epoch": 0.5129179331306991,
"grad_norm": 1.4246802896434376,
"learning_rate": 1.0073861224647002e-05,
"loss": 0.6907,
"step": 675
},
{
"epoch": 0.513677811550152,
"grad_norm": 1.180418488238661,
"learning_rate": 1.0049241065170765e-05,
"loss": 0.81,
"step": 676
},
{
"epoch": 0.5144376899696048,
"grad_norm": 1.1737335268386981,
"learning_rate": 1.0024620607207393e-05,
"loss": 0.701,
"step": 677
},
{
"epoch": 0.5151975683890577,
"grad_norm": 0.9601725612238251,
"learning_rate": 1e-05,
"loss": 0.6626,
"step": 678
},
{
"epoch": 0.5159574468085106,
"grad_norm": 1.2102241908113591,
"learning_rate": 9.97537939279261e-06,
"loss": 0.7688,
"step": 679
},
{
"epoch": 0.5167173252279635,
"grad_norm": 1.3845812329157814,
"learning_rate": 9.950758934829242e-06,
"loss": 0.6194,
"step": 680
},
{
"epoch": 0.5174772036474165,
"grad_norm": 1.302664216243635,
"learning_rate": 9.926138775352998e-06,
"loss": 0.6957,
"step": 681
},
{
"epoch": 0.5182370820668692,
"grad_norm": 1.274488027701452,
"learning_rate": 9.901519063605204e-06,
"loss": 0.7888,
"step": 682
},
{
"epoch": 0.5189969604863222,
"grad_norm": 1.0335838940477384,
"learning_rate": 9.876899948824435e-06,
"loss": 0.7131,
"step": 683
},
{
"epoch": 0.5197568389057751,
"grad_norm": 1.0212845146712737,
"learning_rate": 9.85228158024567e-06,
"loss": 0.7249,
"step": 684
},
{
"epoch": 0.520516717325228,
"grad_norm": 1.122222342354429,
"learning_rate": 9.82766410709936e-06,
"loss": 0.7623,
"step": 685
},
{
"epoch": 0.5212765957446809,
"grad_norm": 1.0687245736709179,
"learning_rate": 9.80304767861052e-06,
"loss": 0.6708,
"step": 686
},
{
"epoch": 0.5220364741641338,
"grad_norm": 1.204374697486561,
"learning_rate": 9.77843244399785e-06,
"loss": 0.7664,
"step": 687
},
{
"epoch": 0.5227963525835866,
"grad_norm": 1.2736332945104485,
"learning_rate": 9.75381855247279e-06,
"loss": 0.8362,
"step": 688
},
{
"epoch": 0.5235562310030395,
"grad_norm": 0.9624540616080698,
"learning_rate": 9.729206153238658e-06,
"loss": 0.7137,
"step": 689
},
{
"epoch": 0.5243161094224924,
"grad_norm": 1.0421247849343214,
"learning_rate": 9.704595395489714e-06,
"loss": 0.7408,
"step": 690
},
{
"epoch": 0.5250759878419453,
"grad_norm": 1.013035300998222,
"learning_rate": 9.679986428410276e-06,
"loss": 0.5776,
"step": 691
},
{
"epoch": 0.5258358662613982,
"grad_norm": 1.0871447199817086,
"learning_rate": 9.655379401173804e-06,
"loss": 0.7964,
"step": 692
},
{
"epoch": 0.526595744680851,
"grad_norm": 1.4237651645969185,
"learning_rate": 9.630774462942002e-06,
"loss": 0.6829,
"step": 693
},
{
"epoch": 0.5273556231003039,
"grad_norm": 1.0855572690595994,
"learning_rate": 9.606171762863899e-06,
"loss": 0.7261,
"step": 694
},
{
"epoch": 0.5281155015197568,
"grad_norm": 1.2445924867674263,
"learning_rate": 9.581571450074969e-06,
"loss": 0.7748,
"step": 695
},
{
"epoch": 0.5288753799392097,
"grad_norm": 1.1859548928831545,
"learning_rate": 9.556973673696214e-06,
"loss": 0.6548,
"step": 696
},
{
"epoch": 0.5296352583586627,
"grad_norm": 1.0687423053782519,
"learning_rate": 9.53237858283326e-06,
"loss": 0.8082,
"step": 697
},
{
"epoch": 0.5303951367781155,
"grad_norm": 1.2799853643131716,
"learning_rate": 9.507786326575451e-06,
"loss": 0.7998,
"step": 698
},
{
"epoch": 0.5311550151975684,
"grad_norm": 1.0776337137073257,
"learning_rate": 9.483197053994948e-06,
"loss": 0.7741,
"step": 699
},
{
"epoch": 0.5319148936170213,
"grad_norm": 0.9874137067298976,
"learning_rate": 9.458610914145826e-06,
"loss": 0.7094,
"step": 700
},
{
"epoch": 0.5326747720364742,
"grad_norm": 1.1138458268182485,
"learning_rate": 9.434028056063178e-06,
"loss": 0.7488,
"step": 701
},
{
"epoch": 0.5334346504559271,
"grad_norm": 1.196985562955274,
"learning_rate": 9.409448628762194e-06,
"loss": 0.7638,
"step": 702
},
{
"epoch": 0.53419452887538,
"grad_norm": 1.2136898659918227,
"learning_rate": 9.38487278123727e-06,
"loss": 0.6823,
"step": 703
},
{
"epoch": 0.5349544072948328,
"grad_norm": 1.3892406474102839,
"learning_rate": 9.360300662461104e-06,
"loss": 0.6746,
"step": 704
},
{
"epoch": 0.5357142857142857,
"grad_norm": 1.1017160819969196,
"learning_rate": 9.335732421383794e-06,
"loss": 0.7706,
"step": 705
},
{
"epoch": 0.5364741641337386,
"grad_norm": 1.327308285762868,
"learning_rate": 9.311168206931925e-06,
"loss": 0.8612,
"step": 706
},
{
"epoch": 0.5372340425531915,
"grad_norm": 1.1205800863120123,
"learning_rate": 9.286608168007678e-06,
"loss": 0.6615,
"step": 707
},
{
"epoch": 0.5379939209726444,
"grad_norm": 1.1868064806482874,
"learning_rate": 9.262052453487924e-06,
"loss": 0.6602,
"step": 708
},
{
"epoch": 0.5387537993920972,
"grad_norm": 1.2255973213368228,
"learning_rate": 9.237501212223314e-06,
"loss": 0.7448,
"step": 709
},
{
"epoch": 0.5395136778115501,
"grad_norm": 1.0637993903719365,
"learning_rate": 9.212954593037396e-06,
"loss": 0.6991,
"step": 710
},
{
"epoch": 0.540273556231003,
"grad_norm": 1.3108472120873003,
"learning_rate": 9.18841274472569e-06,
"loss": 0.722,
"step": 711
},
{
"epoch": 0.541033434650456,
"grad_norm": 1.0000410345632875,
"learning_rate": 9.163875816054795e-06,
"loss": 0.7951,
"step": 712
},
{
"epoch": 0.5417933130699089,
"grad_norm": 1.0898157550563874,
"learning_rate": 9.139343955761493e-06,
"loss": 0.7302,
"step": 713
},
{
"epoch": 0.5425531914893617,
"grad_norm": 1.2462590031779057,
"learning_rate": 9.11481731255184e-06,
"loss": 0.8054,
"step": 714
},
{
"epoch": 0.5433130699088146,
"grad_norm": 1.0842623103782374,
"learning_rate": 9.090296035100275e-06,
"loss": 0.7488,
"step": 715
},
{
"epoch": 0.5440729483282675,
"grad_norm": 1.126828508481787,
"learning_rate": 9.065780272048703e-06,
"loss": 0.7089,
"step": 716
},
{
"epoch": 0.5448328267477204,
"grad_norm": 1.1161194449285288,
"learning_rate": 9.041270172005599e-06,
"loss": 0.8428,
"step": 717
},
{
"epoch": 0.5455927051671733,
"grad_norm": 1.117177427711117,
"learning_rate": 9.016765883545116e-06,
"loss": 0.7703,
"step": 718
},
{
"epoch": 0.5463525835866262,
"grad_norm": 1.0526580123128249,
"learning_rate": 8.992267555206185e-06,
"loss": 0.7697,
"step": 719
},
{
"epoch": 0.547112462006079,
"grad_norm": 1.144212356521607,
"learning_rate": 8.967775335491596e-06,
"loss": 0.7864,
"step": 720
},
{
"epoch": 0.5478723404255319,
"grad_norm": 1.080723967095971,
"learning_rate": 8.943289372867118e-06,
"loss": 0.6892,
"step": 721
},
{
"epoch": 0.5486322188449848,
"grad_norm": 1.0718604544768817,
"learning_rate": 8.918809815760585e-06,
"loss": 0.7212,
"step": 722
},
{
"epoch": 0.5493920972644377,
"grad_norm": 1.1174684303999995,
"learning_rate": 8.894336812561006e-06,
"loss": 0.7087,
"step": 723
},
{
"epoch": 0.5501519756838906,
"grad_norm": 1.0963600814875032,
"learning_rate": 8.869870511617662e-06,
"loss": 0.7443,
"step": 724
},
{
"epoch": 0.5509118541033434,
"grad_norm": 1.228940173574746,
"learning_rate": 8.845411061239208e-06,
"loss": 0.7431,
"step": 725
},
{
"epoch": 0.5516717325227963,
"grad_norm": 1.021217582634966,
"learning_rate": 8.82095860969277e-06,
"loss": 0.7284,
"step": 726
},
{
"epoch": 0.5524316109422492,
"grad_norm": 1.0842966533534397,
"learning_rate": 8.796513305203049e-06,
"loss": 0.7526,
"step": 727
},
{
"epoch": 0.5531914893617021,
"grad_norm": 1.3455009430552207,
"learning_rate": 8.772075295951416e-06,
"loss": 0.6505,
"step": 728
},
{
"epoch": 0.5539513677811551,
"grad_norm": 0.9568903741747474,
"learning_rate": 8.74764473007504e-06,
"loss": 0.6225,
"step": 729
},
{
"epoch": 0.5547112462006079,
"grad_norm": 1.1662620746932606,
"learning_rate": 8.723221755665948e-06,
"loss": 0.7685,
"step": 730
},
{
"epoch": 0.5554711246200608,
"grad_norm": 1.0179421849609032,
"learning_rate": 8.698806520770161e-06,
"loss": 0.6808,
"step": 731
},
{
"epoch": 0.5562310030395137,
"grad_norm": 1.0580889299423355,
"learning_rate": 8.674399173386779e-06,
"loss": 0.7488,
"step": 732
},
{
"epoch": 0.5569908814589666,
"grad_norm": 0.9153959677609983,
"learning_rate": 8.6499998614671e-06,
"loss": 0.6928,
"step": 733
},
{
"epoch": 0.5577507598784195,
"grad_norm": 1.2461673348388922,
"learning_rate": 8.625608732913702e-06,
"loss": 0.7417,
"step": 734
},
{
"epoch": 0.5585106382978723,
"grad_norm": 1.1843093976279093,
"learning_rate": 8.60122593557956e-06,
"loss": 0.7276,
"step": 735
},
{
"epoch": 0.5592705167173252,
"grad_norm": 1.1612172390993862,
"learning_rate": 8.576851617267151e-06,
"loss": 0.6551,
"step": 736
},
{
"epoch": 0.5600303951367781,
"grad_norm": 1.0345523205995302,
"learning_rate": 8.55248592572755e-06,
"loss": 0.7154,
"step": 737
},
{
"epoch": 0.560790273556231,
"grad_norm": 1.155661385060302,
"learning_rate": 8.528129008659543e-06,
"loss": 0.8645,
"step": 738
},
{
"epoch": 0.5615501519756839,
"grad_norm": 1.1374514153416948,
"learning_rate": 8.50378101370873e-06,
"loss": 0.7155,
"step": 739
},
{
"epoch": 0.5623100303951368,
"grad_norm": 1.1213383547786449,
"learning_rate": 8.479442088466612e-06,
"loss": 0.6853,
"step": 740
},
{
"epoch": 0.5630699088145896,
"grad_norm": 1.435739682577007,
"learning_rate": 8.455112380469728e-06,
"loss": 0.6802,
"step": 741
},
{
"epoch": 0.5638297872340425,
"grad_norm": 1.2143939462650815,
"learning_rate": 8.430792037198738e-06,
"loss": 0.6548,
"step": 742
},
{
"epoch": 0.5645896656534954,
"grad_norm": 0.9600860052959144,
"learning_rate": 8.406481206077535e-06,
"loss": 0.6814,
"step": 743
},
{
"epoch": 0.5653495440729484,
"grad_norm": 1.302452799435894,
"learning_rate": 8.382180034472353e-06,
"loss": 0.7308,
"step": 744
},
{
"epoch": 0.5661094224924013,
"grad_norm": 1.0653760433270474,
"learning_rate": 8.357888669690876e-06,
"loss": 0.7746,
"step": 745
},
{
"epoch": 0.5668693009118541,
"grad_norm": 1.3315367773834987,
"learning_rate": 8.333607258981328e-06,
"loss": 0.6738,
"step": 746
},
{
"epoch": 0.567629179331307,
"grad_norm": 1.2657872209400918,
"learning_rate": 8.309335949531609e-06,
"loss": 0.7687,
"step": 747
},
{
"epoch": 0.5683890577507599,
"grad_norm": 1.035527551805823,
"learning_rate": 8.285074888468385e-06,
"loss": 0.6656,
"step": 748
},
{
"epoch": 0.5691489361702128,
"grad_norm": 1.1689385914495711,
"learning_rate": 8.260824222856189e-06,
"loss": 0.7617,
"step": 749
},
{
"epoch": 0.5699088145896657,
"grad_norm": 1.182533572120935,
"learning_rate": 8.236584099696553e-06,
"loss": 0.6259,
"step": 750
},
{
"epoch": 0.5706686930091185,
"grad_norm": 1.2539832334039402,
"learning_rate": 8.212354665927088e-06,
"loss": 0.8626,
"step": 751
},
{
"epoch": 0.5714285714285714,
"grad_norm": 1.16793885360512,
"learning_rate": 8.188136068420627e-06,
"loss": 0.7743,
"step": 752
},
{
"epoch": 0.5721884498480243,
"grad_norm": 1.3045884813334427,
"learning_rate": 8.163928453984298e-06,
"loss": 0.6861,
"step": 753
},
{
"epoch": 0.5729483282674772,
"grad_norm": 1.1578334058918511,
"learning_rate": 8.139731969358665e-06,
"loss": 0.6578,
"step": 754
},
{
"epoch": 0.5737082066869301,
"grad_norm": 1.085006394436604,
"learning_rate": 8.115546761216822e-06,
"loss": 0.7372,
"step": 755
},
{
"epoch": 0.574468085106383,
"grad_norm": 1.2728905675998503,
"learning_rate": 8.091372976163496e-06,
"loss": 0.7905,
"step": 756
},
{
"epoch": 0.5752279635258358,
"grad_norm": 1.2577270128595894,
"learning_rate": 8.067210760734199e-06,
"loss": 0.6852,
"step": 757
},
{
"epoch": 0.5759878419452887,
"grad_norm": 1.091320885267308,
"learning_rate": 8.043060261394283e-06,
"loss": 0.6856,
"step": 758
},
{
"epoch": 0.5767477203647416,
"grad_norm": 1.0634220427111116,
"learning_rate": 8.01892162453809e-06,
"loss": 0.7451,
"step": 759
},
{
"epoch": 0.5775075987841946,
"grad_norm": 0.965968319145033,
"learning_rate": 7.994794996488055e-06,
"loss": 0.7295,
"step": 760
},
{
"epoch": 0.5782674772036475,
"grad_norm": 1.4557035005068617,
"learning_rate": 7.970680523493817e-06,
"loss": 0.8156,
"step": 761
},
{
"epoch": 0.5790273556231003,
"grad_norm": 1.228414568854264,
"learning_rate": 7.94657835173134e-06,
"loss": 0.7676,
"step": 762
},
{
"epoch": 0.5797872340425532,
"grad_norm": 1.0662627348682954,
"learning_rate": 7.922488627302016e-06,
"loss": 0.8173,
"step": 763
},
{
"epoch": 0.5805471124620061,
"grad_norm": 1.0943911331242442,
"learning_rate": 7.898411496231781e-06,
"loss": 0.6445,
"step": 764
},
{
"epoch": 0.581306990881459,
"grad_norm": 1.1845902784067897,
"learning_rate": 7.874347104470234e-06,
"loss": 0.735,
"step": 765
},
{
"epoch": 0.5820668693009119,
"grad_norm": 1.0037170474828196,
"learning_rate": 7.85029559788976e-06,
"loss": 0.7256,
"step": 766
},
{
"epoch": 0.5828267477203647,
"grad_norm": 1.1300711418023441,
"learning_rate": 7.82625712228463e-06,
"loss": 0.6534,
"step": 767
},
{
"epoch": 0.5835866261398176,
"grad_norm": 1.2021840303759719,
"learning_rate": 7.802231823370126e-06,
"loss": 0.7048,
"step": 768
},
{
"epoch": 0.5843465045592705,
"grad_norm": 1.2229196377630545,
"learning_rate": 7.778219846781654e-06,
"loss": 0.6571,
"step": 769
},
{
"epoch": 0.5851063829787234,
"grad_norm": 1.22428433043158,
"learning_rate": 7.754221338073863e-06,
"loss": 0.7309,
"step": 770
},
{
"epoch": 0.5858662613981763,
"grad_norm": 1.1636052982261818,
"learning_rate": 7.730236442719774e-06,
"loss": 0.8828,
"step": 771
},
{
"epoch": 0.5866261398176292,
"grad_norm": 1.0557514909654202,
"learning_rate": 7.706265306109872e-06,
"loss": 0.7323,
"step": 772
},
{
"epoch": 0.587386018237082,
"grad_norm": 1.128705574309213,
"learning_rate": 7.682308073551252e-06,
"loss": 0.801,
"step": 773
},
{
"epoch": 0.5881458966565349,
"grad_norm": 1.085263535196822,
"learning_rate": 7.658364890266713e-06,
"loss": 0.6932,
"step": 774
},
{
"epoch": 0.5889057750759878,
"grad_norm": 1.1571362771119302,
"learning_rate": 7.634435901393899e-06,
"loss": 0.6665,
"step": 775
},
{
"epoch": 0.5896656534954408,
"grad_norm": 1.1363472000581079,
"learning_rate": 7.6105212519844195e-06,
"loss": 0.8424,
"step": 776
},
{
"epoch": 0.5904255319148937,
"grad_norm": 1.1893971887933428,
"learning_rate": 7.586621087002945e-06,
"loss": 0.7843,
"step": 777
},
{
"epoch": 0.5911854103343465,
"grad_norm": 1.3545350834474912,
"learning_rate": 7.5627355513263545e-06,
"loss": 0.6648,
"step": 778
},
{
"epoch": 0.5919452887537994,
"grad_norm": 1.2821343466577149,
"learning_rate": 7.5388647897428445e-06,
"loss": 0.7347,
"step": 779
},
{
"epoch": 0.5927051671732523,
"grad_norm": 1.0665277209889865,
"learning_rate": 7.51500894695106e-06,
"loss": 0.7554,
"step": 780
},
{
"epoch": 0.5934650455927052,
"grad_norm": 0.9871422560187552,
"learning_rate": 7.491168167559208e-06,
"loss": 0.8435,
"step": 781
},
{
"epoch": 0.5942249240121581,
"grad_norm": 1.2007422072233171,
"learning_rate": 7.467342596084179e-06,
"loss": 0.6699,
"step": 782
},
{
"epoch": 0.5949848024316109,
"grad_norm": 1.2017986976755273,
"learning_rate": 7.443532376950688e-06,
"loss": 0.6769,
"step": 783
},
{
"epoch": 0.5957446808510638,
"grad_norm": 1.2069515723864184,
"learning_rate": 7.419737654490379e-06,
"loss": 0.6098,
"step": 784
},
{
"epoch": 0.5965045592705167,
"grad_norm": 1.213967528264772,
"learning_rate": 7.3959585729409664e-06,
"loss": 0.7866,
"step": 785
},
{
"epoch": 0.5972644376899696,
"grad_norm": 1.190147133494981,
"learning_rate": 7.372195276445349e-06,
"loss": 0.7867,
"step": 786
},
{
"epoch": 0.5980243161094225,
"grad_norm": 1.1198316602618827,
"learning_rate": 7.34844790905074e-06,
"loss": 0.6284,
"step": 787
},
{
"epoch": 0.5987841945288754,
"grad_norm": 1.054934350969657,
"learning_rate": 7.324716614707794e-06,
"loss": 0.7569,
"step": 788
},
{
"epoch": 0.5995440729483282,
"grad_norm": 1.184969480600631,
"learning_rate": 7.301001537269736e-06,
"loss": 0.6951,
"step": 789
},
{
"epoch": 0.6003039513677811,
"grad_norm": 1.207437517303436,
"learning_rate": 7.277302820491492e-06,
"loss": 0.6511,
"step": 790
},
{
"epoch": 0.601063829787234,
"grad_norm": 1.0028977572232165,
"learning_rate": 7.253620608028811e-06,
"loss": 0.6651,
"step": 791
},
{
"epoch": 0.601823708206687,
"grad_norm": 1.364380210991326,
"learning_rate": 7.229955043437391e-06,
"loss": 0.7429,
"step": 792
},
{
"epoch": 0.6025835866261399,
"grad_norm": 1.214893916268479,
"learning_rate": 7.206306270172019e-06,
"loss": 0.6863,
"step": 793
},
{
"epoch": 0.6033434650455927,
"grad_norm": 1.1620810745177512,
"learning_rate": 7.182674431585703e-06,
"loss": 0.7676,
"step": 794
},
{
"epoch": 0.6041033434650456,
"grad_norm": 1.3414636551310861,
"learning_rate": 7.1590596709287905e-06,
"loss": 0.8243,
"step": 795
},
{
"epoch": 0.6048632218844985,
"grad_norm": 1.0261374718405134,
"learning_rate": 7.135462131348107e-06,
"loss": 0.6541,
"step": 796
},
{
"epoch": 0.6056231003039514,
"grad_norm": 1.050953400295176,
"learning_rate": 7.111881955886093e-06,
"loss": 0.7374,
"step": 797
},
{
"epoch": 0.6063829787234043,
"grad_norm": 1.186612811549936,
"learning_rate": 7.088319287479919e-06,
"loss": 0.7259,
"step": 798
},
{
"epoch": 0.6071428571428571,
"grad_norm": 1.1878827605247304,
"learning_rate": 7.064774268960654e-06,
"loss": 0.6904,
"step": 799
},
{
"epoch": 0.60790273556231,
"grad_norm": 1.198071994450376,
"learning_rate": 7.041247043052357e-06,
"loss": 0.771,
"step": 800
},
{
"epoch": 0.6086626139817629,
"grad_norm": 1.1138795709931302,
"learning_rate": 7.017737752371243e-06,
"loss": 0.7284,
"step": 801
},
{
"epoch": 0.6094224924012158,
"grad_norm": 1.1239188974925398,
"learning_rate": 6.994246539424809e-06,
"loss": 0.7718,
"step": 802
},
{
"epoch": 0.6101823708206687,
"grad_norm": 1.0269669817827287,
"learning_rate": 6.970773546610958e-06,
"loss": 0.6545,
"step": 803
},
{
"epoch": 0.6109422492401215,
"grad_norm": 0.9849457179037013,
"learning_rate": 6.947318916217167e-06,
"loss": 0.7391,
"step": 804
},
{
"epoch": 0.6117021276595744,
"grad_norm": 1.2560459615613944,
"learning_rate": 6.923882790419585e-06,
"loss": 0.7587,
"step": 805
},
{
"epoch": 0.6124620060790273,
"grad_norm": 1.1244463345154598,
"learning_rate": 6.9004653112822e-06,
"loss": 0.7012,
"step": 806
},
{
"epoch": 0.6132218844984803,
"grad_norm": 1.2910529956660979,
"learning_rate": 6.87706662075597e-06,
"loss": 0.7811,
"step": 807
},
{
"epoch": 0.6139817629179332,
"grad_norm": 1.1279784294118858,
"learning_rate": 6.853686860677949e-06,
"loss": 0.7147,
"step": 808
},
{
"epoch": 0.6147416413373861,
"grad_norm": 1.023180421150499,
"learning_rate": 6.830326172770463e-06,
"loss": 0.7056,
"step": 809
},
{
"epoch": 0.6155015197568389,
"grad_norm": 1.1226610159276051,
"learning_rate": 6.806984698640202e-06,
"loss": 0.8144,
"step": 810
},
{
"epoch": 0.6162613981762918,
"grad_norm": 1.2161411828987057,
"learning_rate": 6.783662579777402e-06,
"loss": 0.6917,
"step": 811
},
{
"epoch": 0.6170212765957447,
"grad_norm": 1.1613987005065696,
"learning_rate": 6.760359957554964e-06,
"loss": 0.6466,
"step": 812
},
{
"epoch": 0.6177811550151976,
"grad_norm": 1.018688143415983,
"learning_rate": 6.737076973227614e-06,
"loss": 0.7506,
"step": 813
},
{
"epoch": 0.6185410334346505,
"grad_norm": 1.1689520435806657,
"learning_rate": 6.713813767931032e-06,
"loss": 0.6895,
"step": 814
},
{
"epoch": 0.6193009118541033,
"grad_norm": 1.1856195924843178,
"learning_rate": 6.690570482681003e-06,
"loss": 0.6359,
"step": 815
},
{
"epoch": 0.6200607902735562,
"grad_norm": 1.0650650175975844,
"learning_rate": 6.66734725837256e-06,
"loss": 0.7376,
"step": 816
},
{
"epoch": 0.6208206686930091,
"grad_norm": 1.1105670335666409,
"learning_rate": 6.6441442357791315e-06,
"loss": 0.6092,
"step": 817
},
{
"epoch": 0.621580547112462,
"grad_norm": 1.125965907106106,
"learning_rate": 6.620961555551697e-06,
"loss": 0.6722,
"step": 818
},
{
"epoch": 0.6223404255319149,
"grad_norm": 1.0691928517490077,
"learning_rate": 6.597799358217915e-06,
"loss": 0.5996,
"step": 819
},
{
"epoch": 0.6231003039513677,
"grad_norm": 1.0869838294675218,
"learning_rate": 6.574657784181287e-06,
"loss": 0.666,
"step": 820
},
{
"epoch": 0.6238601823708206,
"grad_norm": 1.0560312851091436,
"learning_rate": 6.551536973720298e-06,
"loss": 0.7692,
"step": 821
},
{
"epoch": 0.6246200607902735,
"grad_norm": 1.1435912397842571,
"learning_rate": 6.5284370669875706e-06,
"loss": 0.6902,
"step": 822
},
{
"epoch": 0.6253799392097265,
"grad_norm": 1.1566242840694625,
"learning_rate": 6.505358204009018e-06,
"loss": 0.6189,
"step": 823
},
{
"epoch": 0.6261398176291794,
"grad_norm": 1.1794303233552845,
"learning_rate": 6.482300524682985e-06,
"loss": 0.7805,
"step": 824
},
{
"epoch": 0.6268996960486323,
"grad_norm": 1.10223527961512,
"learning_rate": 6.459264168779416e-06,
"loss": 0.7555,
"step": 825
},
{
"epoch": 0.6276595744680851,
"grad_norm": 1.2528347286286639,
"learning_rate": 6.436249275938977e-06,
"loss": 0.7455,
"step": 826
},
{
"epoch": 0.628419452887538,
"grad_norm": 1.2248288878026101,
"learning_rate": 6.413255985672262e-06,
"loss": 0.6467,
"step": 827
},
{
"epoch": 0.6291793313069909,
"grad_norm": 1.1104651445554183,
"learning_rate": 6.390284437358889e-06,
"loss": 0.7305,
"step": 828
},
{
"epoch": 0.6299392097264438,
"grad_norm": 1.1907494972095103,
"learning_rate": 6.367334770246692e-06,
"loss": 0.6441,
"step": 829
},
{
"epoch": 0.6306990881458967,
"grad_norm": 1.3876439867214416,
"learning_rate": 6.344407123450867e-06,
"loss": 0.7022,
"step": 830
},
{
"epoch": 0.6314589665653495,
"grad_norm": 1.287098647943779,
"learning_rate": 6.321501635953124e-06,
"loss": 0.7059,
"step": 831
},
{
"epoch": 0.6322188449848024,
"grad_norm": 1.1581410475012297,
"learning_rate": 6.298618446600856e-06,
"loss": 0.6567,
"step": 832
},
{
"epoch": 0.6329787234042553,
"grad_norm": 1.4502182146441993,
"learning_rate": 6.275757694106286e-06,
"loss": 0.7023,
"step": 833
},
{
"epoch": 0.6337386018237082,
"grad_norm": 1.1216577138105488,
"learning_rate": 6.252919517045626e-06,
"loss": 0.7353,
"step": 834
},
{
"epoch": 0.6344984802431611,
"grad_norm": 1.1944810866958329,
"learning_rate": 6.230104053858248e-06,
"loss": 0.7686,
"step": 835
},
{
"epoch": 0.6352583586626139,
"grad_norm": 1.1299914280579517,
"learning_rate": 6.207311442845834e-06,
"loss": 0.6831,
"step": 836
},
{
"epoch": 0.6360182370820668,
"grad_norm": 1.1382941614712434,
"learning_rate": 6.1845418221715455e-06,
"loss": 0.79,
"step": 837
},
{
"epoch": 0.6367781155015197,
"grad_norm": 1.195754553541583,
"learning_rate": 6.161795329859184e-06,
"loss": 0.6401,
"step": 838
},
{
"epoch": 0.6375379939209727,
"grad_norm": 1.0571798668620749,
"learning_rate": 6.13907210379234e-06,
"loss": 0.6749,
"step": 839
},
{
"epoch": 0.6382978723404256,
"grad_norm": 1.2248375773498381,
"learning_rate": 6.116372281713581e-06,
"loss": 0.7163,
"step": 840
},
{
"epoch": 0.6390577507598785,
"grad_norm": 1.185301403413712,
"learning_rate": 6.093696001223609e-06,
"loss": 0.76,
"step": 841
},
{
"epoch": 0.6398176291793313,
"grad_norm": 1.2269450132243764,
"learning_rate": 6.071043399780412e-06,
"loss": 0.6985,
"step": 842
},
{
"epoch": 0.6405775075987842,
"grad_norm": 1.1859661222475697,
"learning_rate": 6.0484146146984475e-06,
"loss": 0.7265,
"step": 843
},
{
"epoch": 0.6413373860182371,
"grad_norm": 0.9394291150111883,
"learning_rate": 6.025809783147803e-06,
"loss": 0.6968,
"step": 844
},
{
"epoch": 0.64209726443769,
"grad_norm": 1.292071531499517,
"learning_rate": 6.00322904215336e-06,
"loss": 0.7807,
"step": 845
},
{
"epoch": 0.6428571428571429,
"grad_norm": 1.1305562407779628,
"learning_rate": 5.980672528593981e-06,
"loss": 0.6842,
"step": 846
},
{
"epoch": 0.6436170212765957,
"grad_norm": 1.3063299823402525,
"learning_rate": 5.95814037920166e-06,
"loss": 0.7281,
"step": 847
},
{
"epoch": 0.6443768996960486,
"grad_norm": 0.9750204069424666,
"learning_rate": 5.935632730560702e-06,
"loss": 0.7723,
"step": 848
},
{
"epoch": 0.6451367781155015,
"grad_norm": 1.0243836626345881,
"learning_rate": 5.913149719106896e-06,
"loss": 0.6473,
"step": 849
},
{
"epoch": 0.6458966565349544,
"grad_norm": 1.071468090018402,
"learning_rate": 5.89069148112668e-06,
"loss": 0.7328,
"step": 850
},
{
"epoch": 0.6466565349544073,
"grad_norm": 1.252162612887728,
"learning_rate": 5.868258152756336e-06,
"loss": 0.7987,
"step": 851
},
{
"epoch": 0.6474164133738601,
"grad_norm": 1.1027125679904999,
"learning_rate": 5.845849869981137e-06,
"loss": 0.6656,
"step": 852
},
{
"epoch": 0.648176291793313,
"grad_norm": 1.0378684860406113,
"learning_rate": 5.823466768634538e-06,
"loss": 0.7004,
"step": 853
},
{
"epoch": 0.648936170212766,
"grad_norm": 1.3431434778834976,
"learning_rate": 5.801108984397355e-06,
"loss": 0.6317,
"step": 854
},
{
"epoch": 0.6496960486322189,
"grad_norm": 0.9368845010269132,
"learning_rate": 5.778776652796936e-06,
"loss": 0.6568,
"step": 855
},
{
"epoch": 0.6504559270516718,
"grad_norm": 1.0269610836317398,
"learning_rate": 5.7564699092063345e-06,
"loss": 0.7142,
"step": 856
},
{
"epoch": 0.6512158054711246,
"grad_norm": 1.1480218899479488,
"learning_rate": 5.734188888843512e-06,
"loss": 0.6641,
"step": 857
},
{
"epoch": 0.6519756838905775,
"grad_norm": 1.0163519592455907,
"learning_rate": 5.711933726770487e-06,
"loss": 0.6621,
"step": 858
},
{
"epoch": 0.6527355623100304,
"grad_norm": 1.1105102755810814,
"learning_rate": 5.689704557892528e-06,
"loss": 0.7129,
"step": 859
},
{
"epoch": 0.6534954407294833,
"grad_norm": 1.0773301612733883,
"learning_rate": 5.667501516957365e-06,
"loss": 0.7075,
"step": 860
},
{
"epoch": 0.6542553191489362,
"grad_norm": 1.021179632469384,
"learning_rate": 5.645324738554321e-06,
"loss": 0.7549,
"step": 861
},
{
"epoch": 0.6550151975683891,
"grad_norm": 1.1504842543031002,
"learning_rate": 5.623174357113528e-06,
"loss": 0.7804,
"step": 862
},
{
"epoch": 0.6557750759878419,
"grad_norm": 1.2178677190514033,
"learning_rate": 5.601050506905114e-06,
"loss": 0.784,
"step": 863
},
{
"epoch": 0.6565349544072948,
"grad_norm": 1.1197155608727563,
"learning_rate": 5.578953322038372e-06,
"loss": 0.7202,
"step": 864
},
{
"epoch": 0.6572948328267477,
"grad_norm": 1.2098866589506239,
"learning_rate": 5.556882936460966e-06,
"loss": 0.702,
"step": 865
},
{
"epoch": 0.6580547112462006,
"grad_norm": 1.1204183667271879,
"learning_rate": 5.5348394839580986e-06,
"loss": 0.7055,
"step": 866
},
{
"epoch": 0.6588145896656535,
"grad_norm": 1.093622128522684,
"learning_rate": 5.51282309815172e-06,
"loss": 0.6023,
"step": 867
},
{
"epoch": 0.6595744680851063,
"grad_norm": 1.1103643197759976,
"learning_rate": 5.4908339124997005e-06,
"loss": 0.7477,
"step": 868
},
{
"epoch": 0.6603343465045592,
"grad_norm": 1.1612183387805102,
"learning_rate": 5.468872060295034e-06,
"loss": 0.7221,
"step": 869
},
{
"epoch": 0.6610942249240122,
"grad_norm": 1.1105119069016545,
"learning_rate": 5.446937674665034e-06,
"loss": 0.6597,
"step": 870
},
{
"epoch": 0.6618541033434651,
"grad_norm": 1.1494042117037129,
"learning_rate": 5.425030888570506e-06,
"loss": 0.701,
"step": 871
},
{
"epoch": 0.662613981762918,
"grad_norm": 1.2061798873302412,
"learning_rate": 5.403151834804951e-06,
"loss": 0.6631,
"step": 872
},
{
"epoch": 0.6633738601823708,
"grad_norm": 1.1556068599008187,
"learning_rate": 5.381300645993779e-06,
"loss": 0.7402,
"step": 873
},
{
"epoch": 0.6641337386018237,
"grad_norm": 1.138979966124941,
"learning_rate": 5.359477454593483e-06,
"loss": 0.7994,
"step": 874
},
{
"epoch": 0.6648936170212766,
"grad_norm": 1.0738647881906687,
"learning_rate": 5.337682392890832e-06,
"loss": 0.6859,
"step": 875
},
{
"epoch": 0.6656534954407295,
"grad_norm": 1.2662111227678947,
"learning_rate": 5.3159155930021e-06,
"loss": 0.7938,
"step": 876
},
{
"epoch": 0.6664133738601824,
"grad_norm": 1.3243150908230195,
"learning_rate": 5.294177186872227e-06,
"loss": 0.6169,
"step": 877
},
{
"epoch": 0.6671732522796353,
"grad_norm": 1.2313652148182201,
"learning_rate": 5.27246730627404e-06,
"loss": 0.7532,
"step": 878
},
{
"epoch": 0.6679331306990881,
"grad_norm": 1.199184492052818,
"learning_rate": 5.250786082807462e-06,
"loss": 0.8094,
"step": 879
},
{
"epoch": 0.668693009118541,
"grad_norm": 1.1766620461831854,
"learning_rate": 5.229133647898696e-06,
"loss": 0.8151,
"step": 880
},
{
"epoch": 0.6694528875379939,
"grad_norm": 1.1689636430398356,
"learning_rate": 5.207510132799436e-06,
"loss": 0.7648,
"step": 881
},
{
"epoch": 0.6702127659574468,
"grad_norm": 1.0004296601669387,
"learning_rate": 5.185915668586066e-06,
"loss": 0.6519,
"step": 882
},
{
"epoch": 0.6709726443768997,
"grad_norm": 1.0056620349400942,
"learning_rate": 5.164350386158881e-06,
"loss": 0.6794,
"step": 883
},
{
"epoch": 0.6717325227963525,
"grad_norm": 1.1405291924443834,
"learning_rate": 5.1428144162412815e-06,
"loss": 0.7398,
"step": 884
},
{
"epoch": 0.6724924012158054,
"grad_norm": 1.076546875498532,
"learning_rate": 5.121307889378975e-06,
"loss": 0.7544,
"step": 885
},
{
"epoch": 0.6732522796352584,
"grad_norm": 1.1985162180916387,
"learning_rate": 5.099830935939203e-06,
"loss": 0.6901,
"step": 886
},
{
"epoch": 0.6740121580547113,
"grad_norm": 1.1479988991791108,
"learning_rate": 5.078383686109927e-06,
"loss": 0.6769,
"step": 887
},
{
"epoch": 0.6747720364741642,
"grad_norm": 1.0843032147912806,
"learning_rate": 5.05696626989907e-06,
"loss": 0.7671,
"step": 888
},
{
"epoch": 0.675531914893617,
"grad_norm": 1.2193163798667448,
"learning_rate": 5.035578817133692e-06,
"loss": 0.7064,
"step": 889
},
{
"epoch": 0.6762917933130699,
"grad_norm": 1.139022165568261,
"learning_rate": 5.01422145745924e-06,
"loss": 0.6694,
"step": 890
},
{
"epoch": 0.6770516717325228,
"grad_norm": 1.1386427257420573,
"learning_rate": 4.992894320338727e-06,
"loss": 0.6988,
"step": 891
},
{
"epoch": 0.6778115501519757,
"grad_norm": 1.3423131800256485,
"learning_rate": 4.971597535051977e-06,
"loss": 0.7786,
"step": 892
},
{
"epoch": 0.6785714285714286,
"grad_norm": 1.3569449163170135,
"learning_rate": 4.9503312306948294e-06,
"loss": 0.6839,
"step": 893
},
{
"epoch": 0.6793313069908815,
"grad_norm": 1.258419000446593,
"learning_rate": 4.929095536178347e-06,
"loss": 0.7035,
"step": 894
},
{
"epoch": 0.6800911854103343,
"grad_norm": 1.264671663052783,
"learning_rate": 4.907890580228042e-06,
"loss": 0.72,
"step": 895
},
{
"epoch": 0.6808510638297872,
"grad_norm": 0.9729021624772011,
"learning_rate": 4.886716491383111e-06,
"loss": 0.7169,
"step": 896
},
{
"epoch": 0.6816109422492401,
"grad_norm": 1.1728477700061473,
"learning_rate": 4.865573397995626e-06,
"loss": 0.7557,
"step": 897
},
{
"epoch": 0.682370820668693,
"grad_norm": 1.0926411727337517,
"learning_rate": 4.844461428229782e-06,
"loss": 0.6697,
"step": 898
},
{
"epoch": 0.6831306990881459,
"grad_norm": 1.071162133552548,
"learning_rate": 4.823380710061112e-06,
"loss": 0.7911,
"step": 899
},
{
"epoch": 0.6838905775075987,
"grad_norm": 1.0062565374222423,
"learning_rate": 4.802331371275703e-06,
"loss": 0.7092,
"step": 900
},
{
"epoch": 0.6846504559270516,
"grad_norm": 1.1386791316794218,
"learning_rate": 4.7813135394694235e-06,
"loss": 0.6663,
"step": 901
},
{
"epoch": 0.6854103343465046,
"grad_norm": 0.9831080114325188,
"learning_rate": 4.760327342047167e-06,
"loss": 0.6426,
"step": 902
},
{
"epoch": 0.6861702127659575,
"grad_norm": 1.0602127458280242,
"learning_rate": 4.739372906222066e-06,
"loss": 0.6984,
"step": 903
},
{
"epoch": 0.6869300911854104,
"grad_norm": 1.0843705272627073,
"learning_rate": 4.718450359014713e-06,
"loss": 0.6831,
"step": 904
},
{
"epoch": 0.6876899696048632,
"grad_norm": 1.098679645048744,
"learning_rate": 4.697559827252398e-06,
"loss": 0.8123,
"step": 905
},
{
"epoch": 0.6884498480243161,
"grad_norm": 1.3142719602757702,
"learning_rate": 4.676701437568354e-06,
"loss": 0.7175,
"step": 906
},
{
"epoch": 0.689209726443769,
"grad_norm": 1.1643291238805322,
"learning_rate": 4.655875316400974e-06,
"loss": 0.6634,
"step": 907
},
{
"epoch": 0.6899696048632219,
"grad_norm": 1.1947809431912677,
"learning_rate": 4.635081589993033e-06,
"loss": 0.6458,
"step": 908
},
{
"epoch": 0.6907294832826748,
"grad_norm": 1.3230117296353991,
"learning_rate": 4.614320384390959e-06,
"loss": 0.7312,
"step": 909
},
{
"epoch": 0.6914893617021277,
"grad_norm": 1.1844063741159998,
"learning_rate": 4.593591825444028e-06,
"loss": 0.7545,
"step": 910
},
{
"epoch": 0.6922492401215805,
"grad_norm": 1.0646849778857395,
"learning_rate": 4.572896038803622e-06,
"loss": 0.6961,
"step": 911
},
{
"epoch": 0.6930091185410334,
"grad_norm": 1.0945617066849158,
"learning_rate": 4.552233149922483e-06,
"loss": 0.7279,
"step": 912
},
{
"epoch": 0.6937689969604863,
"grad_norm": 1.0442736101302776,
"learning_rate": 4.531603284053919e-06,
"loss": 0.6863,
"step": 913
},
{
"epoch": 0.6945288753799392,
"grad_norm": 1.264079733440778,
"learning_rate": 4.51100656625106e-06,
"loss": 0.7263,
"step": 914
},
{
"epoch": 0.6952887537993921,
"grad_norm": 1.1671458044603604,
"learning_rate": 4.490443121366105e-06,
"loss": 0.7512,
"step": 915
},
{
"epoch": 0.6960486322188449,
"grad_norm": 1.381619409291954,
"learning_rate": 4.4699130740495675e-06,
"loss": 0.7452,
"step": 916
},
{
"epoch": 0.6968085106382979,
"grad_norm": 1.1536720377711638,
"learning_rate": 4.4494165487495035e-06,
"loss": 0.6957,
"step": 917
},
{
"epoch": 0.6975683890577508,
"grad_norm": 1.0571111154420783,
"learning_rate": 4.428953669710764e-06,
"loss": 0.7022,
"step": 918
},
{
"epoch": 0.6983282674772037,
"grad_norm": 1.0389429368627123,
"learning_rate": 4.40852456097426e-06,
"loss": 0.707,
"step": 919
},
{
"epoch": 0.6990881458966566,
"grad_norm": 1.1875919850127112,
"learning_rate": 4.388129346376177e-06,
"loss": 0.7821,
"step": 920
},
{
"epoch": 0.6998480243161094,
"grad_norm": 1.2473748616867024,
"learning_rate": 4.367768149547256e-06,
"loss": 0.8524,
"step": 921
},
{
"epoch": 0.7006079027355623,
"grad_norm": 1.0994514001752107,
"learning_rate": 4.34744109391203e-06,
"loss": 0.69,
"step": 922
},
{
"epoch": 0.7013677811550152,
"grad_norm": 1.1799993750972086,
"learning_rate": 4.327148302688069e-06,
"loss": 0.6326,
"step": 923
},
{
"epoch": 0.7021276595744681,
"grad_norm": 1.0037350990689164,
"learning_rate": 4.306889898885241e-06,
"loss": 0.6607,
"step": 924
},
{
"epoch": 0.702887537993921,
"grad_norm": 1.0331513526916414,
"learning_rate": 4.286666005304971e-06,
"loss": 0.6456,
"step": 925
},
{
"epoch": 0.7036474164133738,
"grad_norm": 1.2114272294038748,
"learning_rate": 4.2664767445394965e-06,
"loss": 0.7048,
"step": 926
},
{
"epoch": 0.7044072948328267,
"grad_norm": 0.948969197372682,
"learning_rate": 4.2463222389711045e-06,
"loss": 0.7238,
"step": 927
},
{
"epoch": 0.7051671732522796,
"grad_norm": 1.259145840103064,
"learning_rate": 4.226202610771419e-06,
"loss": 0.6822,
"step": 928
},
{
"epoch": 0.7059270516717325,
"grad_norm": 1.147743823697958,
"learning_rate": 4.206117981900636e-06,
"loss": 0.6834,
"step": 929
},
{
"epoch": 0.7066869300911854,
"grad_norm": 1.3613688973426528,
"learning_rate": 4.186068474106803e-06,
"loss": 0.6058,
"step": 930
},
{
"epoch": 0.7074468085106383,
"grad_norm": 1.0641233144969966,
"learning_rate": 4.16605420892506e-06,
"loss": 0.6037,
"step": 931
},
{
"epoch": 0.7082066869300911,
"grad_norm": 1.4404076348677173,
"learning_rate": 4.146075307676932e-06,
"loss": 0.7231,
"step": 932
},
{
"epoch": 0.708966565349544,
"grad_norm": 1.1265784012494935,
"learning_rate": 4.126131891469561e-06,
"loss": 0.7518,
"step": 933
},
{
"epoch": 0.709726443768997,
"grad_norm": 1.1259639811618485,
"learning_rate": 4.10622408119499e-06,
"loss": 0.7479,
"step": 934
},
{
"epoch": 0.7104863221884499,
"grad_norm": 1.12906985920995,
"learning_rate": 4.086351997529441e-06,
"loss": 0.7117,
"step": 935
},
{
"epoch": 0.7112462006079028,
"grad_norm": 1.1141762682174512,
"learning_rate": 4.0665157609325565e-06,
"loss": 0.6223,
"step": 936
},
{
"epoch": 0.7120060790273556,
"grad_norm": 0.9843606839695441,
"learning_rate": 4.0467154916466835e-06,
"loss": 0.6804,
"step": 937
},
{
"epoch": 0.7127659574468085,
"grad_norm": 1.0791368453161987,
"learning_rate": 4.026951309696152e-06,
"loss": 0.8217,
"step": 938
},
{
"epoch": 0.7135258358662614,
"grad_norm": 1.17924828361289,
"learning_rate": 4.007223334886531e-06,
"loss": 0.6319,
"step": 939
},
{
"epoch": 0.7142857142857143,
"grad_norm": 1.086662042991453,
"learning_rate": 3.98753168680392e-06,
"loss": 0.7768,
"step": 940
},
{
"epoch": 0.7150455927051672,
"grad_norm": 1.1637104562064116,
"learning_rate": 3.967876484814202e-06,
"loss": 0.699,
"step": 941
},
{
"epoch": 0.71580547112462,
"grad_norm": 1.1821074776029854,
"learning_rate": 3.948257848062351e-06,
"loss": 0.7243,
"step": 942
},
{
"epoch": 0.7165653495440729,
"grad_norm": 1.2758482975392278,
"learning_rate": 3.92867589547167e-06,
"loss": 0.6933,
"step": 943
},
{
"epoch": 0.7173252279635258,
"grad_norm": 1.136673931415126,
"learning_rate": 3.909130745743108e-06,
"loss": 0.6787,
"step": 944
},
{
"epoch": 0.7180851063829787,
"grad_norm": 1.2505316932380202,
"learning_rate": 3.889622517354523e-06,
"loss": 0.7131,
"step": 945
},
{
"epoch": 0.7188449848024316,
"grad_norm": 1.1532069822589315,
"learning_rate": 3.870151328559956e-06,
"loss": 0.7163,
"step": 946
},
{
"epoch": 0.7196048632218845,
"grad_norm": 1.3010144592978872,
"learning_rate": 3.850717297388926e-06,
"loss": 0.6895,
"step": 947
},
{
"epoch": 0.7203647416413373,
"grad_norm": 1.2213309503213339,
"learning_rate": 3.831320541645717e-06,
"loss": 0.7386,
"step": 948
},
{
"epoch": 0.7211246200607903,
"grad_norm": 1.1249828865944345,
"learning_rate": 3.8119611789086576e-06,
"loss": 0.7562,
"step": 949
},
{
"epoch": 0.7218844984802432,
"grad_norm": 1.112085150276745,
"learning_rate": 3.7926393265294016e-06,
"loss": 0.7472,
"step": 950
},
{
"epoch": 0.7226443768996961,
"grad_norm": 1.1691517022721343,
"learning_rate": 3.773355101632236e-06,
"loss": 0.6366,
"step": 951
},
{
"epoch": 0.723404255319149,
"grad_norm": 1.065081114749969,
"learning_rate": 3.7541086211133504e-06,
"loss": 0.7091,
"step": 952
},
{
"epoch": 0.7241641337386018,
"grad_norm": 1.3061023932315732,
"learning_rate": 3.734900001640135e-06,
"loss": 0.7162,
"step": 953
},
{
"epoch": 0.7249240121580547,
"grad_norm": 1.309023206323555,
"learning_rate": 3.7157293596504863e-06,
"loss": 0.6626,
"step": 954
},
{
"epoch": 0.7256838905775076,
"grad_norm": 1.1235930459888246,
"learning_rate": 3.696596811352087e-06,
"loss": 0.6648,
"step": 955
},
{
"epoch": 0.7264437689969605,
"grad_norm": 1.1237782636304048,
"learning_rate": 3.677502472721699e-06,
"loss": 0.6832,
"step": 956
},
{
"epoch": 0.7272036474164134,
"grad_norm": 1.0942128565760725,
"learning_rate": 3.6584464595044713e-06,
"loss": 0.7049,
"step": 957
},
{
"epoch": 0.7279635258358662,
"grad_norm": 1.1946352330560444,
"learning_rate": 3.6394288872132335e-06,
"loss": 0.6031,
"step": 958
},
{
"epoch": 0.7287234042553191,
"grad_norm": 0.9925979700840246,
"learning_rate": 3.6204498711278014e-06,
"loss": 0.7806,
"step": 959
},
{
"epoch": 0.729483282674772,
"grad_norm": 1.1783905654043534,
"learning_rate": 3.6015095262942578e-06,
"loss": 0.7694,
"step": 960
},
{
"epoch": 0.7302431610942249,
"grad_norm": 1.1285297640686531,
"learning_rate": 3.5826079675242897e-06,
"loss": 0.7035,
"step": 961
},
{
"epoch": 0.7310030395136778,
"grad_norm": 1.2766209090145464,
"learning_rate": 3.563745309394452e-06,
"loss": 0.8306,
"step": 962
},
{
"epoch": 0.7317629179331308,
"grad_norm": 1.2802069631147242,
"learning_rate": 3.54492166624551e-06,
"loss": 0.6821,
"step": 963
},
{
"epoch": 0.7325227963525835,
"grad_norm": 1.4528178238422362,
"learning_rate": 3.5261371521817247e-06,
"loss": 0.8057,
"step": 964
},
{
"epoch": 0.7332826747720365,
"grad_norm": 1.1360673012907418,
"learning_rate": 3.507391881070161e-06,
"loss": 0.6928,
"step": 965
},
{
"epoch": 0.7340425531914894,
"grad_norm": 1.0857177186934868,
"learning_rate": 3.4886859665400075e-06,
"loss": 0.6418,
"step": 966
},
{
"epoch": 0.7348024316109423,
"grad_norm": 1.2429148105905763,
"learning_rate": 3.470019521981882e-06,
"loss": 0.7205,
"step": 967
},
{
"epoch": 0.7355623100303952,
"grad_norm": 1.1381324753940012,
"learning_rate": 3.4513926605471504e-06,
"loss": 0.6926,
"step": 968
},
{
"epoch": 0.736322188449848,
"grad_norm": 1.2286178986824534,
"learning_rate": 3.4328054951472267e-06,
"loss": 0.6778,
"step": 969
},
{
"epoch": 0.7370820668693009,
"grad_norm": 1.249604798766476,
"learning_rate": 3.4142581384528984e-06,
"loss": 0.6991,
"step": 970
},
{
"epoch": 0.7378419452887538,
"grad_norm": 1.2152282939220294,
"learning_rate": 3.395750702893651e-06,
"loss": 0.8301,
"step": 971
},
{
"epoch": 0.7386018237082067,
"grad_norm": 1.1292510512641436,
"learning_rate": 3.377283300656967e-06,
"loss": 0.6164,
"step": 972
},
{
"epoch": 0.7393617021276596,
"grad_norm": 1.129932752453749,
"learning_rate": 3.358856043687666e-06,
"loss": 0.667,
"step": 973
},
{
"epoch": 0.7401215805471124,
"grad_norm": 1.2040509606498078,
"learning_rate": 3.340469043687213e-06,
"loss": 0.7359,
"step": 974
},
{
"epoch": 0.7408814589665653,
"grad_norm": 1.0966554016954166,
"learning_rate": 3.322122412113047e-06,
"loss": 0.6991,
"step": 975
},
{
"epoch": 0.7416413373860182,
"grad_norm": 1.2752114699698873,
"learning_rate": 3.3038162601778944e-06,
"loss": 0.7226,
"step": 976
},
{
"epoch": 0.7424012158054711,
"grad_norm": 1.1973005258559652,
"learning_rate": 3.285550698849117e-06,
"loss": 0.7434,
"step": 977
},
{
"epoch": 0.743161094224924,
"grad_norm": 1.2115166712763157,
"learning_rate": 3.2673258388480235e-06,
"loss": 0.6354,
"step": 978
},
{
"epoch": 0.743920972644377,
"grad_norm": 1.2342273069639336,
"learning_rate": 3.2491417906491916e-06,
"loss": 0.6306,
"step": 979
},
{
"epoch": 0.7446808510638298,
"grad_norm": 1.2359619179075527,
"learning_rate": 3.230998664479823e-06,
"loss": 0.6276,
"step": 980
},
{
"epoch": 0.7454407294832827,
"grad_norm": 1.0415850804754119,
"learning_rate": 3.212896570319045e-06,
"loss": 0.6845,
"step": 981
},
{
"epoch": 0.7462006079027356,
"grad_norm": 1.1802739171012435,
"learning_rate": 3.194835617897273e-06,
"loss": 0.622,
"step": 982
},
{
"epoch": 0.7469604863221885,
"grad_norm": 1.2439907155583805,
"learning_rate": 3.176815916695518e-06,
"loss": 0.7096,
"step": 983
},
{
"epoch": 0.7477203647416414,
"grad_norm": 1.147914771751723,
"learning_rate": 3.158837575944751e-06,
"loss": 0.7258,
"step": 984
},
{
"epoch": 0.7484802431610942,
"grad_norm": 1.2978049727772043,
"learning_rate": 3.1409007046252114e-06,
"loss": 0.6771,
"step": 985
},
{
"epoch": 0.7492401215805471,
"grad_norm": 1.1499766562757003,
"learning_rate": 3.123005411465766e-06,
"loss": 0.701,
"step": 986
},
{
"epoch": 0.75,
"grad_norm": 1.0693963917103544,
"learning_rate": 3.105151804943256e-06,
"loss": 0.7043,
"step": 987
},
{
"epoch": 0.7507598784194529,
"grad_norm": 1.2305140175608322,
"learning_rate": 3.087339993281816e-06,
"loss": 0.6566,
"step": 988
},
{
"epoch": 0.7515197568389058,
"grad_norm": 1.003867377853084,
"learning_rate": 3.06957008445223e-06,
"loss": 0.6741,
"step": 989
},
{
"epoch": 0.7522796352583586,
"grad_norm": 1.055359001129926,
"learning_rate": 3.0518421861712845e-06,
"loss": 0.7424,
"step": 990
},
{
"epoch": 0.7530395136778115,
"grad_norm": 1.0489625087030288,
"learning_rate": 3.0341564059011086e-06,
"loss": 0.6396,
"step": 991
},
{
"epoch": 0.7537993920972644,
"grad_norm": 1.1893706178742733,
"learning_rate": 3.0165128508485166e-06,
"loss": 0.6847,
"step": 992
},
{
"epoch": 0.7545592705167173,
"grad_norm": 1.0135298428599318,
"learning_rate": 2.9989116279643637e-06,
"loss": 0.5938,
"step": 993
},
{
"epoch": 0.7553191489361702,
"grad_norm": 1.1610621297187727,
"learning_rate": 2.9813528439429074e-06,
"loss": 0.7356,
"step": 994
},
{
"epoch": 0.756079027355623,
"grad_norm": 1.306043230804643,
"learning_rate": 2.9638366052211387e-06,
"loss": 0.7352,
"step": 995
},
{
"epoch": 0.756838905775076,
"grad_norm": 1.3878230222334922,
"learning_rate": 2.946363017978159e-06,
"loss": 0.7463,
"step": 996
},
{
"epoch": 0.7575987841945289,
"grad_norm": 1.2866088794863302,
"learning_rate": 2.9289321881345257e-06,
"loss": 0.7971,
"step": 997
},
{
"epoch": 0.7583586626139818,
"grad_norm": 1.2283459458688568,
"learning_rate": 2.911544221351608e-06,
"loss": 0.6631,
"step": 998
},
{
"epoch": 0.7591185410334347,
"grad_norm": 1.024583863335167,
"learning_rate": 2.894199223030948e-06,
"loss": 0.7676,
"step": 999
},
{
"epoch": 0.7598784194528876,
"grad_norm": 1.1118576213588252,
"learning_rate": 2.87689729831363e-06,
"loss": 0.6562,
"step": 1000
},
{
"epoch": 0.7606382978723404,
"grad_norm": 0.9923751398641592,
"learning_rate": 2.8596385520796365e-06,
"loss": 0.6727,
"step": 1001
},
{
"epoch": 0.7613981762917933,
"grad_norm": 1.1727199316684949,
"learning_rate": 2.8424230889472047e-06,
"loss": 0.6297,
"step": 1002
},
{
"epoch": 0.7621580547112462,
"grad_norm": 1.2294185210188011,
"learning_rate": 2.825251013272212e-06,
"loss": 0.8385,
"step": 1003
},
{
"epoch": 0.7629179331306991,
"grad_norm": 1.2168106027942747,
"learning_rate": 2.8081224291475216e-06,
"loss": 0.762,
"step": 1004
},
{
"epoch": 0.763677811550152,
"grad_norm": 1.2253817236474687,
"learning_rate": 2.7910374404023634e-06,
"loss": 0.6375,
"step": 1005
},
{
"epoch": 0.7644376899696048,
"grad_norm": 1.0665312801720293,
"learning_rate": 2.7739961506017075e-06,
"loss": 0.7151,
"step": 1006
},
{
"epoch": 0.7651975683890577,
"grad_norm": 1.2265668347679475,
"learning_rate": 2.7569986630456334e-06,
"loss": 0.6618,
"step": 1007
},
{
"epoch": 0.7659574468085106,
"grad_norm": 0.9884700225329818,
"learning_rate": 2.740045080768694e-06,
"loss": 0.6926,
"step": 1008
},
{
"epoch": 0.7667173252279635,
"grad_norm": 1.1992772744751734,
"learning_rate": 2.7231355065392996e-06,
"loss": 0.7414,
"step": 1009
},
{
"epoch": 0.7674772036474165,
"grad_norm": 1.1104745481751945,
"learning_rate": 2.706270042859108e-06,
"loss": 0.7639,
"step": 1010
},
{
"epoch": 0.7682370820668692,
"grad_norm": 0.9965536525277243,
"learning_rate": 2.689448791962377e-06,
"loss": 0.8284,
"step": 1011
},
{
"epoch": 0.7689969604863222,
"grad_norm": 1.1678112956685116,
"learning_rate": 2.672671855815355e-06,
"loss": 0.7828,
"step": 1012
},
{
"epoch": 0.7697568389057751,
"grad_norm": 1.3156802308059738,
"learning_rate": 2.6559393361156804e-06,
"loss": 0.6398,
"step": 1013
},
{
"epoch": 0.770516717325228,
"grad_norm": 1.1401901016167089,
"learning_rate": 2.6392513342917327e-06,
"loss": 0.6525,
"step": 1014
},
{
"epoch": 0.7712765957446809,
"grad_norm": 1.0560539135353917,
"learning_rate": 2.6226079515020507e-06,
"loss": 0.7281,
"step": 1015
},
{
"epoch": 0.7720364741641338,
"grad_norm": 1.1909614312462784,
"learning_rate": 2.606009288634689e-06,
"loss": 0.6785,
"step": 1016
},
{
"epoch": 0.7727963525835866,
"grad_norm": 1.0645811009555441,
"learning_rate": 2.5894554463066356e-06,
"loss": 0.8196,
"step": 1017
},
{
"epoch": 0.7735562310030395,
"grad_norm": 1.0744237761868103,
"learning_rate": 2.5729465248631733e-06,
"loss": 0.7089,
"step": 1018
},
{
"epoch": 0.7743161094224924,
"grad_norm": 1.1501549180149673,
"learning_rate": 2.5564826243772965e-06,
"loss": 0.7186,
"step": 1019
},
{
"epoch": 0.7750759878419453,
"grad_norm": 1.090718993467275,
"learning_rate": 2.540063844649092e-06,
"loss": 0.6967,
"step": 1020
},
{
"epoch": 0.7758358662613982,
"grad_norm": 1.262645687632852,
"learning_rate": 2.5236902852051314e-06,
"loss": 0.6468,
"step": 1021
},
{
"epoch": 0.776595744680851,
"grad_norm": 0.9878664141286467,
"learning_rate": 2.5073620452978708e-06,
"loss": 0.7271,
"step": 1022
},
{
"epoch": 0.7773556231003039,
"grad_norm": 1.2611651073752586,
"learning_rate": 2.4910792239050575e-06,
"loss": 0.6891,
"step": 1023
},
{
"epoch": 0.7781155015197568,
"grad_norm": 1.3770427550654896,
"learning_rate": 2.474841919729122e-06,
"loss": 0.6701,
"step": 1024
},
{
"epoch": 0.7788753799392097,
"grad_norm": 1.1740807071337804,
"learning_rate": 2.458650231196572e-06,
"loss": 0.6969,
"step": 1025
},
{
"epoch": 0.7796352583586627,
"grad_norm": 1.15327703282196,
"learning_rate": 2.4425042564574186e-06,
"loss": 0.7951,
"step": 1026
},
{
"epoch": 0.7803951367781155,
"grad_norm": 1.0976455733746449,
"learning_rate": 2.4264040933845535e-06,
"loss": 0.6239,
"step": 1027
},
{
"epoch": 0.7811550151975684,
"grad_norm": 1.1362177795153996,
"learning_rate": 2.410349839573175e-06,
"loss": 0.71,
"step": 1028
},
{
"epoch": 0.7819148936170213,
"grad_norm": 1.2026870438152502,
"learning_rate": 2.3943415923401923e-06,
"loss": 0.6649,
"step": 1029
},
{
"epoch": 0.7826747720364742,
"grad_norm": 1.2052658837448995,
"learning_rate": 2.3783794487236367e-06,
"loss": 0.7296,
"step": 1030
},
{
"epoch": 0.7834346504559271,
"grad_norm": 1.1281897537962795,
"learning_rate": 2.3624635054820633e-06,
"loss": 0.7188,
"step": 1031
},
{
"epoch": 0.78419452887538,
"grad_norm": 1.0897801958842848,
"learning_rate": 2.346593859093974e-06,
"loss": 0.7076,
"step": 1032
},
{
"epoch": 0.7849544072948328,
"grad_norm": 1.0640259314164149,
"learning_rate": 2.3307706057572354e-06,
"loss": 0.7441,
"step": 1033
},
{
"epoch": 0.7857142857142857,
"grad_norm": 1.0344057270478118,
"learning_rate": 2.3149938413884886e-06,
"loss": 0.71,
"step": 1034
},
{
"epoch": 0.7864741641337386,
"grad_norm": 1.258022110174666,
"learning_rate": 2.299263661622566e-06,
"loss": 0.7375,
"step": 1035
},
{
"epoch": 0.7872340425531915,
"grad_norm": 1.1067641335949387,
"learning_rate": 2.2835801618119247e-06,
"loss": 0.7371,
"step": 1036
},
{
"epoch": 0.7879939209726444,
"grad_norm": 1.2002173951393722,
"learning_rate": 2.2679434370260457e-06,
"loss": 0.6505,
"step": 1037
},
{
"epoch": 0.7887537993920972,
"grad_norm": 1.133912103250985,
"learning_rate": 2.2523535820508844e-06,
"loss": 0.7709,
"step": 1038
},
{
"epoch": 0.7895136778115501,
"grad_norm": 1.1460622281855655,
"learning_rate": 2.2368106913882814e-06,
"loss": 0.718,
"step": 1039
},
{
"epoch": 0.790273556231003,
"grad_norm": 1.3231479511702298,
"learning_rate": 2.2213148592553847e-06,
"loss": 0.6763,
"step": 1040
},
{
"epoch": 0.791033434650456,
"grad_norm": 1.0916282763634857,
"learning_rate": 2.205866179584084e-06,
"loss": 0.6576,
"step": 1041
},
{
"epoch": 0.7917933130699089,
"grad_norm": 1.1640565182207296,
"learning_rate": 2.190464746020452e-06,
"loss": 0.7421,
"step": 1042
},
{
"epoch": 0.7925531914893617,
"grad_norm": 1.0117814584644247,
"learning_rate": 2.175110651924165e-06,
"loss": 0.6422,
"step": 1043
},
{
"epoch": 0.7933130699088146,
"grad_norm": 1.0485542457299897,
"learning_rate": 2.159803990367931e-06,
"loss": 0.7094,
"step": 1044
},
{
"epoch": 0.7940729483282675,
"grad_norm": 1.2116167735512808,
"learning_rate": 2.1445448541369396e-06,
"loss": 0.7358,
"step": 1045
},
{
"epoch": 0.7948328267477204,
"grad_norm": 1.1462035059040563,
"learning_rate": 2.1293333357282954e-06,
"loss": 0.7469,
"step": 1046
},
{
"epoch": 0.7955927051671733,
"grad_norm": 1.1941863771000016,
"learning_rate": 2.1141695273504503e-06,
"loss": 0.7137,
"step": 1047
},
{
"epoch": 0.7963525835866262,
"grad_norm": 1.312889254090232,
"learning_rate": 2.0990535209226547e-06,
"loss": 0.8082,
"step": 1048
},
{
"epoch": 0.797112462006079,
"grad_norm": 1.072468781454342,
"learning_rate": 2.083985408074396e-06,
"loss": 0.7231,
"step": 1049
},
{
"epoch": 0.7978723404255319,
"grad_norm": 1.1860345229851978,
"learning_rate": 2.068965280144837e-06,
"loss": 0.672,
"step": 1050
},
{
"epoch": 0.7986322188449848,
"grad_norm": 1.2823965807740199,
"learning_rate": 2.0539932281822685e-06,
"loss": 0.7493,
"step": 1051
},
{
"epoch": 0.7993920972644377,
"grad_norm": 1.1486470335368848,
"learning_rate": 2.0390693429435626e-06,
"loss": 0.6758,
"step": 1052
},
{
"epoch": 0.8001519756838906,
"grad_norm": 1.2163530985688338,
"learning_rate": 2.024193714893614e-06,
"loss": 0.7542,
"step": 1053
},
{
"epoch": 0.8009118541033434,
"grad_norm": 1.1418608494404552,
"learning_rate": 2.0093664342047903e-06,
"loss": 0.7042,
"step": 1054
},
{
"epoch": 0.8016717325227963,
"grad_norm": 1.091740118029224,
"learning_rate": 1.994587590756397e-06,
"loss": 0.6547,
"step": 1055
},
{
"epoch": 0.8024316109422492,
"grad_norm": 1.0548786543265325,
"learning_rate": 1.979857274134115e-06,
"loss": 0.6756,
"step": 1056
},
{
"epoch": 0.8031914893617021,
"grad_norm": 1.1320023087914366,
"learning_rate": 1.9651755736294785e-06,
"loss": 0.6682,
"step": 1057
},
{
"epoch": 0.8039513677811551,
"grad_norm": 1.2189572447533126,
"learning_rate": 1.9505425782393117e-06,
"loss": 0.7586,
"step": 1058
},
{
"epoch": 0.8047112462006079,
"grad_norm": 1.0943551201130357,
"learning_rate": 1.9359583766652135e-06,
"loss": 0.714,
"step": 1059
},
{
"epoch": 0.8054711246200608,
"grad_norm": 0.9997249186870598,
"learning_rate": 1.9214230573129944e-06,
"loss": 0.7033,
"step": 1060
},
{
"epoch": 0.8062310030395137,
"grad_norm": 1.0062498233336101,
"learning_rate": 1.9069367082921542e-06,
"loss": 0.6967,
"step": 1061
},
{
"epoch": 0.8069908814589666,
"grad_norm": 1.2074679469839953,
"learning_rate": 1.892499417415362e-06,
"loss": 0.6098,
"step": 1062
},
{
"epoch": 0.8077507598784195,
"grad_norm": 0.974094819682152,
"learning_rate": 1.87811127219789e-06,
"loss": 0.7002,
"step": 1063
},
{
"epoch": 0.8085106382978723,
"grad_norm": 0.999777172254122,
"learning_rate": 1.8637723598571078e-06,
"loss": 0.6874,
"step": 1064
},
{
"epoch": 0.8092705167173252,
"grad_norm": 1.3233453645588935,
"learning_rate": 1.849482767311953e-06,
"loss": 0.733,
"step": 1065
},
{
"epoch": 0.8100303951367781,
"grad_norm": 1.093917921272683,
"learning_rate": 1.8352425811823893e-06,
"loss": 0.5386,
"step": 1066
},
{
"epoch": 0.810790273556231,
"grad_norm": 1.203921911962039,
"learning_rate": 1.8210518877889016e-06,
"loss": 0.6852,
"step": 1067
},
{
"epoch": 0.8115501519756839,
"grad_norm": 1.2387311631723403,
|
"learning_rate": 1.8069107731519507e-06, |
|
"loss": 0.7721, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.8123100303951368, |
|
"grad_norm": 1.199552627375313, |
|
"learning_rate": 1.7928193229914747e-06, |
|
"loss": 0.6982, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.8130699088145896, |
|
"grad_norm": 0.9963886859919631, |
|
"learning_rate": 1.7787776227263464e-06, |
|
"loss": 0.5456, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.8138297872340425, |
|
"grad_norm": 1.3545208480064566, |
|
"learning_rate": 1.7647857574738759e-06, |
|
"loss": 0.7682, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.8145896656534954, |
|
"grad_norm": 1.2208367567982428, |
|
"learning_rate": 1.7508438120492864e-06, |
|
"loss": 0.5537, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 0.8153495440729484, |
|
"grad_norm": 1.1165843778790494, |
|
"learning_rate": 1.7369518709651923e-06, |
|
"loss": 0.7316, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 0.8161094224924013, |
|
"grad_norm": 1.1409271275287363, |
|
"learning_rate": 1.7231100184310955e-06, |
|
"loss": 0.6392, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.8168693009118541, |
|
"grad_norm": 1.0884083854734414, |
|
"learning_rate": 1.7093183383528776e-06, |
|
"loss": 0.6117, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.817629179331307, |
|
"grad_norm": 1.068969102945993, |
|
"learning_rate": 1.6955769143322898e-06, |
|
"loss": 0.4683, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.8183890577507599, |
|
"grad_norm": 0.9831886853581654, |
|
"learning_rate": 1.681885829666432e-06, |
|
"loss": 0.7013, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.8191489361702128, |
|
"grad_norm": 1.051184380106491, |
|
"learning_rate": 1.6682451673472734e-06, |
|
"loss": 0.6305, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.8199088145896657, |
|
"grad_norm": 1.1223495353706434, |
|
"learning_rate": 1.6546550100611237e-06, |
|
"loss": 0.7346, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 0.8206686930091185, |
|
"grad_norm": 1.1441585867010564, |
|
"learning_rate": 1.641115440188148e-06, |
|
"loss": 0.6982, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.8214285714285714, |
|
"grad_norm": 1.2192432945723835, |
|
"learning_rate": 1.6276265398018642e-06, |
|
"loss": 0.6753, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.8221884498480243, |
|
"grad_norm": 1.415282448423663, |
|
"learning_rate": 1.6141883906686484e-06, |
|
"loss": 0.7307, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.8229483282674772, |
|
"grad_norm": 1.2236317595223443, |
|
"learning_rate": 1.6008010742472257e-06, |
|
"loss": 0.7669, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.8237082066869301, |
|
"grad_norm": 1.0133830637305623, |
|
"learning_rate": 1.587464671688187e-06, |
|
"loss": 0.7271, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.824468085106383, |
|
"grad_norm": 1.1343068460941579, |
|
"learning_rate": 1.5741792638335096e-06, |
|
"loss": 0.6954, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.8252279635258358, |
|
"grad_norm": 1.073228514372066, |
|
"learning_rate": 1.5609449312160364e-06, |
|
"loss": 0.584, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.8259878419452887, |
|
"grad_norm": 1.2155988715843964, |
|
"learning_rate": 1.5477617540590128e-06, |
|
"loss": 0.6985, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.8267477203647416, |
|
"grad_norm": 1.0626067139609812, |
|
"learning_rate": 1.5346298122755932e-06, |
|
"loss": 0.6585, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.8275075987841946, |
|
"grad_norm": 1.147707100709591, |
|
"learning_rate": 1.5215491854683529e-06, |
|
"loss": 0.7551, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.8282674772036475, |
|
"grad_norm": 1.3468869928677656, |
|
"learning_rate": 1.5085199529288097e-06, |
|
"loss": 0.6943, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.8290273556231003, |
|
"grad_norm": 1.2096274012114665, |
|
"learning_rate": 1.4955421936369452e-06, |
|
"loss": 0.6694, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.8297872340425532, |
|
"grad_norm": 1.2410948107008724, |
|
"learning_rate": 1.4826159862607182e-06, |
|
"loss": 0.6617, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.8305471124620061, |
|
"grad_norm": 1.155591538053921, |
|
"learning_rate": 1.4697414091555918e-06, |
|
"loss": 0.7733, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.831306990881459, |
|
"grad_norm": 1.0068055977995527, |
|
"learning_rate": 1.456918540364065e-06, |
|
"loss": 0.7311, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.8320668693009119, |
|
"grad_norm": 1.3257082207534072, |
|
"learning_rate": 1.4441474576151915e-06, |
|
"loss": 0.652, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.8328267477203647, |
|
"grad_norm": 1.2197567822706545, |
|
"learning_rate": 1.4314282383241097e-06, |
|
"loss": 0.6927, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.8335866261398176, |
|
"grad_norm": 1.2881915459202287, |
|
"learning_rate": 1.4187609595915697e-06, |
|
"loss": 0.7052, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.8343465045592705, |
|
"grad_norm": 1.2023867797487267, |
|
"learning_rate": 1.4061456982034816e-06, |
|
"loss": 0.7201, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.8351063829787234, |
|
"grad_norm": 1.1691805510845608, |
|
"learning_rate": 1.3935825306304329e-06, |
|
"loss": 0.7011, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.8358662613981763, |
|
"grad_norm": 1.5529787151876815, |
|
"learning_rate": 1.3810715330272285e-06, |
|
"loss": 0.7339, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.8366261398176292, |
|
"grad_norm": 1.1484356790580228, |
|
"learning_rate": 1.36861278123244e-06, |
|
"loss": 0.6866, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.837386018237082, |
|
"grad_norm": 1.0559503515868425, |
|
"learning_rate": 1.3562063507679323e-06, |
|
"loss": 0.5985, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.8381458966565349, |
|
"grad_norm": 1.2989479834431623, |
|
"learning_rate": 1.3438523168384076e-06, |
|
"loss": 0.6846, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.8389057750759878, |
|
"grad_norm": 1.0806475644725628, |
|
"learning_rate": 1.3315507543309624e-06, |
|
"loss": 0.7625, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.8396656534954408, |
|
"grad_norm": 1.140920582486702, |
|
"learning_rate": 1.319301737814621e-06, |
|
"loss": 0.6834, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.8404255319148937, |
|
"grad_norm": 1.111450261346159, |
|
"learning_rate": 1.3071053415398816e-06, |
|
"loss": 0.7714, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.8411854103343465, |
|
"grad_norm": 1.075448474979407, |
|
"learning_rate": 1.2949616394382802e-06, |
|
"loss": 0.7337, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.8419452887537994, |
|
"grad_norm": 0.9601202189186884, |
|
"learning_rate": 1.2828707051219257e-06, |
|
"loss": 0.6875, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.8427051671732523, |
|
"grad_norm": 1.1368061357838368, |
|
"learning_rate": 1.2708326118830706e-06, |
|
"loss": 0.7314, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.8434650455927052, |
|
"grad_norm": 1.1618574536059776, |
|
"learning_rate": 1.2588474326936461e-06, |
|
"loss": 0.7322, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.8442249240121581, |
|
"grad_norm": 1.3830992981104948, |
|
"learning_rate": 1.2469152402048446e-06, |
|
"loss": 0.7353, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.8449848024316109, |
|
"grad_norm": 1.1182925134402968, |
|
"learning_rate": 1.2350361067466554e-06, |
|
"loss": 0.7403, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.8457446808510638, |
|
"grad_norm": 1.2115068644376168, |
|
"learning_rate": 1.2232101043274437e-06, |
|
"loss": 0.7798, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.8465045592705167, |
|
"grad_norm": 1.1090546276827984, |
|
"learning_rate": 1.2114373046335059e-06, |
|
"loss": 0.7117, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.8472644376899696, |
|
"grad_norm": 4.166507610410275, |
|
"learning_rate": 1.1997177790286362e-06, |
|
"loss": 0.6269, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.8480243161094225, |
|
"grad_norm": 1.0538132793554953, |
|
"learning_rate": 1.1880515985536911e-06, |
|
"loss": 0.7255, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.8487841945288754, |
|
"grad_norm": 1.0944056765505172, |
|
"learning_rate": 1.176438833926169e-06, |
|
"loss": 0.7143, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.8495440729483282, |
|
"grad_norm": 1.1702415211461719, |
|
"learning_rate": 1.1648795555397719e-06, |
|
"loss": 0.5761, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.8503039513677811, |
|
"grad_norm": 1.035813223646502, |
|
"learning_rate": 1.1533738334639787e-06, |
|
"loss": 0.667, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.851063829787234, |
|
"grad_norm": 1.1226982400342818, |
|
"learning_rate": 1.1419217374436231e-06, |
|
"loss": 0.5816, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.851823708206687, |
|
"grad_norm": 1.156939352942342, |
|
"learning_rate": 1.1305233368984792e-06, |
|
"loss": 0.6661, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.8525835866261399, |
|
"grad_norm": 1.0516669260442288, |
|
"learning_rate": 1.1191787009228194e-06, |
|
"loss": 0.7294, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.8533434650455927, |
|
"grad_norm": 1.2466026215142973, |
|
"learning_rate": 1.1078878982850193e-06, |
|
"loss": 0.6684, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.8541033434650456, |
|
"grad_norm": 1.1064008263905736, |
|
"learning_rate": 1.0966509974271289e-06, |
|
"loss": 0.7697, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.8548632218844985, |
|
"grad_norm": 1.2501255707066732, |
|
"learning_rate": 1.0854680664644534e-06, |
|
"loss": 0.7212, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.8556231003039514, |
|
"grad_norm": 1.236860045561205, |
|
"learning_rate": 1.0743391731851471e-06, |
|
"loss": 0.6379, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.8563829787234043, |
|
"grad_norm": 1.0468169815623685, |
|
"learning_rate": 1.0632643850498048e-06, |
|
"loss": 0.7328, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.8571428571428571, |
|
"grad_norm": 1.347195466333952, |
|
"learning_rate": 1.0522437691910493e-06, |
|
"loss": 0.7241, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.85790273556231, |
|
"grad_norm": 1.238148071226312, |
|
"learning_rate": 1.0412773924131202e-06, |
|
"loss": 0.7605, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.8586626139817629, |
|
"grad_norm": 1.2506111985823192, |
|
"learning_rate": 1.0303653211914788e-06, |
|
"loss": 0.6754, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.8594224924012158, |
|
"grad_norm": 1.362081669421355, |
|
"learning_rate": 1.0195076216723931e-06, |
|
"loss": 0.7927, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.8601823708206687, |
|
"grad_norm": 1.0632919738921134, |
|
"learning_rate": 1.008704359672551e-06, |
|
"loss": 0.6896, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.8609422492401215, |
|
"grad_norm": 1.2383897549954153, |
|
"learning_rate": 9.97955600678644e-07, |
|
"loss": 0.6869, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.8617021276595744, |
|
"grad_norm": 1.1142541478646706, |
|
"learning_rate": 9.872614098469912e-07, |
|
"loss": 0.7687, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.8624620060790273, |
|
"grad_norm": 1.19587245527059, |
|
"learning_rate": 9.766218520031234e-07, |
|
"loss": 0.7303, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.8632218844984803, |
|
"grad_norm": 1.005657664076255, |
|
"learning_rate": 9.660369916414015e-07, |
|
"loss": 0.6556, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.8639817629179332, |
|
"grad_norm": 1.2812819362765668, |
|
"learning_rate": 9.555068929246324e-07, |
|
"loss": 0.6985, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.8647416413373861, |
|
"grad_norm": 1.2186831420915536, |
|
"learning_rate": 9.450316196836618e-07, |
|
"loss": 0.7435, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.8655015197568389, |
|
"grad_norm": 1.1290186973602714, |
|
"learning_rate": 9.346112354169978e-07, |
|
"loss": 0.7659, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.8662613981762918, |
|
"grad_norm": 1.0923307542579395, |
|
"learning_rate": 9.242458032904311e-07, |
|
"loss": 0.7297, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.8670212765957447, |
|
"grad_norm": 1.0764295889612814, |
|
"learning_rate": 9.139353861366385e-07, |
|
"loss": 0.612, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.8677811550151976, |
|
"grad_norm": 1.1162467272674055, |
|
"learning_rate": 9.036800464548157e-07, |
|
"loss": 0.715, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.8685410334346505, |
|
"grad_norm": 0.9378882497520132, |
|
"learning_rate": 8.934798464102923e-07, |
|
"loss": 0.7498, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.8693009118541033, |
|
"grad_norm": 1.3018566485684548, |
|
"learning_rate": 8.833348478341519e-07, |
|
"loss": 0.6843, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.8700607902735562, |
|
"grad_norm": 1.2038932549579577, |
|
"learning_rate": 8.732451122228592e-07, |
|
"loss": 0.7395, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.8708206686930091, |
|
"grad_norm": 1.1821132797317102, |
|
"learning_rate": 8.632107007378932e-07, |
|
"loss": 0.6861, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.871580547112462, |
|
"grad_norm": 1.103795186039256, |
|
"learning_rate": 8.532316742053715e-07, |
|
"loss": 0.7239, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.8723404255319149, |
|
"grad_norm": 1.0363457541271124, |
|
"learning_rate": 8.433080931156767e-07, |
|
"loss": 0.7178, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.8731003039513677, |
|
"grad_norm": 1.2387877877681717, |
|
"learning_rate": 8.334400176230983e-07, |
|
"loss": 0.6529, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.8738601823708206, |
|
"grad_norm": 1.2619716618586272, |
|
"learning_rate": 8.236275075454647e-07, |
|
"loss": 0.6424, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.8746200607902735, |
|
"grad_norm": 1.2394430031206916, |
|
"learning_rate": 8.138706223637827e-07, |
|
"loss": 0.6702, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 0.8753799392097265, |
|
"grad_norm": 1.3113427456180624, |
|
"learning_rate": 8.041694212218698e-07, |
|
"loss": 0.7173, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.8761398176291794, |
|
"grad_norm": 1.3913267096519508, |
|
"learning_rate": 7.945239629260037e-07, |
|
"loss": 0.752, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 0.8768996960486323, |
|
"grad_norm": 1.0704987317338162, |
|
"learning_rate": 7.849343059445635e-07, |
|
"loss": 0.718, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.8776595744680851, |
|
"grad_norm": 1.2345197139195363, |
|
"learning_rate": 7.75400508407671e-07, |
|
"loss": 0.7142, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.878419452887538, |
|
"grad_norm": 1.0980818944427368, |
|
"learning_rate": 7.659226281068444e-07, |
|
"loss": 0.6966, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.8791793313069909, |
|
"grad_norm": 1.0955001015674064, |
|
"learning_rate": 7.565007224946486e-07, |
|
"loss": 0.725, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 0.8799392097264438, |
|
"grad_norm": 1.1977248745894193, |
|
"learning_rate": 7.471348486843355e-07, |
|
"loss": 0.774, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.8806990881458967, |
|
"grad_norm": 1.3009467416912521, |
|
"learning_rate": 7.378250634495144e-07, |
|
"loss": 0.764, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 0.8814589665653495, |
|
"grad_norm": 1.0304684855411061, |
|
"learning_rate": 7.285714232237972e-07, |
|
"loss": 0.6621, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.8822188449848024, |
|
"grad_norm": 1.293036714289914, |
|
"learning_rate": 7.193739841004565e-07, |
|
"loss": 0.6839, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.8829787234042553, |
|
"grad_norm": 1.2337567888538918, |
|
"learning_rate": 7.102328018320859e-07, |
|
"loss": 0.7641, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.8837386018237082, |
|
"grad_norm": 0.9603759876515855, |
|
"learning_rate": 7.011479318302716e-07, |
|
"loss": 0.7511, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 0.8844984802431611, |
|
"grad_norm": 1.100495149114542, |
|
"learning_rate": 6.9211942916524e-07, |
|
"loss": 0.5739, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.8852583586626139, |
|
"grad_norm": 1.2567058277652308, |
|
"learning_rate": 6.831473485655393e-07, |
|
"loss": 0.7024, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.8860182370820668, |
|
"grad_norm": 1.1536219678957829, |
|
"learning_rate": 6.742317444176994e-07, |
|
"loss": 0.7553, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.8867781155015197, |
|
"grad_norm": 1.1568842131425099, |
|
"learning_rate": 6.653726707659014e-07, |
|
"loss": 0.642, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.8875379939209727, |
|
"grad_norm": 1.1748261916895442, |
|
"learning_rate": 6.565701813116543e-07, |
|
"loss": 0.736, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.8882978723404256, |
|
"grad_norm": 1.2272240589598433, |
|
"learning_rate": 6.478243294134678e-07, |
|
"loss": 0.6785, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 0.8890577507598785, |
|
"grad_norm": 1.0793925928940429, |
|
"learning_rate": 6.39135168086531e-07, |
|
"loss": 0.6815, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.8898176291793313, |
|
"grad_norm": 1.0113854296561349, |
|
"learning_rate": 6.305027500023841e-07, |
|
"loss": 0.6401, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 0.8905775075987842, |
|
"grad_norm": 1.2571607435026524, |
|
"learning_rate": 6.219271274886052e-07, |
|
"loss": 0.4813, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.8913373860182371, |
|
"grad_norm": 0.9180442946861914, |
|
"learning_rate": 6.13408352528495e-07, |
|
"loss": 0.7629, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.89209726443769, |
|
"grad_norm": 1.120989173886215, |
|
"learning_rate": 6.04946476760756e-07, |
|
"loss": 0.5761, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.8928571428571429, |
|
"grad_norm": 1.119368512790432, |
|
"learning_rate": 5.965415514791817e-07, |
|
"loss": 0.7439, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.8936170212765957, |
|
"grad_norm": 1.143580106138461, |
|
"learning_rate": 5.881936276323463e-07, |
|
"loss": 0.7, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.8943768996960486, |
|
"grad_norm": 0.9725000133492355, |
|
"learning_rate": 5.79902755823295e-07, |
|
"loss": 0.7308, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 0.8951367781155015, |
|
"grad_norm": 0.994127857335495, |
|
"learning_rate": 5.716689863092362e-07, |
|
"loss": 0.6033, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.8958966565349544, |
|
"grad_norm": 1.1075290736095693, |
|
"learning_rate": 5.634923690012451e-07, |
|
"loss": 0.7075, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.8966565349544073, |
|
"grad_norm": 1.167844867686388, |
|
"learning_rate": 5.553729534639463e-07, |
|
"loss": 0.5536, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.8974164133738601, |
|
"grad_norm": 1.0591506870039167, |
|
"learning_rate": 5.473107889152241e-07, |
|
"loss": 0.6447, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 0.898176291793313, |
|
"grad_norm": 1.0350933535569133, |
|
"learning_rate": 5.393059242259236e-07, |
|
"loss": 0.7002, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.898936170212766, |
|
"grad_norm": 1.2822200143678832, |
|
"learning_rate": 5.313584079195488e-07, |
|
"loss": 0.674, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 0.8996960486322189, |
|
"grad_norm": 1.0687168416496815, |
|
"learning_rate": 5.234682881719766e-07, |
|
"loss": 0.7696, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 0.9004559270516718, |
|
"grad_norm": 1.1775314415301947, |
|
"learning_rate": 5.156356128111551e-07, |
|
"loss": 0.7153, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.9012158054711246, |
|
"grad_norm": 1.0806460690940876, |
|
"learning_rate": 5.078604293168232e-07, |
|
"loss": 0.6388, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 0.9019756838905775, |
|
"grad_norm": 1.3658980832932874, |
|
"learning_rate": 5.001427848202145e-07, |
|
"loss": 0.6274, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 0.9027355623100304, |
|
"grad_norm": 1.0971465178481652, |
|
"learning_rate": 4.924827261037779e-07, |
|
"loss": 0.6832, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 0.9034954407294833, |
|
"grad_norm": 1.2530282400707315, |
|
"learning_rate": 4.848802996008928e-07, |
|
"loss": 0.606, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 0.9042553191489362, |
|
"grad_norm": 1.2286583520290673, |
|
"learning_rate": 4.773355513955847e-07, |
|
"loss": 0.721, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.9050151975683891, |
|
"grad_norm": 1.2106924050350691, |
|
"learning_rate": 4.6984852722224307e-07, |
|
"loss": 0.7536, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 0.9057750759878419, |
|
"grad_norm": 0.9008661784706631, |
|
"learning_rate": 4.6241927246535645e-07, |
|
"loss": 0.5507, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 0.9065349544072948, |
|
"grad_norm": 1.0875351613738515, |
|
"learning_rate": 4.5504783215922775e-07, |
|
"loss": 0.6835, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 0.9072948328267477, |
|
"grad_norm": 1.122110061636014, |
|
"learning_rate": 4.4773425098769697e-07, |
|
"loss": 0.7824, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 0.9080547112462006, |
|
"grad_norm": 1.0468917022544524, |
|
"learning_rate": 4.4047857328388457e-07, |
|
"loss": 0.7099, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.9088145896656535, |
|
"grad_norm": 1.1327395463199081, |
|
"learning_rate": 4.332808430299085e-07, |
|
"loss": 0.704, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 0.9095744680851063, |
|
"grad_norm": 1.200395501431566, |
|
"learning_rate": 4.2614110385662544e-07, |
|
"loss": 0.6801, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 0.9103343465045592, |
|
"grad_norm": 1.2966065561282096, |
|
"learning_rate": 4.190593990433656e-07, |
|
"loss": 0.6921, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 0.9110942249240122, |
|
"grad_norm": 1.072234796169011, |
|
"learning_rate": 4.1203577151767037e-07, |
|
"loss": 0.7167, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 0.9118541033434651, |
|
"grad_norm": 1.2277287300371735, |
|
"learning_rate": 4.0507026385502747e-07, |
|
"loss": 0.6231, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.912613981762918, |
|
"grad_norm": 1.2406804352583274, |
|
"learning_rate": 3.981629182786162e-07, |
|
"loss": 0.6251, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 0.9133738601823708, |
|
"grad_norm": 1.2565989611946244, |
|
"learning_rate": 3.913137766590569e-07, |
|
"loss": 0.6691, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 0.9141337386018237, |
|
"grad_norm": 1.0894521219118882, |
|
"learning_rate": 3.8452288051414765e-07, |
|
"loss": 0.6174, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 0.9148936170212766, |
|
"grad_norm": 1.0896043395915658, |
|
"learning_rate": 3.7779027100861785e-07, |
|
"loss": 0.7955, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 0.9156534954407295, |
|
"grad_norm": 1.1387911305845246, |
|
"learning_rate": 3.711159889538774e-07, |
|
"loss": 0.7165, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.9164133738601824, |
|
"grad_norm": 1.0581039732048783, |
|
"learning_rate": 3.645000748077709e-07, |
|
"loss": 0.6385, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 0.9171732522796353, |
|
"grad_norm": 1.142293054018674, |
|
"learning_rate": 3.5794256867432985e-07, |
|
"loss": 0.7349, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 0.9179331306990881, |
|
"grad_norm": 1.1115528973809246, |
|
"learning_rate": 3.5144351030353077e-07, |
|
"loss": 0.6063, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 0.918693009118541, |
|
"grad_norm": 1.2711522307666283, |
|
"learning_rate": 3.45002939091057e-07, |
|
"loss": 0.6267, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.9194528875379939, |
|
"grad_norm": 1.130354538612267, |
|
"learning_rate": 3.386208940780522e-07, |
|
"loss": 0.6871, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.9202127659574468, |
|
"grad_norm": 1.2610028948951497, |
|
"learning_rate": 3.3229741395089276e-07, |
|
"loss": 0.6161, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 0.9209726443768997, |
|
"grad_norm": 1.0519435539156983, |
|
"learning_rate": 3.260325370409501e-07, |
|
"loss": 0.7396, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 0.9217325227963525, |
|
"grad_norm": 1.173704186994253, |
|
"learning_rate": 3.19826301324353e-07, |
|
"loss": 0.667, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 0.9224924012158054, |
|
"grad_norm": 1.189649352239027, |
|
"learning_rate": 3.1367874442176485e-07, |
|
"loss": 0.7598, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 0.9232522796352584, |
|
"grad_norm": 1.0286871900514776, |
|
"learning_rate": 3.075899035981533e-07, |
|
"loss": 0.7767, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.9240121580547113, |
|
"grad_norm": 1.0572010661601794, |
|
"learning_rate": 3.0155981576255986e-07, |
|
"loss": 0.7177, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 0.9247720364741642, |
|
"grad_norm": 1.044342277658108, |
|
"learning_rate": 2.955885174678852e-07, |
|
"loss": 0.6784, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 0.925531914893617, |
|
"grad_norm": 1.0189565217507277, |
|
"learning_rate": 2.896760449106606e-07, |
|
"loss": 0.7161, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 0.9262917933130699, |
|
"grad_norm": 1.2660088084368877, |
|
"learning_rate": 2.83822433930826e-07, |
|
"loss": 0.7671, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 0.9270516717325228, |
|
"grad_norm": 1.2991870091587046, |
|
"learning_rate": 2.7802772001152224e-07, |
|
"loss": 0.6416, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.9278115501519757, |
|
"grad_norm": 1.1181116151401733, |
|
"learning_rate": 2.7229193827886913e-07, |
|
"loss": 0.7236, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 0.9285714285714286, |
|
"grad_norm": 1.0163129451962587, |
|
"learning_rate": 2.6661512350175556e-07, |
|
"loss": 0.7145, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 0.9293313069908815, |
|
"grad_norm": 1.1673052192523565, |
|
"learning_rate": 2.609973100916241e-07, |
|
"loss": 0.6913, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 0.9300911854103343, |
|
"grad_norm": 1.023824191239343, |
|
"learning_rate": 2.5543853210226566e-07, |
|
"loss": 0.689, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 0.9308510638297872, |
|
"grad_norm": 1.0056260338184262, |
|
"learning_rate": 2.499388232296174e-07, |
|
"loss": 0.692, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.9316109422492401, |
|
"grad_norm": 1.112601374178031, |
|
"learning_rate": 2.4449821681155284e-07, |
|
"loss": 0.7191, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 0.932370820668693, |
|
"grad_norm": 1.0663092173586473, |
|
"learning_rate": 2.3911674582767553e-07, |
|
"loss": 0.7152, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 0.9331306990881459, |
|
"grad_norm": 1.1835757827302227, |
|
"learning_rate": 2.3379444289913344e-07, |
|
"loss": 0.727, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 0.9338905775075987, |
|
"grad_norm": 1.3007405802649337, |
|
"learning_rate": 2.2853134028840594e-07, |
|
"loss": 0.6252, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 0.9346504559270516, |
|
"grad_norm": 1.1787216115316808, |
|
"learning_rate": 2.233274698991139e-07, |
|
"loss": 0.6884, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.9354103343465046, |
|
"grad_norm": 1.1226604599470809, |
|
"learning_rate": 2.1818286327583537e-07, |
|
"loss": 0.72, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 0.9361702127659575, |
|
"grad_norm": 1.1849786593331166, |
|
"learning_rate": 2.13097551603898e-07, |
|
"loss": 0.7118, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 0.9369300911854104, |
|
"grad_norm": 1.123307297377377, |
|
"learning_rate": 2.0807156570920027e-07, |
|
"loss": 0.689, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 0.9376899696048632, |
|
"grad_norm": 1.173356309084475, |
|
"learning_rate": 2.0310493605802395e-07, |
|
"loss": 0.6502, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 0.9384498480243161, |
|
"grad_norm": 1.1913393551696687, |
|
"learning_rate": 1.9819769275684742e-07, |
|
"loss": 0.6642, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.939209726443769, |
|
"grad_norm": 1.0249961412609725, |
|
"learning_rate": 1.9334986555216374e-07, |
|
"loss": 0.7158, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 0.9399696048632219, |
|
"grad_norm": 1.2126445450577445, |
|
"learning_rate": 1.8856148383029848e-07, |
|
"loss": 0.6607, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 0.9407294832826748, |
|
"grad_norm": 1.4093796517345238, |
|
"learning_rate": 1.8383257661723664e-07, |
|
"loss": 0.6734, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 0.9414893617021277, |
|
"grad_norm": 1.2786023133275601, |
|
"learning_rate": 1.791631725784404e-07, |
|
"loss": 0.6996, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 0.9422492401215805, |
|
"grad_norm": 1.0960561462544673, |
|
"learning_rate": 1.7455330001868054e-07, |
|
"loss": 0.7487, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.9430091185410334, |
|
"grad_norm": 1.1055539525407532, |
|
"learning_rate": 1.7000298688186312e-07, |
|
"loss": 0.6456, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 0.9437689969604863, |
|
"grad_norm": 0.9960735919562198, |
|
"learning_rate": 1.6551226075085746e-07, |
|
"loss": 0.7192, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 0.9445288753799392, |
|
"grad_norm": 1.275738801543123, |
|
"learning_rate": 1.6108114884733184e-07, |
|
"loss": 0.8265, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 0.9452887537993921, |
|
"grad_norm": 1.1633162842566498, |
|
"learning_rate": 1.567096780315891e-07, |
|
"loss": 0.7043, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 0.9460486322188449, |
|
"grad_norm": 1.2665944074906847, |
|
"learning_rate": 1.5239787480240353e-07, |
|
"loss": 0.6394, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.9468085106382979, |
|
"grad_norm": 1.1431525657028725, |
|
"learning_rate": 1.4814576529685543e-07, |
|
"loss": 0.6584, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 0.9475683890577508, |
|
"grad_norm": 1.0445102532211836, |
|
"learning_rate": 1.4395337529018116e-07, |
|
"loss": 0.627, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 0.9483282674772037, |
|
"grad_norm": 1.1371799365903608, |
|
"learning_rate": 1.3982073019560782e-07, |
|
"loss": 0.6978, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 0.9490881458966566, |
|
"grad_norm": 1.0721131322888444, |
|
"learning_rate": 1.3574785506420773e-07, |
|
"loss": 0.7239, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 0.9498480243161094, |
|
"grad_norm": 1.177780740815343, |
|
"learning_rate": 1.317347745847386e-07, |
|
"loss": 0.7071, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.9506079027355623, |
|
"grad_norm": 0.9764352141315131, |
|
"learning_rate": 1.277815130835014e-07, |
|
"loss": 0.5987, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 0.9513677811550152, |
|
"grad_norm": 1.0597707069561533, |
|
"learning_rate": 1.2388809452418716e-07, |
|
"loss": 0.6819, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 0.9521276595744681, |
|
"grad_norm": 1.0357219353126603, |
|
"learning_rate": 1.2005454250773262e-07, |
|
"loss": 0.7386, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 0.952887537993921, |
|
"grad_norm": 1.3688991177073506, |
|
"learning_rate": 1.1628088027218265e-07, |
|
"loss": 0.8196, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 0.9536474164133738, |
|
"grad_norm": 1.0794978094116288, |
|
"learning_rate": 1.1256713069254244e-07, |
|
"loss": 0.6784, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.9544072948328267, |
|
"grad_norm": 1.2978388589542718, |
|
"learning_rate": 1.0891331628063884e-07, |
|
"loss": 0.7526, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 0.9551671732522796, |
|
"grad_norm": 1.0991604780745179, |
|
"learning_rate": 1.0531945918499265e-07, |
|
"loss": 0.6504, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 0.9559270516717325, |
|
"grad_norm": 1.1316949589748224, |
|
"learning_rate": 1.0178558119067316e-07, |
|
"loss": 0.6864, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 0.9566869300911854, |
|
"grad_norm": 1.0535009629368148, |
|
"learning_rate": 9.831170371917276e-08, |
|
"loss": 0.7574, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 0.9574468085106383, |
|
"grad_norm": 1.0988680557329953, |
|
"learning_rate": 9.489784782827582e-08, |
|
"loss": 0.6674, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.9582066869300911, |
|
"grad_norm": 1.2012576178042316, |
|
"learning_rate": 9.154403421193226e-08, |
|
"loss": 0.6475, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 0.958966565349544, |
|
"grad_norm": 1.1647637296827538, |
|
"learning_rate": 8.825028320012752e-08, |
|
"loss": 0.6579, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 0.959726443768997, |
|
"grad_norm": 1.1590888732361975, |
|
"learning_rate": 8.5016614758765e-08, |
|
"loss": 0.7138, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 0.9604863221884499, |
|
"grad_norm": 1.2104828609323357, |
|
"learning_rate": 8.18430484895405e-08, |
|
"loss": 0.7079, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 0.9612462006079028, |
|
"grad_norm": 1.0676473427692756, |
|
"learning_rate": 7.872960362982684e-08, |
|
"loss": 0.6328, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.9620060790273556, |
|
"grad_norm": 1.1050638827297063, |
|
"learning_rate": 7.567629905255503e-08, |
|
"loss": 0.5908, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 0.9627659574468085, |
|
"grad_norm": 1.1570194134365799, |
|
"learning_rate": 7.268315326609988e-08, |
|
"loss": 0.7251, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 0.9635258358662614, |
|
"grad_norm": 1.055972426540126, |
|
"learning_rate": 6.975018441417126e-08, |
|
"loss": 0.6951, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 0.9642857142857143, |
|
"grad_norm": 1.1340255700749204, |
|
"learning_rate": 6.68774102756975e-08, |
|
"loss": 0.729, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 0.9650455927051672, |
|
"grad_norm": 1.1116704872217635, |
|
"learning_rate": 6.406484826472547e-08, |
|
"loss": 0.7146, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.96580547112462, |
|
"grad_norm": 1.2238885966555615, |
|
"learning_rate": 6.131251543030848e-08, |
|
"loss": 0.6778, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 0.9665653495440729, |
|
"grad_norm": 1.1140657613414842, |
|
"learning_rate": 5.862042845640403e-08, |
|
"loss": 0.6196, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 0.9673252279635258, |
|
"grad_norm": 1.4034971558332028, |
|
"learning_rate": 5.59886036617785e-08, |
|
"loss": 0.6726, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 0.9680851063829787, |
|
"grad_norm": 1.247850365035091, |
|
"learning_rate": 5.3417056999901515e-08, |
|
"loss": 0.8024, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 0.9688449848024316, |
|
"grad_norm": 1.1403707142366584, |
|
"learning_rate": 5.0905804058850595e-08, |
|
"loss": 0.6981, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.9696048632218845, |
|
"grad_norm": 1.1512503282266044, |
|
"learning_rate": 4.845486006121891e-08, |
|
"loss": 0.7136, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 0.9703647416413373, |
|
"grad_norm": 1.1205146371244328, |
|
"learning_rate": 4.6064239864020975e-08, |
|
"loss": 0.6572, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 0.9711246200607903, |
|
"grad_norm": 1.0640740176630763, |
|
"learning_rate": 4.3733957958607134e-08, |
|
"loss": 0.6775, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 0.9718844984802432, |
|
"grad_norm": 1.21170834732034, |
|
"learning_rate": 4.146402847056474e-08, |
|
"loss": 0.6142, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 0.9726443768996961, |
|
"grad_norm": 1.2060891666752993, |
|
"learning_rate": 3.925446515964604e-08, |
|
"loss": 0.6963, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.973404255319149, |
|
"grad_norm": 1.1413982945678454, |
|
"learning_rate": 3.7105281419675954e-08, |
|
"loss": 0.701, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 0.9741641337386018, |
|
"grad_norm": 1.1353912953364804, |
|
"learning_rate": 3.5016490278473316e-08, |
|
"loss": 0.733, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 0.9749240121580547, |
|
"grad_norm": 0.9731905027567818, |
|
"learning_rate": 3.2988104397773115e-08, |
|
"loss": 0.6818, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 0.9756838905775076, |
|
"grad_norm": 1.1467760181112903, |
|
"learning_rate": 3.1020136073146575e-08, |
|
"loss": 0.755, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 0.9764437689969605, |
|
"grad_norm": 1.2500496670477363, |
|
"learning_rate": 2.9112597233931228e-08, |
|
"loss": 0.7724, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.9772036474164134, |
|
"grad_norm": 1.0884238091336034, |
|
"learning_rate": 2.7265499443154264e-08, |
|
"loss": 0.7486, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 0.9779635258358662, |
|
"grad_norm": 1.0088684484023063, |
|
"learning_rate": 2.547885389746485e-08, |
|
"loss": 0.7156, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 0.9787234042553191, |
|
"grad_norm": 1.0334713239157454, |
|
"learning_rate": 2.3752671427065276e-08, |
|
"loss": 0.6725, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 0.979483282674772, |
|
"grad_norm": 1.1735061403799454, |
|
"learning_rate": 2.208696249564657e-08, |
|
"loss": 0.6288, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 0.9802431610942249, |
|
"grad_norm": 1.0449694502580413, |
|
"learning_rate": 2.048173720032298e-08, |
|
"loss": 0.7597, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.9810030395136778, |
|
"grad_norm": 1.1234942762561577, |
|
"learning_rate": 1.8937005271572052e-08, |
|
"loss": 0.5683, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 0.9817629179331308, |
|
"grad_norm": 1.4071207404835981, |
|
"learning_rate": 1.7452776073175748e-08, |
|
"loss": 0.7275, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 0.9825227963525835, |
|
"grad_norm": 1.4128938156204476, |
|
"learning_rate": 1.602905860216497e-08, |
|
"loss": 0.8493, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 0.9832826747720365, |
|
"grad_norm": 1.1248277575727312, |
|
"learning_rate": 1.4665861488761813e-08, |
|
"loss": 0.7518, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 0.9840425531914894, |
|
"grad_norm": 1.2244461973302783, |
|
"learning_rate": 1.3363192996328488e-08, |
|
"loss": 0.7051, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.9848024316109423, |
|
"grad_norm": 1.1039705050782245, |
|
"learning_rate": 1.2121061021318492e-08, |
|
"loss": 0.7085, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 0.9855623100303952, |
|
"grad_norm": 1.2017909438185823, |
|
"learning_rate": 1.0939473093229958e-08, |
|
"loss": 0.7568, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 0.986322188449848, |
|
"grad_norm": 1.1888312357208546, |
|
"learning_rate": 9.818436374553487e-09, |
|
"loss": 0.6483, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 0.9870820668693009, |
|
"grad_norm": 1.1075281495725198, |
|
"learning_rate": 8.757957660737726e-09, |
|
"loss": 0.6389, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 0.9878419452887538, |
|
"grad_norm": 1.2252737686145327, |
|
"learning_rate": 7.758043380140523e-09, |
|
"loss": 0.6934, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.9886018237082067, |
|
"grad_norm": 1.3500685962348613, |
|
"learning_rate": 6.818699593996725e-09, |
|
"loss": 0.743, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 0.9893617021276596, |
|
"grad_norm": 1.1391941942219126, |
|
"learning_rate": 5.939931996372661e-09, |
|
"loss": 0.726, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 0.9901215805471124, |
|
"grad_norm": 1.322232366894908, |
|
"learning_rate": 5.1217459141406074e-09, |
|
"loss": 0.6969, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 0.9908814589665653, |
|
"grad_norm": 1.0905255226681096, |
|
"learning_rate": 4.364146306943262e-09, |
|
"loss": 0.7189, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 0.9916413373860182, |
|
"grad_norm": 1.0414862403250007, |
|
"learning_rate": 3.6671377671604337e-09, |
|
"loss": 0.69, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.9924012158054711, |
|
"grad_norm": 1.431716637357648, |
|
"learning_rate": 3.0307245198857303e-09, |
|
"loss": 0.746, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 0.993161094224924, |
|
"grad_norm": 0.9315482774719349, |
|
"learning_rate": 2.454910422897694e-09, |
|
"loss": 0.6958, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 0.993920972644377, |
|
"grad_norm": 0.9621526906912882, |
|
"learning_rate": 1.9396989666398137e-09, |
|
"loss": 0.568, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 0.9946808510638298, |
|
"grad_norm": 1.1947376206574818, |
|
"learning_rate": 1.485093274194993e-09, |
|
"loss": 0.6485, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 0.9954407294832827, |
|
"grad_norm": 1.193575775298368, |
|
"learning_rate": 1.0910961012711163e-09, |
|
"loss": 0.7499, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.9962006079027356, |
|
"grad_norm": 1.1202479836190646, |
|
"learning_rate": 7.577098361810641e-10, |
|
"loss": 0.6712, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 0.9969604863221885, |
|
"grad_norm": 1.150541600033343, |
|
"learning_rate": 4.849364998305017e-10, |
|
"loss": 0.6162, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 0.9977203647416414, |
|
"grad_norm": 1.1841504466478256, |
|
"learning_rate": 2.7277774570233507e-10, |
|
"loss": 0.6731, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 0.9984802431610942, |
|
"grad_norm": 1.1634988910801727, |
|
"learning_rate": 1.2123485985227058e-10, |
|
"loss": 0.6473, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 0.9992401215805471, |
|
"grad_norm": 1.264183108117609, |
|
"learning_rate": 3.030876089438195e-11, |
|
"loss": 0.5803, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.114121273386629, |
|
"learning_rate": 0.0, |
|
"loss": 0.6488, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 1316, |
|
"total_flos": 141830150160384.0, |
|
"train_loss": 0.7534279622205485, |
|
"train_runtime": 5282.496, |
|
"train_samples_per_second": 15.938, |
|
"train_steps_per_second": 0.249 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 1316, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 395, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 141830150160384.0, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|