{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 8.0,
  "global_step": 616,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 1.0526315789473685e-06,
      "loss": 12.2148,
      "step": 1
    },
    {
      "epoch": 0.03,
      "learning_rate": 2.105263157894737e-06,
      "loss": 12.0312,
      "step": 2
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.157894736842105e-06,
      "loss": 12.3086,
      "step": 3
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.210526315789474e-06,
      "loss": 12.1172,
      "step": 4
    },
    {
      "epoch": 0.06,
      "learning_rate": 5.263157894736842e-06,
      "loss": 12.0117,
      "step": 5
    },
    {
      "epoch": 0.08,
      "learning_rate": 6.31578947368421e-06,
      "loss": 12.2656,
      "step": 6
    },
    {
      "epoch": 0.09,
      "learning_rate": 7.368421052631579e-06,
      "loss": 12.125,
      "step": 7
    },
    {
      "epoch": 0.1,
      "learning_rate": 8.421052631578948e-06,
      "loss": 11.2266,
      "step": 8
    },
    {
      "epoch": 0.12,
      "learning_rate": 9.473684210526315e-06,
      "loss": 11.1523,
      "step": 9
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.0526315789473684e-05,
      "loss": 9.5234,
      "step": 10
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.1578947368421053e-05,
      "loss": 9.4688,
      "step": 11
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.263157894736842e-05,
      "loss": 9.25,
      "step": 12
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.3684210526315791e-05,
      "loss": 7.7285,
      "step": 13
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.4736842105263159e-05,
      "loss": 7.6367,
      "step": 14
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.578947368421053e-05,
      "loss": 7.4844,
      "step": 15
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.6842105263157896e-05,
      "loss": 7.2422,
      "step": 16
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.7894736842105264e-05,
      "loss": 7.0938,
      "step": 17
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.894736842105263e-05,
      "loss": 6.7266,
      "step": 18
    },
    {
      "epoch": 0.25,
      "learning_rate": 2e-05,
      "loss": 6.5234,
      "step": 19
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9999861541352416e-05,
      "loss": 6.3477,
      "step": 20
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9999446169243816e-05,
      "loss": 6.127,
      "step": 21
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9998753895176576e-05,
      "loss": 5.8555,
      "step": 22
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.999778473832096e-05,
      "loss": 5.7402,
      "step": 23
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9996538725514597e-05,
      "loss": 5.5605,
      "step": 24
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.999501589126174e-05,
      "loss": 5.4199,
      "step": 25
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9993216277732302e-05,
      "loss": 5.3242,
      "step": 26
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.999113993476069e-05,
      "loss": 5.2148,
      "step": 27
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9988786919844437e-05,
      "loss": 5.1016,
      "step": 28
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9986157298142595e-05,
      "loss": 5.0488,
      "step": 29
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9983251142473935e-05,
      "loss": 4.9258,
      "step": 30
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9980068533314937e-05,
      "loss": 4.9531,
      "step": 31
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9976609558797545e-05,
      "loss": 4.8535,
      "step": 32
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9972874314706755e-05,
      "loss": 4.8203,
      "step": 33
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9968862904477936e-05,
      "loss": 4.8535,
      "step": 34
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.9964575439193966e-05,
      "loss": 4.7168,
      "step": 35
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.996001203758218e-05,
      "loss": 4.6875,
      "step": 36
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.995517282601106e-05,
      "loss": 4.6172,
      "step": 37
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.9950057938486745e-05,
      "loss": 4.6523,
      "step": 38
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.994466751664932e-05,
      "loss": 4.5195,
      "step": 39
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.993900170976888e-05,
      "loss": 4.5117,
      "step": 40
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.9933060674741422e-05,
      "loss": 4.4141,
      "step": 41
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.9926844576084483e-05,
      "loss": 4.3398,
      "step": 42
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.992035358593258e-05,
      "loss": 4.3232,
      "step": 43
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.991358788403246e-05,
      "loss": 4.2305,
      "step": 44
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.990654765773811e-05,
      "loss": 4.1641,
      "step": 45
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.9899233102005573e-05,
      "loss": 4.0674,
      "step": 46
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.9891644419387545e-05,
      "loss": 3.915,
      "step": 47
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.9883781820027777e-05,
      "loss": 3.7822,
      "step": 48
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.987564552165524e-05,
      "loss": 3.709,
      "step": 49
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.9867235749578108e-05,
      "loss": 3.4131,
      "step": 50
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.9858552736677516e-05,
      "loss": 3.1318,
      "step": 51
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.984959672340111e-05,
      "loss": 2.834,
      "step": 52
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.984036795775638e-05,
      "loss": 2.5654,
      "step": 53
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.9830866695303817e-05,
      "loss": 2.417,
      "step": 54
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.9821093199149806e-05,
      "loss": 2.1909,
      "step": 55
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.981104773993936e-05,
      "loss": 2.2568,
      "step": 56
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.980073059584862e-05,
      "loss": 2.2744,
      "step": 57
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.9790142052577148e-05,
      "loss": 2.0771,
      "step": 58
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.977928240334002e-05,
      "loss": 2.1729,
      "step": 59
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.9768151948859705e-05,
      "loss": 2.123,
      "step": 60
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.9756750997357738e-05,
      "loss": 2.0356,
      "step": 61
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.9745079864546184e-05,
      "loss": 2.0142,
      "step": 62
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.97331388736189e-05,
      "loss": 2.061,
      "step": 63
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.972092835524257e-05,
      "loss": 2.0508,
      "step": 64
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.9708448647547575e-05,
      "loss": 2.0171,
      "step": 65
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.9695700096118594e-05,
      "loss": 2.1284,
      "step": 66
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.9682683053985073e-05,
      "loss": 2.0166,
      "step": 67
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.966939788161142e-05,
      "loss": 2.062,
      "step": 68
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.9655844946887035e-05,
      "loss": 2.0142,
      "step": 69
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.9642024625116117e-05,
      "loss": 2.0103,
      "step": 70
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.9627937299007286e-05,
      "loss": 1.9956,
      "step": 71
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.961358335866296e-05,
      "loss": 1.9868,
      "step": 72
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.959896320156857e-05,
      "loss": 2.0435,
      "step": 73
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.958407723258156e-05,
      "loss": 2.0112,
      "step": 74
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.9568925863920155e-05,
      "loss": 2.0908,
      "step": 75
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.955350951515195e-05,
      "loss": 1.9795,
      "step": 76
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.9537828613182314e-05,
      "loss": 2.0112,
      "step": 77
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.9521883592242537e-05,
      "loss": 2.0459,
      "step": 78
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.950567489387783e-05,
      "loss": 2.0117,
      "step": 79
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.9489202966935084e-05,
      "loss": 2.0156,
      "step": 80
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.947246826755044e-05,
      "loss": 2.0547,
      "step": 81
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.945547125913667e-05,
      "loss": 1.9639,
      "step": 82
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.943821241237034e-05,
      "loss": 2.019,
      "step": 83
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.9420692205178753e-05,
      "loss": 1.9771,
      "step": 84
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.9402911122726756e-05,
      "loss": 1.9492,
      "step": 85
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.9384869657403277e-05,
      "loss": 1.9702,
      "step": 86
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.9366568308807685e-05,
      "loss": 1.9946,
      "step": 87
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.9348007583735985e-05,
      "loss": 1.9854,
      "step": 88
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.9329187996166747e-05,
      "loss": 1.959,
      "step": 89
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.9310110067246905e-05,
      "loss": 1.9722,
      "step": 90
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.9290774325277305e-05,
      "loss": 2.0376,
      "step": 91
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.9271181305698084e-05,
      "loss": 1.9834,
      "step": 92
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.9251331551073843e-05,
      "loss": 2.0049,
      "step": 93
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.923122561107861e-05,
      "loss": 1.9824,
      "step": 94
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.9210864042480645e-05,
      "loss": 1.9624,
      "step": 95
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.9190247409126993e-05,
      "loss": 1.9395,
      "step": 96
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.916937628192789e-05,
      "loss": 1.9746,
      "step": 97
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.9148251238840947e-05,
      "loss": 1.9507,
      "step": 98
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.9126872864855142e-05,
      "loss": 2.0054,
      "step": 99
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.9105241751974624e-05,
      "loss": 1.9409,
      "step": 100
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.9083358499202323e-05,
      "loss": 1.9912,
      "step": 101
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.9061223712523352e-05,
      "loss": 1.9404,
      "step": 102
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.903883800488824e-05,
      "loss": 1.9102,
      "step": 103
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.9016201996195943e-05,
      "loss": 1.9248,
      "step": 104
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.8993316313276694e-05,
      "loss": 1.8984,
      "step": 105
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.8970181589874637e-05,
      "loss": 1.9331,
      "step": 106
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.894679846663027e-05,
      "loss": 1.9561,
      "step": 107
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.8923167591062723e-05,
      "loss": 1.8901,
      "step": 108
    },
    {
      "epoch": 1.42,
      "learning_rate": 1.8899289617551803e-05,
      "loss": 1.9922,
      "step": 109
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.8875165207319902e-05,
      "loss": 1.9277,
      "step": 110
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.8850795028413658e-05,
      "loss": 1.9185,
      "step": 111
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.882617975568547e-05,
      "loss": 1.9575,
      "step": 112
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.880132007077482e-05,
      "loss": 1.957,
      "step": 113
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.8776216662089373e-05,
      "loss": 1.8984,
      "step": 114
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.875087022478594e-05,
      "loss": 1.9429,
      "step": 115
    },
    {
      "epoch": 1.51,
      "learning_rate": 1.8725281460751198e-05,
      "loss": 1.8701,
      "step": 116
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.869945107858228e-05,
      "loss": 1.9497,
      "step": 117
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.867337979356715e-05,
      "loss": 1.8921,
      "step": 118
    },
    {
      "epoch": 1.55,
      "learning_rate": 1.8647068327664774e-05,
      "loss": 1.8569,
      "step": 119
    },
    {
      "epoch": 1.56,
      "learning_rate": 1.8620517409485148e-05,
      "loss": 1.8882,
      "step": 120
    },
    {
      "epoch": 1.57,
      "learning_rate": 1.8593727774269122e-05,
      "loss": 1.8765,
      "step": 121
    },
    {
      "epoch": 1.58,
      "learning_rate": 1.8566700163868027e-05,
      "loss": 1.9282,
      "step": 122
    },
    {
      "epoch": 1.6,
      "learning_rate": 1.8539435326723135e-05,
      "loss": 1.8384,
      "step": 123
    },
    {
      "epoch": 1.61,
      "learning_rate": 1.851193401784495e-05,
      "loss": 1.9185,
      "step": 124
    },
    {
      "epoch": 1.62,
      "learning_rate": 1.848419699879227e-05,
      "loss": 1.834,
      "step": 125
    },
    {
      "epoch": 1.64,
      "learning_rate": 1.845622503765113e-05,
      "loss": 1.8657,
      "step": 126
    },
    {
      "epoch": 1.65,
      "learning_rate": 1.842801890901351e-05,
      "loss": 1.8457,
      "step": 127
    },
    {
      "epoch": 1.66,
      "learning_rate": 1.8399579393955893e-05,
      "loss": 1.7671,
      "step": 128
    },
    {
      "epoch": 1.68,
      "learning_rate": 1.837090728001764e-05,
      "loss": 1.8462,
      "step": 129
    },
    {
      "epoch": 1.69,
      "learning_rate": 1.834200336117918e-05,
      "loss": 1.8296,
      "step": 130
    },
    {
      "epoch": 1.7,
      "learning_rate": 1.8312868437840002e-05,
      "loss": 1.8262,
      "step": 131
    },
    {
      "epoch": 1.71,
      "learning_rate": 1.8283503316796536e-05,
      "loss": 1.835,
      "step": 132
    },
    {
      "epoch": 1.73,
      "learning_rate": 1.8253908811219764e-05,
      "loss": 1.8979,
      "step": 133
    },
    {
      "epoch": 1.74,
      "learning_rate": 1.822408574063273e-05,
      "loss": 1.8496,
      "step": 134
    },
    {
      "epoch": 1.75,
      "learning_rate": 1.8194034930887842e-05,
      "loss": 1.8252,
      "step": 135
    },
    {
      "epoch": 1.77,
      "learning_rate": 1.8163757214143993e-05,
      "loss": 1.7812,
      "step": 136
    },
    {
      "epoch": 1.78,
      "learning_rate": 1.8133253428843524e-05,
      "loss": 1.8364,
      "step": 137
    },
    {
      "epoch": 1.79,
      "learning_rate": 1.810252441968901e-05,
      "loss": 1.8013,
      "step": 138
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.8071571037619856e-05,
      "loss": 1.8203,
      "step": 139
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.804039413978875e-05,
      "loss": 1.7729,
      "step": 140
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.8008994589537913e-05,
      "loss": 1.8491,
      "step": 141
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.7977373256375194e-05,
      "loss": 1.7998,
      "step": 142
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.7945531015950008e-05,
      "loss": 1.8364,
      "step": 143
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.791346875002905e-05,
      "loss": 1.8125,
      "step": 144
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.7881187346471924e-05,
      "loss": 1.832,
      "step": 145
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.784868769920653e-05,
      "loss": 1.8271,
      "step": 146
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.7815970708204296e-05,
      "loss": 1.7959,
      "step": 147
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.77830372794553e-05,
      "loss": 1.7798,
      "step": 148
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.774988832494314e-05,
      "loss": 1.7651,
      "step": 149
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.7716524762619695e-05,
      "loss": 1.8076,
      "step": 150
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.7682947516379706e-05,
      "loss": 1.8379,
      "step": 151
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.7649157516035205e-05,
      "loss": 1.8228,
      "step": 152
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.7615155697289734e-05,
      "loss": 1.7783,
      "step": 153
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.7580943001712457e-05,
      "loss": 1.8188,
      "step": 154
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.7546520376712093e-05,
      "loss": 1.7974,
      "step": 155
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.7511888775510662e-05,
      "loss": 1.7964,
      "step": 156
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.7477049157117093e-05,
      "loss": 1.7515,
      "step": 157
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.744200248630068e-05,
      "loss": 1.7725,
      "step": 158
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.7406749733564344e-05,
      "loss": 1.7534,
      "step": 159
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.737129187511779e-05,
      "loss": 1.8408,
      "step": 160
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.7335629892850436e-05,
      "loss": 1.7686,
      "step": 161
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.729976477430425e-05,
      "loss": 1.7642,
      "step": 162
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.7263697512646397e-05,
      "loss": 1.8047,
      "step": 163
    },
    {
      "epoch": 2.13,
      "learning_rate": 1.7227429106641726e-05,
      "loss": 1.8301,
      "step": 164
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.7190960560625127e-05,
      "loss": 1.7588,
      "step": 165
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.7154292884473712e-05,
      "loss": 1.7749,
      "step": 166
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.711742709357886e-05,
      "loss": 1.7251,
      "step": 167
    },
    {
      "epoch": 2.18,
      "learning_rate": 1.708036420881807e-05,
      "loss": 1.7603,
      "step": 168
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.7043105256526723e-05,
      "loss": 1.7339,
      "step": 169
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.7005651268469652e-05,
      "loss": 1.731,
      "step": 170
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.6968003281812563e-05,
      "loss": 1.7598,
      "step": 171
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.693016233909332e-05,
      "loss": 1.7007,
      "step": 172
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.689212948819307e-05,
      "loss": 1.7183,
      "step": 173
    },
    {
      "epoch": 2.26,
      "learning_rate": 1.6853905782307235e-05,
      "loss": 1.7173,
      "step": 174
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.681549227991634e-05,
      "loss": 1.7856,
      "step": 175
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.67768900447567e-05,
      "loss": 1.7329,
      "step": 176
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.6738100145790977e-05,
      "loss": 1.7578,
      "step": 177
    },
    {
      "epoch": 2.31,
      "learning_rate": 1.6699123657178553e-05,
      "loss": 1.6846,
      "step": 178
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.6659961658245813e-05,
      "loss": 1.791,
      "step": 179
    },
    {
      "epoch": 2.34,
      "learning_rate": 1.6620615233456235e-05,
      "loss": 1.7798,
      "step": 180
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.658108547238038e-05,
      "loss": 1.6987,
      "step": 181
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.6541373469665688e-05,
      "loss": 1.7202,
      "step": 182
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.6501480325006206e-05,
      "loss": 1.7285,
      "step": 183
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.64614071431121e-05,
      "loss": 1.7417,
      "step": 184
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.6421155033679085e-05,
      "loss": 1.79,
      "step": 185
    },
    {
      "epoch": 2.42,
      "learning_rate": 1.6380725111357693e-05,
      "loss": 1.7876,
      "step": 186
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.634011849572239e-05,
      "loss": 1.7734,
      "step": 187
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.6299336311240593e-05,
      "loss": 1.7686,
      "step": 188
    },
    {
      "epoch": 2.45,
      "learning_rate": 1.6258379687241533e-05,
      "loss": 1.7993,
      "step": 189
    },
    {
      "epoch": 2.47,
      "learning_rate": 1.6217249757884954e-05,
      "loss": 1.708,
      "step": 190
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.6175947662129735e-05,
      "loss": 1.7065,
      "step": 191
    },
    {
      "epoch": 2.49,
      "learning_rate": 1.6134474543702353e-05,
      "loss": 1.7324,
      "step": 192
    },
    {
      "epoch": 2.51,
      "learning_rate": 1.609283155106517e-05,
      "loss": 1.7686,
      "step": 193
    },
    {
      "epoch": 2.52,
      "learning_rate": 1.605101983738468e-05,
      "loss": 1.7563,
      "step": 194
    },
    {
      "epoch": 2.53,
      "learning_rate": 1.6009040560499548e-05,
      "loss": 1.7373,
      "step": 195
    },
    {
      "epoch": 2.55,
      "learning_rate": 1.596689488288856e-05,
      "loss": 1.7104,
      "step": 196
    },
    {
      "epoch": 2.56,
      "learning_rate": 1.5924583971638416e-05,
      "loss": 1.7368,
      "step": 197
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.5882108998411427e-05,
      "loss": 1.7886,
      "step": 198
    },
    {
      "epoch": 2.58,
      "learning_rate": 1.5839471139413065e-05,
      "loss": 1.6855,
      "step": 199
    },
    {
      "epoch": 2.6,
      "learning_rate": 1.5796671575359382e-05,
      "loss": 1.7158,
      "step": 200
    },
    {
      "epoch": 2.61,
      "learning_rate": 1.5753711491444336e-05,
      "loss": 1.7144,
      "step": 201
    },
    {
      "epoch": 2.62,
      "learning_rate": 1.571059207730695e-05,
      "loss": 1.6909,
      "step": 202
    },
    {
      "epoch": 2.64,
      "learning_rate": 1.5667314526998373e-05,
      "loss": 1.8003,
      "step": 203
    },
    {
      "epoch": 2.65,
      "learning_rate": 1.5623880038948828e-05,
      "loss": 1.7231,
      "step": 204
    },
    {
      "epoch": 2.66,
      "learning_rate": 1.55802898159344e-05,
      "loss": 1.6816,
      "step": 205
    },
    {
      "epoch": 2.68,
      "learning_rate": 1.553654506504377e-05,
      "loss": 1.6826,
      "step": 206
    },
    {
      "epoch": 2.69,
      "learning_rate": 1.5492646997644737e-05,
      "loss": 1.7085,
      "step": 207
    },
    {
      "epoch": 2.7,
      "learning_rate": 1.5448596829350706e-05,
      "loss": 1.6797,
      "step": 208
    },
    {
      "epoch": 2.71,
      "learning_rate": 1.540439577998703e-05,
      "loss": 1.708,
      "step": 209
    },
    {
      "epoch": 2.73,
      "learning_rate": 1.5360045073557214e-05,
      "loss": 1.7036,
      "step": 210
    },
    {
      "epoch": 2.74,
      "learning_rate": 1.5315545938209016e-05,
      "loss": 1.7129,
      "step": 211
    },
    {
      "epoch": 2.75,
      "learning_rate": 1.527089960620046e-05,
      "loss": 1.6855,
      "step": 212
    },
    {
      "epoch": 2.77,
      "learning_rate": 1.5226107313865701e-05,
      "loss": 1.645,
      "step": 213
    },
    {
      "epoch": 2.78,
      "learning_rate": 1.5181170301580776e-05,
      "loss": 1.6982,
      "step": 214
    },
    {
      "epoch": 2.79,
      "learning_rate": 1.5136089813729276e-05,
      "loss": 1.731,
      "step": 215
    },
    {
      "epoch": 2.81,
      "learning_rate": 1.509086709866788e-05,
      "loss": 1.7192,
      "step": 216
    },
    {
      "epoch": 2.82,
      "learning_rate": 1.5045503408691776e-05,
      "loss": 1.6982,
      "step": 217
    },
    {
      "epoch": 2.83,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 1.7266,
      "step": 218
    },
    {
      "epoch": 2.84,
      "learning_rate": 1.495435813266064e-05,
      "loss": 1.6958,
      "step": 219
    },
    {
      "epoch": 2.86,
      "learning_rate": 1.4908579070575936e-05,
      "loss": 1.7056,
      "step": 220
    },
    {
      "epoch": 2.87,
      "learning_rate": 1.4862664081447297e-05,
      "loss": 1.6943,
      "step": 221
    },
    {
      "epoch": 2.88,
      "learning_rate": 1.4816614436740184e-05,
      "loss": 1.6724,
      "step": 222
    },
    {
      "epoch": 2.9,
      "learning_rate": 1.4770431411648898e-05,
      "loss": 1.6641,
      "step": 223
    },
    {
      "epoch": 2.91,
      "learning_rate": 1.4724116285061278e-05,
      "loss": 1.7461,
      "step": 224
    },
    {
      "epoch": 2.92,
      "learning_rate": 1.4677670339523285e-05,
      "loss": 1.7207,
      "step": 225
    },
    {
      "epoch": 2.94,
      "learning_rate": 1.4631094861203478e-05,
      "loss": 1.7061,
      "step": 226
    },
    {
      "epoch": 2.95,
      "learning_rate": 1.4584391139857407e-05,
      "loss": 1.6758,
      "step": 227
    },
    {
      "epoch": 2.96,
      "learning_rate": 1.4537560468791889e-05,
      "loss": 1.73,
      "step": 228
    },
    {
      "epoch": 2.97,
      "learning_rate": 1.4490604144829204e-05,
      "loss": 1.7314,
      "step": 229
    },
    {
      "epoch": 2.99,
      "learning_rate": 1.4443523468271168e-05,
      "loss": 1.7114,
      "step": 230
    },
    {
      "epoch": 3.0,
      "learning_rate": 1.4396319742863145e-05,
      "loss": 1.7212,
      "step": 231
    },
    {
      "epoch": 3.01,
      "learning_rate": 1.4348994275757933e-05,
      "loss": 1.7036,
      "step": 232
    },
    {
      "epoch": 3.03,
      "learning_rate": 1.4301548377479562e-05,
      "loss": 1.71,
      "step": 233
    },
    {
      "epoch": 3.04,
      "learning_rate": 1.4253983361887017e-05,
      "loss": 1.7432,
      "step": 234
    },
    {
      "epoch": 3.05,
      "learning_rate": 1.4206300546137844e-05,
      "loss": 1.6992,
      "step": 235
    },
    {
      "epoch": 3.06,
      "learning_rate": 1.415850125065168e-05,
      "loss": 1.7271,
      "step": 236
    },
    {
      "epoch": 3.08,
      "learning_rate": 1.4110586799073684e-05,
      "loss": 1.6792,
      "step": 237
    },
    {
      "epoch": 3.09,
      "learning_rate": 1.4062558518237893e-05,
      "loss": 1.73,
      "step": 238
    },
    {
      "epoch": 3.1,
      "learning_rate": 1.4014417738130464e-05,
      "loss": 1.7192,
      "step": 239
    },
    {
      "epoch": 3.12,
      "learning_rate": 1.3966165791852862e-05,
      "loss": 1.7476,
      "step": 240
    },
    {
      "epoch": 3.13,
      "learning_rate": 1.3917804015584932e-05,
      "loss": 1.6958,
      "step": 241
    },
    {
      "epoch": 3.14,
      "learning_rate": 1.3869333748547901e-05,
      "loss": 1.6865,
      "step": 242
    },
    {
      "epoch": 3.16,
      "learning_rate": 1.3820756332967294e-05,
      "loss": 1.668,
      "step": 243
    },
    {
      "epoch": 3.17,
      "learning_rate": 1.3772073114035762e-05,
      "loss": 1.6826,
      "step": 244
    },
    {
      "epoch": 3.18,
      "learning_rate": 1.3723285439875836e-05,
      "loss": 1.7227,
      "step": 245
    },
    {
      "epoch": 3.19,
      "learning_rate": 1.3674394661502595e-05,
      "loss": 1.7163,
      "step": 246
    },
    {
      "epoch": 3.21,
      "learning_rate": 1.3625402132786247e-05,
      "loss": 1.6606,
      "step": 247
    },
    {
      "epoch": 3.22,
      "learning_rate": 1.3576309210414646e-05,
      "loss": 1.7085,
      "step": 248
    },
    {
      "epoch": 3.23,
      "learning_rate": 1.352711725385572e-05,
      "loss": 1.668,
      "step": 249
    },
    {
      "epoch": 3.25,
      "learning_rate": 1.3477827625319826e-05,
      "loss": 1.7173,
      "step": 250
    },
    {
      "epoch": 3.26,
      "learning_rate": 1.3428441689722023e-05,
      "loss": 1.7656,
      "step": 251
    },
    {
      "epoch": 3.27,
      "learning_rate": 1.3378960814644283e-05,
      "loss": 1.6812,
      "step": 252
    },
    {
      "epoch": 3.29,
      "learning_rate": 1.3329386370297615e-05,
      "loss": 1.6953,
      "step": 253
    },
    {
      "epoch": 3.3,
      "learning_rate": 1.3279719729484117e-05,
      "loss": 1.665,
      "step": 254
    },
    {
      "epoch": 3.31,
      "learning_rate": 1.3229962267558982e-05,
      "loss": 1.6587,
      "step": 255
    },
    {
      "epoch": 3.32,
      "learning_rate": 1.3180115362392383e-05,
      "loss": 1.6797,
      "step": 256
    },
    {
      "epoch": 3.34,
      "learning_rate": 1.3130180394331335e-05,
      "loss": 1.6992,
      "step": 257
    },
    {
      "epoch": 3.35,
      "learning_rate": 1.3080158746161468e-05,
      "loss": 1.6567,
      "step": 258
    },
    {
      "epoch": 3.36,
      "learning_rate": 1.3030051803068729e-05,
      "loss": 1.6641,
      "step": 259
    },
    {
      "epoch": 3.38,
      "learning_rate": 1.2979860952601038e-05,
      "loss": 1.6841,
      "step": 260
    },
    {
      "epoch": 3.39,
      "learning_rate": 1.2929587584629845e-05,
      "loss": 1.6777,
      "step": 261
    },
    {
      "epoch": 3.4,
      "learning_rate": 1.2879233091311667e-05,
      "loss": 1.7065,
      "step": 262
    },
    {
      "epoch": 3.42,
      "learning_rate": 1.2828798867049504e-05,
      "loss": 1.6997,
      "step": 263
    },
    {
      "epoch": 3.43,
      "learning_rate": 1.2778286308454255e-05,
      "loss": 1.6704,
      "step": 264
    },
    {
      "epoch": 3.44,
      "learning_rate": 1.2727696814306034e-05,
      "loss": 1.6489,
      "step": 265
    },
    {
      "epoch": 3.45,
      "learning_rate": 1.2677031785515423e-05,
      "loss": 1.6777,
      "step": 266
    },
    {
      "epoch": 3.47,
      "learning_rate": 1.26262926250847e-05,
      "loss": 1.6284,
      "step": 267
    },
    {
      "epoch": 3.48,
      "learning_rate": 1.2575480738068971e-05,
      "loss": 1.6445,
      "step": 268
    },
    {
      "epoch": 3.49,
      "learning_rate": 1.2524597531537261e-05,
      "loss": 1.626,
      "step": 269
    },
    {
      "epoch": 3.51,
      "learning_rate": 1.2473644414533573e-05,
      "loss": 1.626,
      "step": 270
    },
    {
      "epoch": 3.52,
      "learning_rate": 1.2422622798037833e-05,
      "loss": 1.6919,
      "step": 271
    },
    {
      "epoch": 3.53,
      "learning_rate": 1.2371534094926852e-05,
      "loss": 1.6602,
      "step": 272
    },
    {
      "epoch": 3.55,
      "learning_rate": 1.232037971993517e-05,
      "loss": 1.6401,
      "step": 273
    },
    {
      "epoch": 3.56,
      "learning_rate": 1.2269161089615902e-05,
      "loss": 1.7026,
      "step": 274
    },
    {
      "epoch": 3.57,
      "learning_rate": 1.2217879622301514e-05,
      "loss": 1.6875,
      "step": 275
    },
    {
      "epoch": 3.58,
      "learning_rate": 1.2166536738064523e-05,
      "loss": 1.6646,
      "step": 276
    },
    {
      "epoch": 3.6,
      "learning_rate": 1.2115133858678192e-05,
      "loss": 1.6631,
      "step": 277
    },
    {
      "epoch": 3.61,
      "learning_rate": 1.2063672407577154e-05,
      "loss": 1.6196,
      "step": 278
    },
    {
      "epoch": 3.62,
      "learning_rate": 1.2012153809817992e-05,
      "loss": 1.6606,
      "step": 279
    },
    {
      "epoch": 3.64,
      "learning_rate": 1.1960579492039783e-05,
      "loss": 1.6719,
      "step": 280
    },
    {
      "epoch": 3.65,
      "learning_rate": 1.1908950882424581e-05,
      "loss": 1.6958,
      "step": 281
    },
    {
      "epoch": 3.66,
      "learning_rate": 1.1857269410657883e-05,
      "loss": 1.645,
      "step": 282
    },
    {
      "epoch": 3.68,
      "learning_rate": 1.1805536507889021e-05,
      "loss": 1.6782,
      "step": 283
    },
    {
      "epoch": 3.69,
      "learning_rate": 1.1753753606691554e-05,
      "loss": 1.6724,
      "step": 284
    },
    {
      "epoch": 3.7,
      "learning_rate": 1.1701922141023566e-05,
      "loss": 1.6108,
      "step": 285
    },
    {
      "epoch": 3.71,
      "learning_rate": 1.1650043546187994e-05,
      "loss": 1.6313,
      "step": 286
    },
    {
      "epoch": 3.73,
      "learning_rate": 1.1598119258792848e-05,
      "loss": 1.647,
      "step": 287
    },
    {
      "epoch": 3.74,
      "learning_rate": 1.1546150716711448e-05,
      "loss": 1.6816,
      "step": 288
    },
    {
      "epoch": 3.75,
      "learning_rate": 1.1494139359042612e-05,
      "loss": 1.6846,
      "step": 289
    },
    {
      "epoch": 3.77,
      "learning_rate": 1.1442086626070781e-05,
      "loss": 1.6602,
      "step": 290
    },
    {
      "epoch": 3.78,
      "learning_rate": 1.1389993959226163e-05,
      "loss": 1.6133,
      "step": 291
    },
    {
      "epoch": 3.79,
      "learning_rate": 1.1337862801044792e-05,
      "loss": 1.6997,
      "step": 292
    },
    {
      "epoch": 3.81,
      "learning_rate": 1.1285694595128606e-05,
      "loss": 1.6172,
      "step": 293
    },
    {
      "epoch": 3.82,
      "learning_rate": 1.123349078610545e-05,
      "loss": 1.6479,
      "step": 294
    },
    {
      "epoch": 3.83,
      "learning_rate": 1.1181252819589081e-05,
      "loss": 1.6851,
      "step": 295
    },
    {
      "epoch": 3.84,
      "learning_rate": 1.1128982142139142e-05,
      "loss": 1.6533,
      "step": 296
    },
    {
      "epoch": 3.86,
      "learning_rate": 1.1076680201221093e-05,
      "loss": 1.6367,
      "step": 297
    },
    {
      "epoch": 3.87,
      "learning_rate": 1.1024348445166133e-05,
      "loss": 1.6426,
      "step": 298
    },
    {
      "epoch": 3.88,
      "learning_rate": 1.0971988323131099e-05,
      "loss": 1.6509,
      "step": 299
    },
    {
      "epoch": 3.9,
      "learning_rate": 1.091960128505833e-05,
      "loss": 1.6997,
      "step": 300
    },
    {
      "epoch": 3.91,
      "learning_rate": 1.086718878163551e-05,
      "loss": 1.6187,
      "step": 301
    },
    {
      "epoch": 3.92,
      "learning_rate": 1.0814752264255508e-05,
      "loss": 1.6914,
      "step": 302
    },
    {
      "epoch": 3.94,
      "learning_rate": 1.0762293184976178e-05,
      "loss": 1.6421,
      "step": 303
    },
    {
      "epoch": 3.95,
      "learning_rate": 1.070981299648016e-05,
      "loss": 1.6631,
      "step": 304
    },
    {
      "epoch": 3.96,
      "learning_rate": 1.0657313152034634e-05,
      "loss": 1.7046,
      "step": 305
    },
    {
      "epoch": 3.97,
      "learning_rate": 1.0604795105451096e-05,
      "loss": 1.5845,
      "step": 306
    },
    {
      "epoch": 3.99,
      "learning_rate": 1.0552260311045082e-05,
      "loss": 1.6621,
      "step": 307
    },
    {
      "epoch": 4.0,
      "learning_rate": 1.0499710223595913e-05,
      "loss": 1.6782,
      "step": 308
    },
    {
      "epoch": 4.01,
      "learning_rate": 1.0447146298306394e-05,
      "loss": 1.6611,
      "step": 309
    },
    {
      "epoch": 4.03,
      "learning_rate": 1.0394569990762528e-05,
      "loss": 1.6626,
      "step": 310
    },
    {
      "epoch": 4.04,
      "learning_rate": 1.0341982756893203e-05,
      "loss": 1.6406,
      "step": 311
    },
    {
      "epoch": 4.05,
      "learning_rate": 1.0289386052929874e-05,
      "loss": 1.6455,
      "step": 312
    },
    {
      "epoch": 4.06,
      "learning_rate": 1.0236781335366239e-05,
      "loss": 1.7051,
      "step": 313
    },
    {
      "epoch": 4.08,
      "learning_rate": 1.0184170060917914e-05,
      "loss": 1.5967,
      "step": 314
    },
    {
      "epoch": 4.09,
      "learning_rate": 1.0131553686482077e-05,
      "loss": 1.6772,
      "step": 315
    },
    {
      "epoch": 4.1,
      "learning_rate": 1.0078933669097135e-05,
      "loss": 1.625,
      "step": 316
    },
    {
      "epoch": 4.12,
      "learning_rate": 1.002631146590238e-05,
      "loss": 1.6572,
      "step": 317
    },
    {
      "epoch": 4.13,
      "learning_rate": 9.973688534097624e-06,
      "loss": 1.6694,
      "step": 318
    },
    {
      "epoch": 4.14,
      "learning_rate": 9.92106633090287e-06,
      "loss": 1.6377,
      "step": 319
    },
    {
      "epoch": 4.16,
      "learning_rate": 9.868446313517927e-06,
      "loss": 1.6782,
      "step": 320
    },
    {
      "epoch": 4.17,
      "learning_rate": 9.815829939082087e-06,
      "loss": 1.6147,
      "step": 321
    },
    {
      "epoch": 4.18,
      "learning_rate": 9.763218664633763e-06,
      "loss": 1.6826,
      "step": 322
    },
    {
      "epoch": 4.19,
      "learning_rate": 9.710613947070127e-06,
      "loss": 1.7041,
      "step": 323
    },
    {
      "epoch": 4.21,
      "learning_rate": 9.658017243106802e-06,
      "loss": 1.6343,
      "step": 324
    },
    {
      "epoch": 4.22,
      "learning_rate": 9.605430009237474e-06,
      "loss": 1.6724,
      "step": 325
    },
    {
      "epoch": 4.23,
      "learning_rate": 9.552853701693606e-06,
      "loss": 1.6812,
      "step": 326
    },
    {
      "epoch": 4.25,
      "learning_rate": 9.50028977640409e-06,
      "loss": 1.6289,
      "step": 327
    },
    {
      "epoch": 4.26,
      "learning_rate": 9.44773968895492e-06,
      "loss": 1.6313,
      "step": 328
    },
    {
      "epoch": 4.27,
      "learning_rate": 9.395204894548907e-06,
      "loss": 1.6274,
      "step": 329
    },
    {
      "epoch": 4.29,
      "learning_rate": 9.342686847965367e-06,
      "loss": 1.6572,
      "step": 330
    },
    {
      "epoch": 4.3,
      "learning_rate": 9.290187003519841e-06,
      "loss": 1.6333,
      "step": 331
    },
    {
      "epoch": 4.31,
      "learning_rate": 9.237706815023824e-06,
      "loss": 1.687,
      "step": 332
    },
    {
      "epoch": 4.32,
      "learning_rate": 9.185247735744495e-06,
      "loss": 1.6626,
      "step": 333
    },
    {
      "epoch": 4.34,
      "learning_rate": 9.132811218364494e-06,
      "loss": 1.6431,
      "step": 334
    },
    {
      "epoch": 4.35,
      "learning_rate": 9.080398714941672e-06,
      "loss": 1.6562,
      "step": 335
    },
    {
      "epoch": 4.36,
      "learning_rate": 9.028011676868901e-06,
      "loss": 1.6714,
      "step": 336
    },
    {
      "epoch": 4.38,
      "learning_rate": 8.975651554833869e-06,
      "loss": 1.604,
      "step": 337
    },
    {
      "epoch": 4.39,
      "learning_rate": 8.92331979877891e-06,
      "loss": 1.6719,
      "step": 338
    },
    {
      "epoch": 4.4,
      "learning_rate": 8.871017857860863e-06,
      "loss": 1.707,
      "step": 339
    },
    {
      "epoch": 4.42,
      "learning_rate": 8.81874718041092e-06,
      "loss": 1.647,
      "step": 340
    },
    {
      "epoch": 4.43,
      "learning_rate": 8.766509213894552e-06,
      "loss": 1.6675,
      "step": 341
    },
    {
      "epoch": 4.44,
      "learning_rate": 8.714305404871397e-06,
      "loss": 1.6636,
      "step": 342
    },
    {
      "epoch": 4.45,
      "learning_rate": 8.662137198955211e-06,
      "loss": 1.6768,
      "step": 343
    },
    {
      "epoch": 4.47,
      "learning_rate": 8.610006040773844e-06,
      "loss": 1.5864,
      "step": 344
    },
    {
      "epoch": 4.48,
      "learning_rate": 8.557913373929222e-06,
      "loss": 1.6304,
      "step": 345
    },
    {
      "epoch": 4.49,
      "learning_rate": 8.50586064095739e-06,
      "loss": 1.6289,
      "step": 346
    },
    {
      "epoch": 4.51,
      "learning_rate": 8.453849283288554e-06,
      "loss": 1.6436,
      "step": 347
    },
    {
      "epoch": 4.52,
      "learning_rate": 8.401880741207155e-06,
      "loss": 1.6221,
      "step": 348
    },
    {
      "epoch": 4.53,
      "learning_rate": 8.349956453812009e-06,
      "loss": 1.6904,
      "step": 349
    },
    {
      "epoch": 4.55,
      "learning_rate": 8.298077858976435e-06,
      "loss": 1.5898,
      "step": 350
    },
    {
      "epoch": 4.56,
      "learning_rate": 8.246246393308448e-06,
      "loss": 1.667,
      "step": 351
    },
    {
      "epoch": 4.57,
      "learning_rate": 8.194463492110982e-06,
      "loss": 1.6543,
      "step": 352
    },
    {
      "epoch": 4.58,
      "learning_rate": 8.142730589342119e-06,
      "loss": 1.6572,
      "step": 353
    },
    {
      "epoch": 4.6,
      "learning_rate": 8.091049117575424e-06,
      "loss": 1.6685,
      "step": 354
    },
    {
      "epoch": 4.61,
      "learning_rate": 8.03942050796022e-06,
      "loss": 1.6484,
      "step": 355
    },
    {
      "epoch": 4.62,
      "learning_rate": 7.98784619018201e-06,
      "loss": 1.5405,
      "step": 356
    },
    {
      "epoch": 4.64,
      "learning_rate": 7.93632759242285e-06,
      "loss": 1.644,
      "step": 357
    },
    {
      "epoch": 4.65,
      "learning_rate": 7.884866141321811e-06,
      "loss": 1.6206,
      "step": 358
    },
    {
      "epoch": 4.66,
      "learning_rate": 7.833463261935482e-06,
      "loss": 1.6079,
      "step": 359
    },
    {
      "epoch": 4.68,
      "learning_rate": 7.782120377698489e-06,
      "loss": 1.6108,
      "step": 360
    },
    {
      "epoch": 4.69,
      "learning_rate": 7.730838910384098e-06,
      "loss": 1.5625,
      "step": 361
    },
    {
      "epoch": 4.7,
      "learning_rate": 7.679620280064837e-06,
      "loss": 1.647,
      "step": 362
    },
    {
      "epoch": 4.71,
      "learning_rate": 7.6284659050731525e-06,
      "loss": 1.5493,
      "step": 363
    },
    {
      "epoch": 4.73,
      "learning_rate": 7.57737720196217e-06,
      "loss": 1.6362,
      "step": 364
    },
    {
      "epoch": 4.74,
      "learning_rate": 7.526355585466432e-06,
      "loss": 1.6294,
      "step": 365
    },
    {
      "epoch": 4.75,
      "learning_rate": 7.4754024684627405e-06,
      "loss": 1.6675,
      "step": 366
    },
    {
      "epoch": 4.77,
      "learning_rate": 7.424519261931036e-06,
      "loss": 1.6519,
      "step": 367
    },
    {
      "epoch": 4.78,
      "learning_rate": 7.373707374915303e-06,
      "loss": 1.6807,
      "step": 368
    },
    {
      "epoch": 4.79,
      "learning_rate": 7.322968214484583e-06,
      "loss": 1.6221,
      "step": 369
    },
    {
      "epoch": 4.81,
      "learning_rate": 7.27230318569397e-06,
      "loss": 1.6523,
      "step": 370
    },
    {
      "epoch": 4.82,
      "learning_rate": 7.221713691545746e-06,
      "loss": 1.6118,
      "step": 371
    },
    {
      "epoch": 4.83,
      "learning_rate": 7.171201132950502e-06,
      "loss": 1.6279,
      "step": 372
    },
    {
      "epoch": 4.84,
      "learning_rate": 7.1207669086883366e-06,
      "loss": 1.6416,
      "step": 373
    },
    {
      "epoch": 4.86,
      "learning_rate": 7.070412415370158e-06,
      "loss": 1.605,
      "step": 374
    },
    {
      "epoch": 4.87,
      "learning_rate": 7.020139047398966e-06,
      "loss": 1.627,
      "step": 375
    },
    {
      "epoch": 4.88,
      "learning_rate": 6.969948196931272e-06,
      "loss": 1.6123,
      "step": 376
    },
    {
      "epoch": 4.9,
      "learning_rate": 6.919841253838537e-06,
      "loss": 1.6333,
      "step": 377
    },
    {
      "epoch": 4.91,
      "learning_rate": 6.869819605668669e-06,
      "loss": 1.5981,
      "step": 378
    },
    {
      "epoch": 4.92,
      "learning_rate": 6.819884637607619e-06,
      "loss": 1.646,
      "step": 379
    },
    {
      "epoch": 4.94,
      "learning_rate": 6.770037732441019e-06,
      "loss": 1.6641,
      "step": 380
    },
    {
      "epoch": 4.95,
      "learning_rate": 6.720280270515882e-06,
      "loss": 1.6362,
      "step": 381
    },
    {
      "epoch": 4.96,
      "learning_rate": 6.670613629702391e-06,
      "loss": 1.6562,
      "step": 382
    },
    {
      "epoch": 4.97,
      "learning_rate": 6.62103918535572e-06,
      "loss": 1.6772,
      "step": 383
    },
    {
      "epoch": 4.99,
      "learning_rate": 6.5715583102779815e-06,
      "loss": 1.6729,
      "step": 384
    },
    {
      "epoch": 5.0,
      "learning_rate": 6.522172374680177e-06,
      "loss": 1.6597,
      "step": 385
    },
    {
      "epoch": 5.01,
      "learning_rate": 6.472882746144282e-06,
      "loss": 1.6348,
      "step": 386
    },
    {
      "epoch": 5.03,
      "learning_rate": 6.423690789585359e-06,
      "loss": 1.6108,
      "step": 387
    },
    {
      "epoch": 5.04,
      "learning_rate": 6.374597867213756e-06,
      "loss": 1.6421,
      "step": 388
    },
    {
      "epoch": 5.05,
      "learning_rate": 6.3256053384974105e-06,
      "loss": 1.6455,
      "step": 389
    },
    {
      "epoch": 5.06,
      "learning_rate": 6.276714560124166e-06,
      "loss": 1.6616,
      "step": 390
    },
    {
      "epoch": 5.08,
      "learning_rate": 6.2279268859642396e-06,
      "loss": 1.6162,
      "step": 391
    },
    {
      "epoch": 5.09,
      "learning_rate": 6.179243667032709e-06,
      "loss": 1.6646,
      "step": 392
    },
    {
      "epoch": 5.1,
      "learning_rate": 6.130666251452102e-06,
      "loss": 1.6445,
      "step": 393
    },
    {
      "epoch": 5.12,
      "learning_rate": 6.082195984415069e-06,
      "loss": 1.6299,
      "step": 394
    },
    {
      "epoch": 5.13,
      "learning_rate": 6.03383420814714e-06,
      "loss": 1.6221,
      "step": 395
    },
    {
      "epoch": 5.14,
      "learning_rate": 5.9855822618695385e-06,
      "loss": 1.647,
      "step": 396
    },
    {
      "epoch": 5.16,
      "learning_rate": 5.937441481762112e-06,
      "loss": 1.6147,
      "step": 397
    },
    {
      "epoch": 5.17,
      "learning_rate": 5.889413200926317e-06,
      "loss": 1.6025,
      "step": 398
    },
    {
      "epoch": 5.18,
      "learning_rate": 5.841498749348322e-06,
      "loss": 1.6064,
      "step": 399
    },
    {
      "epoch": 5.19,
      "learning_rate": 5.793699453862161e-06,
      "loss": 1.6587,
      "step": 400
    },
    {
      "epoch": 5.21,
      "learning_rate": 5.746016638112986e-06,
      "loss": 1.6255,
      "step": 401
    },
    {
      "epoch": 5.22,
      "learning_rate": 5.698451622520442e-06,
      "loss": 1.6523,
      "step": 402
    },
    {
      "epoch": 5.23,
      "learning_rate": 5.651005724242072e-06,
      "loss": 1.6367,
      "step": 403
    },
    {
      "epoch": 5.25,
      "learning_rate": 5.603680257136857e-06,
      "loss": 1.6006,
      "step": 404
    },
    {
      "epoch": 5.26,
      "learning_rate": 5.556476531728836e-06,
      "loss": 1.6294,
      "step": 405
    },
    {
      "epoch": 5.27,
      "learning_rate": 5.509395855170798e-06,
      "loss": 1.6284,
      "step": 406
    },
    {
      "epoch": 5.29,
      "learning_rate": 5.4624395312081125e-06,
      "loss": 1.6392,
      "step": 407
    },
    {
      "epoch": 5.3,
      "learning_rate": 5.415608860142593e-06,
      "loss": 1.625,
      "step": 408
    },
    {
      "epoch": 5.31,
      "learning_rate": 5.368905138796523e-06,
      "loss": 1.6162,
      "step": 409
    },
    {
      "epoch": 5.32,
      "learning_rate": 5.322329660476715e-06,
      "loss": 1.5752,
      "step": 410
    },
    {
      "epoch": 5.34,
      "learning_rate": 5.275883714938726e-06,
      "loss": 1.6655,
      "step": 411
    },
    {
      "epoch": 5.35,
      "learning_rate": 5.2295685883511086e-06,
      "loss": 1.5972,
      "step": 412
    },
    {
      "epoch": 5.36,
      "learning_rate": 5.183385563259819e-06,
      "loss": 1.6421,
      "step": 413
    },
    {
      "epoch": 5.38,
      "learning_rate": 5.137335918552702e-06,
      "loss": 1.5869,
      "step": 414
    },
    {
      "epoch": 5.39,
      "learning_rate": 5.091420929424065e-06,
      "loss": 1.6333,
      "step": 415
    },
    {
      "epoch": 5.4,
      "learning_rate": 5.045641867339361e-06,
      "loss": 1.6445,
      "step": 416
    },
    {
      "epoch": 5.42,
      "learning_rate": 5.000000000000003e-06,
      "loss": 1.6597,
      "step": 417
    },
    {
      "epoch": 5.43,
      "learning_rate": 4.954496591308227e-06,
      "loss": 1.6387,
      "step": 418
    },
    {
      "epoch": 5.44,
      "learning_rate": 4.909132901332122e-06,
      "loss": 1.6489,
      "step": 419
    },
    {
      "epoch": 5.45,
      "learning_rate": 4.863910186270726e-06,
      "loss": 1.6318,
      "step": 420
    },
    {
      "epoch": 5.47,
      "learning_rate": 4.818829698419225e-06,
      "loss": 1.6841,
      "step": 421
    },
    {
      "epoch": 5.48,
      "learning_rate": 4.773892686134301e-06,
      "loss": 1.666,
      "step": 422
    },
    {
      "epoch": 5.49,
      "learning_rate": 4.729100393799538e-06,
      "loss": 1.6162,
      "step": 423
    },
    {
      "epoch": 5.51,
      "learning_rate": 4.684454061790987e-06,
      "loss": 1.5957,
      "step": 424
    },
    {
      "epoch": 5.52,
      "learning_rate": 4.639954926442792e-06,
      "loss": 1.6201,
      "step": 425
    },
    {
      "epoch": 5.53,
      "learning_rate": 4.5956042200129725e-06,
      "loss": 1.6533,
      "step": 426
    },
    {
      "epoch": 5.55,
      "learning_rate": 4.551403170649299e-06,
      "loss": 1.624,
      "step": 427
    },
    {
      "epoch": 5.56,
      "learning_rate": 4.507353002355269e-06,
      "loss": 1.604,
      "step": 428
    },
    {
      "epoch": 5.57,
      "learning_rate": 4.4634549349562315e-06,
      "loss": 1.6089,
      "step": 429
    },
    {
      "epoch": 5.58,
      "learning_rate": 4.4197101840656e-06,
      "loss": 1.5962,
      "step": 430
    },
    {
      "epoch": 5.6,
      "learning_rate": 4.376119961051175e-06,
      "loss": 1.5962,
      "step": 431
    },
    {
      "epoch": 5.61,
      "learning_rate": 4.33268547300163e-06,
      "loss": 1.6313,
      "step": 432
    },
    {
      "epoch": 5.62,
      "learning_rate": 4.289407922693053e-06,
      "loss": 1.6626,
      "step": 433
    },
    {
      "epoch": 5.64,
      "learning_rate": 4.2462885085556635e-06,
      "loss": 1.5796,
      "step": 434
    },
    {
      "epoch": 5.65,
      "learning_rate": 4.203328424640619e-06,
      "loss": 1.6836,
      "step": 435
    },
    {
      "epoch": 5.66,
      "learning_rate": 4.1605288605869365e-06,
      "loss": 1.6675,
      "step": 436
    },
    {
      "epoch": 5.68,
      "learning_rate": 4.117891001588574e-06,
      "loss": 1.6807,
      "step": 437
    },
    {
      "epoch": 5.69,
      "learning_rate": 4.075416028361584e-06,
      "loss": 1.6167,
      "step": 438
    },
    {
      "epoch": 5.7,
      "learning_rate": 4.033105117111441e-06,
      "loss": 1.6851,
      "step": 439
    },
    {
      "epoch": 5.71,
      "learning_rate": 3.9909594395004545e-06,
      "loss": 1.6191,
      "step": 440
    },
    {
      "epoch": 5.73,
      "learning_rate": 3.948980162615323e-06,
      "loss": 1.6362,
      "step": 441
    },
    {
      "epoch": 5.74,
      "learning_rate": 3.907168448934836e-06,
      "loss": 1.5825,
      "step": 442
    },
    {
      "epoch": 5.75,
      "learning_rate": 3.865525456297652e-06,
      "loss": 1.6182,
      "step": 443
    },
    {
      "epoch": 5.77,
      "learning_rate": 3.824052337870263e-06,
      "loss": 1.5908,
      "step": 444
    },
    {
      "epoch": 5.78,
      "learning_rate": 3.7827502421150497e-06,
      "loss": 1.6162,
      "step": 445
    },
    {
      "epoch": 5.79,
      "learning_rate": 3.741620312758469e-06,
      "loss": 1.6021,
      "step": 446
    },
    {
      "epoch": 5.81,
      "learning_rate": 3.7006636887594095e-06,
      "loss": 1.6479,
      "step": 447
    },
    {
      "epoch": 5.82,
      "learning_rate": 3.6598815042776135e-06,
      "loss": 1.6294,
      "step": 448
    },
    {
      "epoch": 5.83,
      "learning_rate": 3.619274888642309e-06,
      "loss": 1.6914,
      "step": 449
    },
    {
      "epoch": 5.84,
      "learning_rate": 3.578844966320917e-06,
      "loss": 1.6226,
      "step": 450
    },
    {
      "epoch": 5.86,
      "learning_rate": 3.5385928568879012e-06,
      "loss": 1.6196,
      "step": 451
    },
    {
      "epoch": 5.87,
      "learning_rate": 3.4985196749937976e-06,
      "loss": 1.5977,
      "step": 452
    },
    {
      "epoch": 5.88,
      "learning_rate": 3.458626530334316e-06,
      "loss": 1.5786,
      "step": 453
    },
    {
      "epoch": 5.9,
      "learning_rate": 3.4189145276196244e-06,
      "loss": 1.6113,
      "step": 454
    },
    {
      "epoch": 5.91,
      "learning_rate": 3.3793847665437674e-06,
      "loss": 1.6025,
      "step": 455
    },
    {
      "epoch": 5.92,
      "learning_rate": 3.340038341754189e-06,
      "loss": 1.6191,
      "step": 456
    },
    {
      "epoch": 5.94,
      "learning_rate": 3.300876342821451e-06,
      "loss": 1.604,
      "step": 457
    },
    {
      "epoch": 5.95,
      "learning_rate": 3.2618998542090263e-06,
      "loss": 1.6274,
      "step": 458
    },
    {
      "epoch": 5.96,
      "learning_rate": 3.2231099552433e-06,
      "loss": 1.6543,
      "step": 459
    },
    {
      "epoch": 5.97,
      "learning_rate": 3.1845077200836638e-06,
      "loss": 1.6265,
      "step": 460
    },
    {
      "epoch": 5.99,
      "learning_rate": 3.1460942176927666e-06,
      "loss": 1.6123,
      "step": 461
    },
    {
      "epoch": 6.0,
      "learning_rate": 3.107870511806934e-06,
      "loss": 1.6401,
      "step": 462
    },
    {
      "epoch": 6.01,
      "learning_rate": 3.0698376609066828e-06,
      "loss": 1.6094,
      "step": 463
    },
    {
      "epoch": 6.03,
      "learning_rate": 3.0319967181874366e-06,
      "loss": 1.5859,
      "step": 464
    },
    {
      "epoch": 6.04,
      "learning_rate": 2.9943487315303486e-06,
      "loss": 1.6182,
      "step": 465
    },
    {
      "epoch": 6.05,
      "learning_rate": 2.9568947434732777e-06,
      "loss": 1.6196,
      "step": 466
    },
    {
      "epoch": 6.06,
      "learning_rate": 2.919635791181934e-06,
      "loss": 1.6367,
      "step": 467
    },
    {
      "epoch": 6.08,
      "learning_rate": 2.882572906421145e-06,
      "loss": 1.7124,
      "step": 468
    },
    {
      "epoch": 6.09,
      "learning_rate": 2.8457071155262885e-06,
      "loss": 1.623,
      "step": 469
    },
    {
      "epoch": 6.1,
      "learning_rate": 2.809039439374878e-06,
      "loss": 1.5874,
      "step": 470
    },
    {
      "epoch": 6.12,
      "learning_rate": 2.7725708933582785e-06,
      "loss": 1.6362,
      "step": 471
    },
    {
      "epoch": 6.13,
      "learning_rate": 2.7363024873536093e-06,
      "loss": 1.6221,
      "step": 472
    },
    {
      "epoch": 6.14,
      "learning_rate": 2.700235225695752e-06,
      "loss": 1.6416,
      "step": 473
    },
    {
      "epoch": 6.16,
      "learning_rate": 2.6643701071495644e-06,
      "loss": 1.668,
      "step": 474
    },
    {
      "epoch": 6.17,
      "learning_rate": 2.628708124882212e-06,
      "loss": 1.5928,
      "step": 475
    },
    {
      "epoch": 6.18,
      "learning_rate": 2.5932502664356553e-06,
      "loss": 1.6172,
      "step": 476
    },
    {
      "epoch": 6.19,
      "learning_rate": 2.5579975136993253e-06,
      "loss": 1.6162,
      "step": 477
    },
    {
      "epoch": 6.21,
      "learning_rate": 2.52295084288291e-06,
      "loss": 1.6636,
      "step": 478
    },
    {
      "epoch": 6.22,
      "learning_rate": 2.4881112244893403e-06,
      "loss": 1.6748,
      "step": 479
    },
    {
      "epoch": 6.23,
      "learning_rate": 2.453479623287909e-06,
      "loss": 1.6167,
      "step": 480
    },
    {
      "epoch": 6.25,
      "learning_rate": 2.419056998287547e-06,
      "loss": 1.6763,
      "step": 481
    },
    {
      "epoch": 6.26,
      "learning_rate": 2.3848443027102706e-06,
      "loss": 1.6587,
      "step": 482
    },
    {
      "epoch": 6.27,
      "learning_rate": 2.3508424839647994e-06,
      "loss": 1.6538,
      "step": 483
    },
    {
      "epoch": 6.29,
      "learning_rate": 2.3170524836202936e-06,
      "loss": 1.5952,
      "step": 484
    },
    {
      "epoch": 6.3,
      "learning_rate": 2.2834752373803094e-06,
      "loss": 1.6348,
      "step": 485
    },
    {
      "epoch": 6.31,
      "learning_rate": 2.250111675056863e-06,
      "loss": 1.6074,
      "step": 486
    },
    {
      "epoch": 6.32,
      "learning_rate": 2.216962720544703e-06,
      "loss": 1.6284,
      "step": 487
    },
    {
      "epoch": 6.34,
      "learning_rate": 2.184029291795705e-06,
      "loss": 1.6143,
      "step": 488
    },
    {
      "epoch": 6.35,
      "learning_rate": 2.151312300793473e-06,
      "loss": 1.6323,
      "step": 489
    },
    {
      "epoch": 6.36,
      "learning_rate": 2.118812653528077e-06,
      "loss": 1.6387,
      "step": 490
    },
    {
      "epoch": 6.38,
      "learning_rate": 2.086531249970952e-06,
      "loss": 1.6016,
      "step": 491
    },
    {
      "epoch": 6.39,
      "learning_rate": 2.0544689840499988e-06,
      "loss": 1.6616,
      "step": 492
    },
    {
      "epoch": 6.4,
      "learning_rate": 2.022626743624807e-06,
      "loss": 1.6211,
      "step": 493
    },
    {
      "epoch": 6.42,
      "learning_rate": 1.991005410462089e-06,
      "loss": 1.6504,
      "step": 494
    },
    {
      "epoch": 6.43,
      "learning_rate": 1.9596058602112533e-06,
      "loss": 1.6748,
      "step": 495
    },
    {
      "epoch": 6.44,
      "learning_rate": 1.928428962380148e-06,
      "loss": 1.6597,
      "step": 496
    },
    {
      "epoch": 6.45,
      "learning_rate": 1.8974755803109968e-06,
      "loss": 1.6133,
      "step": 497
    },
    {
      "epoch": 6.47,
      "learning_rate": 1.866746571156479e-06,
      "loss": 1.6294,
      "step": 498
    },
    {
      "epoch": 6.48,
      "learning_rate": 1.8362427858560094e-06,
      "loss": 1.6074,
      "step": 499
    },
    {
      "epoch": 6.49,
      "learning_rate": 1.8059650691121611e-06,
      "loss": 1.645,
      "step": 500
    },
    {
      "epoch": 6.51,
      "learning_rate": 1.7759142593672707e-06,
      "loss": 1.5884,
      "step": 501
    },
    {
      "epoch": 6.52,
      "learning_rate": 1.74609118878024e-06,
      "loss": 1.6245,
      "step": 502
    },
    {
      "epoch": 6.53,
      "learning_rate": 1.7164966832034668e-06,
      "loss": 1.6309,
      "step": 503
    },
    {
      "epoch": 6.55,
      "learning_rate": 1.6871315621599982e-06,
      "loss": 1.6035,
      "step": 504
    },
    {
      "epoch": 6.56,
      "learning_rate": 1.6579966388208257e-06,
      "loss": 1.5688,
      "step": 505
    },
    {
      "epoch": 6.57,
      "learning_rate": 1.6290927199823604e-06,
      "loss": 1.5762,
      "step": 506
    },
    {
      "epoch": 6.58,
      "learning_rate": 1.6004206060441096e-06,
      "loss": 1.6323,
      "step": 507
    },
    {
      "epoch": 6.6,
      "learning_rate": 1.5719810909864941e-06,
      "loss": 1.5884,
      "step": 508
    },
    {
      "epoch": 6.61,
      "learning_rate": 1.543774962348874e-06,
      "loss": 1.6382,
      "step": 509
    },
    {
      "epoch": 6.62,
      "learning_rate": 1.5158030012077329e-06,
      "loss": 1.6279,
      "step": 510
    },
    {
      "epoch": 6.64,
      "learning_rate": 1.4880659821550547e-06,
      "loss": 1.6304,
      "step": 511
    },
    {
      "epoch": 6.65,
      "learning_rate": 1.4605646732768685e-06,
      "loss": 1.6289,
      "step": 512
    },
    {
      "epoch": 6.66,
      "learning_rate": 1.4332998361319783e-06,
      "loss": 1.5889,
      "step": 513
    },
    {
      "epoch": 6.68,
      "learning_rate": 1.4062722257308803e-06,
      "loss": 1.6221,
      "step": 514
    },
    {
      "epoch": 6.69,
      "learning_rate": 1.3794825905148557e-06,
      "loss": 1.604,
      "step": 515
    },
    {
      "epoch": 6.7,
      "learning_rate": 1.3529316723352303e-06,
      "loss": 1.6099,
      "step": 516
    },
    {
      "epoch": 6.71,
      "learning_rate": 1.3266202064328548e-06,
      "loss": 1.6045,
      "step": 517
    },
    {
      "epoch": 6.73,
      "learning_rate": 1.3005489214177213e-06,
      "loss": 1.6289,
      "step": 518
    },
    {
      "epoch": 6.74,
      "learning_rate": 1.2747185392488048e-06,
      "loss": 1.6519,
      "step": 519
    },
    {
      "epoch": 6.75,
      "learning_rate": 1.249129775214064e-06,
      "loss": 1.6338,
      "step": 520
    },
    {
      "epoch": 6.77,
      "learning_rate": 1.2237833379106257e-06,
      "loss": 1.6196,
      "step": 521
    },
    {
      "epoch": 6.78,
      "learning_rate": 1.1986799292251816e-06,
      "loss": 1.6104,
      "step": 522
    },
    {
      "epoch": 6.79,
      "learning_rate": 1.1738202443145307e-06,
      "loss": 1.6309,
      "step": 523
    },
    {
      "epoch": 6.81,
      "learning_rate": 1.1492049715863464e-06,
      "loss": 1.5845,
      "step": 524
    },
    {
      "epoch": 6.82,
      "learning_rate": 1.1248347926801029e-06,
      "loss": 1.582,
      "step": 525
    },
    {
      "epoch": 6.83,
      "learning_rate": 1.100710382448198e-06,
      "loss": 1.6553,
      "step": 526
    },
    {
      "epoch": 6.84,
      "learning_rate": 1.0768324089372816e-06,
      "loss": 1.5771,
      "step": 527
    },
    {
      "epoch": 6.86,
      "learning_rate": 1.053201533369731e-06,
      "loss": 1.6611,
      "step": 528
    },
    {
      "epoch": 6.87,
      "learning_rate": 1.029818410125365e-06,
      "loss": 1.6128,
      "step": 529
    },
    {
      "epoch": 6.88,
      "learning_rate": 1.0066836867233087e-06,
      "loss": 1.5957,
      "step": 530
    },
    {
      "epoch": 6.9,
      "learning_rate": 9.837980038040607e-07,
      "loss": 1.6299,
      "step": 531
    },
    {
      "epoch": 6.91,
      "learning_rate": 9.611619951117657e-07,
      "loss": 1.6147,
      "step": 532
    },
    {
      "epoch": 6.92,
      "learning_rate": 9.387762874766515e-07,
      "loss": 1.5864,
      "step": 533
    },
    {
      "epoch": 6.94,
      "learning_rate": 9.166415007976803e-07,
      "loss": 1.6245,
      "step": 534
    },
    {
      "epoch": 6.95,
      "learning_rate": 8.94758248025378e-07,
      "loss": 1.5781,
      "step": 535
    },
    {
      "epoch": 6.96,
      "learning_rate": 8.7312713514486e-07,
      "loss": 1.5845,
      "step": 536
    },
    {
      "epoch": 6.97,
      "learning_rate": 8.517487611590558e-07,
      "loss": 1.624,
      "step": 537
    },
    {
      "epoch": 6.99,
      "learning_rate": 8.306237180721121e-07,
|
"loss": 1.5811, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"learning_rate": 8.097525908730108e-07, |
|
"loss": 1.5898, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 7.01, |
|
"learning_rate": 7.891359575193613e-07, |
|
"loss": 1.5542, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"learning_rate": 7.687743889213939e-07, |
|
"loss": 1.6382, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"learning_rate": 7.486684489261609e-07, |
|
"loss": 1.6597, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 7.05, |
|
"learning_rate": 7.288186943019171e-07, |
|
"loss": 1.5918, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 7.06, |
|
"learning_rate": 7.092256747226944e-07, |
|
"loss": 1.6226, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 7.08, |
|
"learning_rate": 6.89889932753095e-07, |
|
"loss": 1.563, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 7.09, |
|
"learning_rate": 6.708120038332533e-07, |
|
"loss": 1.6348, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"learning_rate": 6.519924162640168e-07, |
|
"loss": 1.6089, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 7.12, |
|
"learning_rate": 6.334316911923155e-07, |
|
"loss": 1.6143, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 7.13, |
|
"learning_rate": 6.151303425967259e-07, |
|
"loss": 1.6396, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"learning_rate": 5.970888772732453e-07, |
|
"loss": 1.6387, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"learning_rate": 5.793077948212478e-07, |
|
"loss": 1.5835, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 7.17, |
|
"learning_rate": 5.617875876296641e-07, |
|
"loss": 1.6489, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 7.18, |
|
"learning_rate": 5.445287408633304e-07, |
|
"loss": 1.6318, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 7.19, |
|
"learning_rate": 5.27531732449561e-07, |
|
"loss": 1.645, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 7.21, |
|
"learning_rate": 5.107970330649204e-07, |
|
"loss": 1.5996, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 7.22, |
|
"learning_rate": 4.943251061221721e-07, |
|
"loss": 1.5962, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 7.23, |
|
"learning_rate": 4.78116407757464e-07, |
|
"loss": 1.6211, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 7.25, |
|
"learning_rate": 4.6217138681769026e-07, |
|
"loss": 1.6011, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 7.26, |
|
"learning_rate": 4.464904848480522e-07, |
|
"loss": 1.6392, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"learning_rate": 4.310741360798498e-07, |
|
"loss": 1.6265, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 7.29, |
|
"learning_rate": 4.1592276741844075e-07, |
|
"loss": 1.6255, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"learning_rate": 4.0103679843142895e-07, |
|
"loss": 1.6196, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 7.31, |
|
"learning_rate": 3.864166413370429e-07, |
|
"loss": 1.6201, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 7.32, |
|
"learning_rate": 3.720627009927158e-07, |
|
"loss": 1.6396, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"learning_rate": 3.5797537488388326e-07, |
|
"loss": 1.6553, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 7.35, |
|
"learning_rate": 3.441550531129667e-07, |
|
"loss": 1.6431, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 7.36, |
|
"learning_rate": 3.3060211838858104e-07, |
|
"loss": 1.6362, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 7.38, |
|
"learning_rate": 3.1731694601492834e-07, |
|
"loss": 1.5654, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 7.39, |
|
"learning_rate": 3.042999038814076e-07, |
|
"loss": 1.6074, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"learning_rate": 2.915513524524294e-07, |
|
"loss": 1.6094, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 7.42, |
|
"learning_rate": 2.790716447574304e-07, |
|
"loss": 1.6758, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 7.43, |
|
"learning_rate": 2.668611263811016e-07, |
|
"loss": 1.6313, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 7.44, |
|
"learning_rate": 2.5492013545381666e-07, |
|
"loss": 1.5835, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"learning_rate": 2.4324900264226405e-07, |
|
"loss": 1.5972, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"learning_rate": 2.3184805114029872e-07, |
|
"loss": 1.6689, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 7.48, |
|
"learning_rate": 2.2071759665998282e-07, |
|
"loss": 1.6304, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 7.49, |
|
"learning_rate": 2.098579474228546e-07, |
|
"loss": 1.6035, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"learning_rate": 1.9926940415138206e-07, |
|
"loss": 1.5952, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 7.52, |
|
"learning_rate": 1.8895226006064084e-07, |
|
"loss": 1.584, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 7.53, |
|
"learning_rate": 1.7890680085019597e-07, |
|
"loss": 1.6064, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 7.55, |
|
"learning_rate": 1.6913330469618628e-07, |
|
"loss": 1.6235, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 7.56, |
|
"learning_rate": 1.5963204224362261e-07, |
|
"loss": 1.6294, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 7.57, |
|
"learning_rate": 1.504032765988961e-07, |
|
"loss": 1.6055, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 7.58, |
|
"learning_rate": 1.4144726332248726e-07, |
|
"loss": 1.6353, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"learning_rate": 1.327642504218951e-07, |
|
"loss": 1.6108, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"learning_rate": 1.2435447834476254e-07, |
|
"loss": 1.6201, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 7.62, |
|
"learning_rate": 1.1621817997222507e-07, |
|
"loss": 1.6128, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 7.64, |
|
"learning_rate": 1.0835558061245587e-07, |
|
"loss": 1.6196, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"learning_rate": 1.0076689799442874e-07, |
|
"loss": 1.6621, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 7.66, |
|
"learning_rate": 9.34523422618916e-08, |
|
"loss": 1.6216, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 7.68, |
|
"learning_rate": 8.641211596754129e-08, |
|
"loss": 1.6289, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 7.69, |
|
"learning_rate": 7.964641406742135e-08, |
|
"loss": 1.6279, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"learning_rate": 7.315542391551966e-08, |
|
"loss": 1.6187, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 7.71, |
|
"learning_rate": 6.693932525857927e-08, |
|
"loss": 1.6445, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 7.73, |
|
"learning_rate": 6.099829023112236e-08, |
|
"loss": 1.6226, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 7.74, |
|
"learning_rate": 5.533248335068409e-08, |
|
"loss": 1.6025, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 7.75, |
|
"learning_rate": 4.994206151325509e-08, |
|
"loss": 1.5981, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 7.77, |
|
"learning_rate": 4.482717398894165e-08, |
|
"loss": 1.6479, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 7.78, |
|
"learning_rate": 3.998796241782232e-08, |
|
"loss": 1.6494, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"learning_rate": 3.5424560806036625e-08, |
|
"loss": 1.6328, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 7.81, |
|
"learning_rate": 3.1137095522068006e-08, |
|
"loss": 1.5732, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 7.82, |
|
"learning_rate": 2.7125685293245552e-08, |
|
"loss": 1.6196, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 7.83, |
|
"learning_rate": 2.3390441202455484e-08, |
|
"loss": 1.5894, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"learning_rate": 1.993146668506585e-08, |
|
"loss": 1.6172, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 7.86, |
|
"learning_rate": 1.6748857526066588e-08, |
|
"loss": 1.604, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 7.87, |
|
"learning_rate": 1.3842701857406104e-08, |
|
"loss": 1.6172, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 7.88, |
|
"learning_rate": 1.1213080155564327e-08, |
|
"loss": 1.6377, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 7.9, |
|
"learning_rate": 8.860065239311155e-09, |
|
"loss": 1.6064, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 7.91, |
|
"learning_rate": 6.783722267701409e-09, |
|
"loss": 1.6211, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"learning_rate": 4.984108738261828e-09, |
|
"loss": 1.6274, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 7.94, |
|
"learning_rate": 3.4612744854045645e-09, |
|
"loss": 1.6328, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 7.95, |
|
"learning_rate": 2.215261679042735e-09, |
|
"loss": 1.583, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 7.96, |
|
"learning_rate": 1.246104823426908e-09, |
|
"loss": 1.6318, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"learning_rate": 5.538307561858691e-10, |
|
"loss": 1.6265, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 7.99, |
|
"learning_rate": 1.3845864758610384e-10, |
|
"loss": 1.605, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"learning_rate": 0.0, |
|
"loss": 1.6025, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"step": 616, |
|
"total_flos": 1.5114021399418634e+18, |
|
"train_loss": 2.115578391335227, |
|
"train_runtime": 34978.7912, |
|
"train_samples_per_second": 2.252, |
|
"train_steps_per_second": 0.018 |
|
} |
|
], |
|
"max_steps": 616, |
|
"num_train_epochs": 8, |
|
"total_flos": 1.5114021399418634e+18, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|