|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0294117647058822, |
|
"eval_steps": 500, |
|
"global_step": 280, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.003676470588235294, |
|
"grad_norm": 0.20527108013629913, |
|
"learning_rate": 2.0000000000000002e-07, |
|
"loss": 1.7943, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.007352941176470588, |
|
"grad_norm": 0.24291086196899414, |
|
"learning_rate": 4.0000000000000003e-07, |
|
"loss": 1.8345, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.011029411764705883, |
|
"grad_norm": 0.20741452276706696, |
|
"learning_rate": 6.000000000000001e-07, |
|
"loss": 1.998, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.014705882352941176, |
|
"grad_norm": 0.242351695895195, |
|
"learning_rate": 8.000000000000001e-07, |
|
"loss": 1.814, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01838235294117647, |
|
"grad_norm": 0.20806454122066498, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 1.8541, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.022058823529411766, |
|
"grad_norm": 0.2232266068458557, |
|
"learning_rate": 1.2000000000000002e-06, |
|
"loss": 1.8491, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.025735294117647058, |
|
"grad_norm": 0.24171140789985657, |
|
"learning_rate": 1.4000000000000001e-06, |
|
"loss": 1.7849, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.029411764705882353, |
|
"grad_norm": 0.2743687331676483, |
|
"learning_rate": 1.6000000000000001e-06, |
|
"loss": 1.7188, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.03308823529411765, |
|
"grad_norm": 0.17957501113414764, |
|
"learning_rate": 1.8000000000000001e-06, |
|
"loss": 2.0571, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.03676470588235294, |
|
"grad_norm": 0.201459139585495, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 1.9404, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.04044117647058824, |
|
"grad_norm": 0.2665609121322632, |
|
"learning_rate": 2.2e-06, |
|
"loss": 1.9073, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.04411764705882353, |
|
"grad_norm": 0.2173280417919159, |
|
"learning_rate": 2.4000000000000003e-06, |
|
"loss": 1.8472, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.04779411764705882, |
|
"grad_norm": 1.175387978553772, |
|
"learning_rate": 2.6e-06, |
|
"loss": 1.9748, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.051470588235294115, |
|
"grad_norm": 0.22836479544639587, |
|
"learning_rate": 2.8000000000000003e-06, |
|
"loss": 1.799, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.05514705882352941, |
|
"grad_norm": 0.22088803350925446, |
|
"learning_rate": 3e-06, |
|
"loss": 1.735, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.058823529411764705, |
|
"grad_norm": 0.20766077935695648, |
|
"learning_rate": 3.2000000000000003e-06, |
|
"loss": 1.8871, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.0625, |
|
"grad_norm": 0.2599094808101654, |
|
"learning_rate": 3.4000000000000005e-06, |
|
"loss": 1.6319, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.0661764705882353, |
|
"grad_norm": 0.21609334647655487, |
|
"learning_rate": 3.6000000000000003e-06, |
|
"loss": 1.845, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.06985294117647059, |
|
"grad_norm": 0.21392510831356049, |
|
"learning_rate": 3.8000000000000005e-06, |
|
"loss": 1.925, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.07352941176470588, |
|
"grad_norm": 0.22204363346099854, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 1.83, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.07720588235294118, |
|
"grad_norm": 0.24028566479682922, |
|
"learning_rate": 4.2000000000000004e-06, |
|
"loss": 1.7375, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.08088235294117647, |
|
"grad_norm": 0.26085978746414185, |
|
"learning_rate": 4.4e-06, |
|
"loss": 1.7598, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.08455882352941177, |
|
"grad_norm": 0.29647570848464966, |
|
"learning_rate": 4.600000000000001e-06, |
|
"loss": 1.7059, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.08823529411764706, |
|
"grad_norm": 0.2606264054775238, |
|
"learning_rate": 4.800000000000001e-06, |
|
"loss": 1.7886, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.09191176470588236, |
|
"grad_norm": 0.20121997594833374, |
|
"learning_rate": 5e-06, |
|
"loss": 2.069, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.09558823529411764, |
|
"grad_norm": 0.23160211741924286, |
|
"learning_rate": 4.9999541991311605e-06, |
|
"loss": 1.9422, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.09926470588235294, |
|
"grad_norm": 0.2353476583957672, |
|
"learning_rate": 4.999816798202817e-06, |
|
"loss": 1.8665, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.10294117647058823, |
|
"grad_norm": 0.24767348170280457, |
|
"learning_rate": 4.999587802249433e-06, |
|
"loss": 1.7446, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.10661764705882353, |
|
"grad_norm": 0.24555310606956482, |
|
"learning_rate": 4.999267219661583e-06, |
|
"loss": 1.8733, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.11029411764705882, |
|
"grad_norm": 2.8036930561065674, |
|
"learning_rate": 4.9988550621856336e-06, |
|
"loss": 1.829, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.11397058823529412, |
|
"grad_norm": 0.1886998862028122, |
|
"learning_rate": 4.998351344923323e-06, |
|
"loss": 1.8259, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.11764705882352941, |
|
"grad_norm": 0.23365086317062378, |
|
"learning_rate": 4.997756086331198e-06, |
|
"loss": 1.9775, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.1213235294117647, |
|
"grad_norm": 0.18128617107868195, |
|
"learning_rate": 4.997069308219952e-06, |
|
"loss": 1.8398, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.125, |
|
"grad_norm": 0.21758407354354858, |
|
"learning_rate": 4.996291035753608e-06, |
|
"loss": 1.7429, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.12867647058823528, |
|
"grad_norm": 0.17675141990184784, |
|
"learning_rate": 4.995421297448614e-06, |
|
"loss": 1.9317, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.1323529411764706, |
|
"grad_norm": 0.18282735347747803, |
|
"learning_rate": 4.994460125172783e-06, |
|
"loss": 1.8007, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.13602941176470587, |
|
"grad_norm": 0.17422187328338623, |
|
"learning_rate": 4.993407554144137e-06, |
|
"loss": 1.9293, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.13970588235294118, |
|
"grad_norm": 0.20074324309825897, |
|
"learning_rate": 4.992263622929609e-06, |
|
"loss": 1.7154, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.14338235294117646, |
|
"grad_norm": 0.16826124489307404, |
|
"learning_rate": 4.991028373443635e-06, |
|
"loss": 1.8976, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.14705882352941177, |
|
"grad_norm": 0.1737484484910965, |
|
"learning_rate": 4.989701850946613e-06, |
|
"loss": 1.9842, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.15073529411764705, |
|
"grad_norm": 0.17321883141994476, |
|
"learning_rate": 4.988284104043251e-06, |
|
"loss": 1.691, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.15441176470588236, |
|
"grad_norm": 0.16045594215393066, |
|
"learning_rate": 4.986775184680782e-06, |
|
"loss": 1.9388, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.15808823529411764, |
|
"grad_norm": 0.1485867202281952, |
|
"learning_rate": 4.985175148147057e-06, |
|
"loss": 1.9412, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.16176470588235295, |
|
"grad_norm": 0.15609779953956604, |
|
"learning_rate": 4.983484053068529e-06, |
|
"loss": 1.8341, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.16544117647058823, |
|
"grad_norm": 0.14435409009456635, |
|
"learning_rate": 4.981701961408096e-06, |
|
"loss": 1.8187, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.16911764705882354, |
|
"grad_norm": 0.14100129902362823, |
|
"learning_rate": 4.979828938462836e-06, |
|
"loss": 1.8016, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.17279411764705882, |
|
"grad_norm": 0.13618572056293488, |
|
"learning_rate": 4.977865052861611e-06, |
|
"loss": 1.7432, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.17647058823529413, |
|
"grad_norm": 0.6496532559394836, |
|
"learning_rate": 4.975810376562555e-06, |
|
"loss": 1.7203, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.1801470588235294, |
|
"grad_norm": 0.1398126780986786, |
|
"learning_rate": 4.973664984850435e-06, |
|
"loss": 1.6294, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.18382352941176472, |
|
"grad_norm": 0.13840018212795258, |
|
"learning_rate": 4.971428956333896e-06, |
|
"loss": 1.7861, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.1875, |
|
"grad_norm": 0.11684554070234299, |
|
"learning_rate": 4.969102372942575e-06, |
|
"loss": 1.9307, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.19117647058823528, |
|
"grad_norm": 0.12185637652873993, |
|
"learning_rate": 4.966685319924105e-06, |
|
"loss": 1.8909, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.1948529411764706, |
|
"grad_norm": 0.11658155173063278, |
|
"learning_rate": 4.96417788584099e-06, |
|
"loss": 1.8678, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.19852941176470587, |
|
"grad_norm": 0.1272619068622589, |
|
"learning_rate": 4.961580162567358e-06, |
|
"loss": 1.8719, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.20220588235294118, |
|
"grad_norm": 0.12430202960968018, |
|
"learning_rate": 4.958892245285594e-06, |
|
"loss": 1.849, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.20588235294117646, |
|
"grad_norm": 0.11343392729759216, |
|
"learning_rate": 4.956114232482854e-06, |
|
"loss": 2.0185, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.20955882352941177, |
|
"grad_norm": 0.10528125613927841, |
|
"learning_rate": 4.953246225947461e-06, |
|
"loss": 1.8122, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.21323529411764705, |
|
"grad_norm": 0.11682136356830597, |
|
"learning_rate": 4.950288330765167e-06, |
|
"loss": 1.819, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.21691176470588236, |
|
"grad_norm": 0.10487648099660873, |
|
"learning_rate": 4.947240655315306e-06, |
|
"loss": 1.8969, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.22058823529411764, |
|
"grad_norm": 0.10324753075838089, |
|
"learning_rate": 4.944103311266827e-06, |
|
"loss": 1.9131, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.22426470588235295, |
|
"grad_norm": 0.11773849278688431, |
|
"learning_rate": 4.9408764135741955e-06, |
|
"loss": 1.8771, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.22794117647058823, |
|
"grad_norm": 0.10764829814434052, |
|
"learning_rate": 4.937560080473186e-06, |
|
"loss": 1.8007, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.23161764705882354, |
|
"grad_norm": 0.09463761746883392, |
|
"learning_rate": 4.934154433476548e-06, |
|
"loss": 1.7991, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.23529411764705882, |
|
"grad_norm": 0.11386000365018845, |
|
"learning_rate": 4.9306595973695545e-06, |
|
"loss": 1.8523, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.23897058823529413, |
|
"grad_norm": 0.10437195003032684, |
|
"learning_rate": 4.927075700205431e-06, |
|
"loss": 1.7621, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.2426470588235294, |
|
"grad_norm": 0.11349602788686752, |
|
"learning_rate": 4.923402873300659e-06, |
|
"loss": 1.7418, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.24632352941176472, |
|
"grad_norm": 0.16744942963123322, |
|
"learning_rate": 4.91964125123017e-06, |
|
"loss": 1.8985, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.11906889081001282, |
|
"learning_rate": 4.915790971822412e-06, |
|
"loss": 1.7156, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.2536764705882353, |
|
"grad_norm": 0.10837192088365555, |
|
"learning_rate": 4.911852176154298e-06, |
|
"loss": 1.9407, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.25735294117647056, |
|
"grad_norm": 0.12406501919031143, |
|
"learning_rate": 4.907825008546039e-06, |
|
"loss": 1.8413, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.2610294117647059, |
|
"grad_norm": 0.10750163346529007, |
|
"learning_rate": 4.903709616555854e-06, |
|
"loss": 1.9044, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.2647058823529412, |
|
"grad_norm": 0.11223389953374863, |
|
"learning_rate": 4.899506150974568e-06, |
|
"loss": 1.7632, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.26838235294117646, |
|
"grad_norm": 0.1262880116701126, |
|
"learning_rate": 4.8952147658200815e-06, |
|
"loss": 1.6656, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.27205882352941174, |
|
"grad_norm": 0.1263853758573532, |
|
"learning_rate": 4.890835618331729e-06, |
|
"loss": 1.7326, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.2757352941176471, |
|
"grad_norm": 0.10728048533201218, |
|
"learning_rate": 4.886368868964517e-06, |
|
"loss": 1.6571, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.27941176470588236, |
|
"grad_norm": 0.10924043506383896, |
|
"learning_rate": 4.8818146813832475e-06, |
|
"loss": 1.8581, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.28308823529411764, |
|
"grad_norm": 0.12963686883449554, |
|
"learning_rate": 4.877173222456521e-06, |
|
"loss": 1.6903, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.2867647058823529, |
|
"grad_norm": 0.11058393120765686, |
|
"learning_rate": 4.872444662250617e-06, |
|
"loss": 1.9864, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.29044117647058826, |
|
"grad_norm": 0.10086097568273544, |
|
"learning_rate": 4.867629174023269e-06, |
|
"loss": 1.7289, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.29411764705882354, |
|
"grad_norm": 0.19624628126621246, |
|
"learning_rate": 4.862726934217311e-06, |
|
"loss": 1.8319, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.2977941176470588, |
|
"grad_norm": 0.10972262173891068, |
|
"learning_rate": 4.857738122454219e-06, |
|
"loss": 1.9062, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.3014705882352941, |
|
"grad_norm": 0.1163967102766037, |
|
"learning_rate": 4.852662921527523e-06, |
|
"loss": 1.7979, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.30514705882352944, |
|
"grad_norm": 0.26644641160964966, |
|
"learning_rate": 4.847501517396111e-06, |
|
"loss": 1.8551, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.3088235294117647, |
|
"grad_norm": 0.097193643450737, |
|
"learning_rate": 4.84225409917742e-06, |
|
"loss": 1.8247, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.3125, |
|
"grad_norm": 0.09594879299402237, |
|
"learning_rate": 4.8369208591405e-06, |
|
"loss": 1.6824, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.3161764705882353, |
|
"grad_norm": 0.10404081642627716, |
|
"learning_rate": 4.831501992698972e-06, |
|
"loss": 1.8342, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.31985294117647056, |
|
"grad_norm": 0.10298274457454681, |
|
"learning_rate": 4.825997698403871e-06, |
|
"loss": 1.9121, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.3235294117647059, |
|
"grad_norm": 0.12172818183898926, |
|
"learning_rate": 4.820408177936365e-06, |
|
"loss": 1.8046, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.3272058823529412, |
|
"grad_norm": 0.10750740766525269, |
|
"learning_rate": 4.814733636100369e-06, |
|
"loss": 1.9354, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.33088235294117646, |
|
"grad_norm": 0.10367678105831146, |
|
"learning_rate": 4.808974280815039e-06, |
|
"loss": 2.0473, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.33455882352941174, |
|
"grad_norm": 0.10471010208129883, |
|
"learning_rate": 4.803130323107157e-06, |
|
"loss": 1.8647, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.3382352941176471, |
|
"grad_norm": 0.11597222834825516, |
|
"learning_rate": 4.797201977103395e-06, |
|
"loss": 1.6487, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.34191176470588236, |
|
"grad_norm": 0.10744346678256989, |
|
"learning_rate": 4.791189460022472e-06, |
|
"loss": 1.8301, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.34558823529411764, |
|
"grad_norm": 0.1046157106757164, |
|
"learning_rate": 4.785092992167192e-06, |
|
"loss": 1.8117, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.3492647058823529, |
|
"grad_norm": 0.10467652231454849, |
|
"learning_rate": 4.778912796916374e-06, |
|
"loss": 1.932, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.35294117647058826, |
|
"grad_norm": 0.10906960070133209, |
|
"learning_rate": 4.77264910071667e-06, |
|
"loss": 1.8958, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.35661764705882354, |
|
"grad_norm": 0.1271820366382599, |
|
"learning_rate": 4.766302133074261e-06, |
|
"loss": 1.5758, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.3602941176470588, |
|
"grad_norm": 0.10758531838655472, |
|
"learning_rate": 4.759872126546452e-06, |
|
"loss": 1.7336, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.3639705882352941, |
|
"grad_norm": 0.11692432314157486, |
|
"learning_rate": 4.753359316733154e-06, |
|
"loss": 1.7036, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.36764705882352944, |
|
"grad_norm": 0.11493757367134094, |
|
"learning_rate": 4.746763942268243e-06, |
|
"loss": 1.648, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.3713235294117647, |
|
"grad_norm": 0.11132095009088516, |
|
"learning_rate": 4.740086244810825e-06, |
|
"loss": 1.8441, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.375, |
|
"grad_norm": 0.11245792359113693, |
|
"learning_rate": 4.733326469036377e-06, |
|
"loss": 1.8768, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.3786764705882353, |
|
"grad_norm": 0.11370263248682022, |
|
"learning_rate": 4.726484862627779e-06, |
|
"loss": 1.8407, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.38235294117647056, |
|
"grad_norm": 0.1197124496102333, |
|
"learning_rate": 4.719561676266249e-06, |
|
"loss": 1.6798, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.3860294117647059, |
|
"grad_norm": 0.114149309694767, |
|
"learning_rate": 4.712557163622145e-06, |
|
"loss": 1.7261, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.3897058823529412, |
|
"grad_norm": 0.1167868822813034, |
|
"learning_rate": 4.7054715813456795e-06, |
|
"loss": 1.7402, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.39338235294117646, |
|
"grad_norm": 0.10888572782278061, |
|
"learning_rate": 4.698305189057512e-06, |
|
"loss": 1.7388, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.39705882352941174, |
|
"grad_norm": 0.11005932092666626, |
|
"learning_rate": 4.691058249339238e-06, |
|
"loss": 1.8437, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.4007352941176471, |
|
"grad_norm": 0.11730121821165085, |
|
"learning_rate": 4.683731027723764e-06, |
|
"loss": 1.7581, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.40441176470588236, |
|
"grad_norm": 0.10690360516309738, |
|
"learning_rate": 4.676323792685585e-06, |
|
"loss": 1.8829, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.40808823529411764, |
|
"grad_norm": 0.11015469580888748, |
|
"learning_rate": 4.668836815630939e-06, |
|
"loss": 1.856, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.4117647058823529, |
|
"grad_norm": 0.12613315880298615, |
|
"learning_rate": 4.661270370887872e-06, |
|
"loss": 1.737, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.41544117647058826, |
|
"grad_norm": 0.12080664932727814, |
|
"learning_rate": 4.6536247356961775e-06, |
|
"loss": 1.7363, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.41911764705882354, |
|
"grad_norm": 0.11145513504743576, |
|
"learning_rate": 4.645900190197242e-06, |
|
"loss": 1.6671, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.4227941176470588, |
|
"grad_norm": 0.11746977269649506, |
|
"learning_rate": 4.638097017423783e-06, |
|
"loss": 1.6717, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.4264705882352941, |
|
"grad_norm": 0.10946469008922577, |
|
"learning_rate": 4.6302155032894745e-06, |
|
"loss": 1.7082, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.43014705882352944, |
|
"grad_norm": 0.11189309507608414, |
|
"learning_rate": 4.622255936578473e-06, |
|
"loss": 1.7705, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.4338235294117647, |
|
"grad_norm": 0.12764301896095276, |
|
"learning_rate": 4.614218608934834e-06, |
|
"loss": 1.6192, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.4375, |
|
"grad_norm": 0.11146359890699387, |
|
"learning_rate": 4.606103814851829e-06, |
|
"loss": 1.8188, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.4411764705882353, |
|
"grad_norm": 0.15550808608531952, |
|
"learning_rate": 4.597911851661155e-06, |
|
"loss": 1.9713, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.44485294117647056, |
|
"grad_norm": 0.12268511950969696, |
|
"learning_rate": 4.589643019522036e-06, |
|
"loss": 1.575, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.4485294117647059, |
|
"grad_norm": 0.11507008224725723, |
|
"learning_rate": 4.581297621410231e-06, |
|
"loss": 1.8125, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.4522058823529412, |
|
"grad_norm": 0.13275226950645447, |
|
"learning_rate": 4.572875963106924e-06, |
|
"loss": 1.6451, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.45588235294117646, |
|
"grad_norm": 0.1096140444278717, |
|
"learning_rate": 4.564378353187533e-06, |
|
"loss": 1.879, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.45955882352941174, |
|
"grad_norm": 0.11247947067022324, |
|
"learning_rate": 4.555805103010388e-06, |
|
"loss": 1.5851, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.4632352941176471, |
|
"grad_norm": 0.12080512940883636, |
|
"learning_rate": 4.5471565267053365e-06, |
|
"loss": 1.6433, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.46691176470588236, |
|
"grad_norm": 0.1230054646730423, |
|
"learning_rate": 4.538432941162227e-06, |
|
"loss": 1.9295, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.47058823529411764, |
|
"grad_norm": 0.11661480367183685, |
|
"learning_rate": 4.529634666019294e-06, |
|
"loss": 1.8518, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.4742647058823529, |
|
"grad_norm": 0.10992003977298737, |
|
"learning_rate": 4.520762023651456e-06, |
|
"loss": 1.8141, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.47794117647058826, |
|
"grad_norm": 0.11171197146177292, |
|
"learning_rate": 4.511815339158497e-06, |
|
"loss": 1.8021, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.48161764705882354, |
|
"grad_norm": 0.1056443303823471, |
|
"learning_rate": 4.502794940353155e-06, |
|
"loss": 1.9079, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.4852941176470588, |
|
"grad_norm": 0.10654587298631668, |
|
"learning_rate": 4.493701157749112e-06, |
|
"loss": 1.7223, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.4889705882352941, |
|
"grad_norm": 0.11630342155694962, |
|
"learning_rate": 4.484534324548883e-06, |
|
"loss": 1.7638, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.49264705882352944, |
|
"grad_norm": 0.10967176407575607, |
|
"learning_rate": 4.4752947766316094e-06, |
|
"loss": 1.7722, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.4963235294117647, |
|
"grad_norm": 0.10630583018064499, |
|
"learning_rate": 4.465982852540747e-06, |
|
"loss": 1.7088, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.12106428295373917, |
|
"learning_rate": 4.456598893471668e-06, |
|
"loss": 1.7594, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.5036764705882353, |
|
"grad_norm": 0.11997083574533463, |
|
"learning_rate": 4.447143243259155e-06, |
|
"loss": 1.9027, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.5073529411764706, |
|
"grad_norm": 0.12016075849533081, |
|
"learning_rate": 4.437616248364805e-06, |
|
"loss": 1.8426, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.5110294117647058, |
|
"grad_norm": 0.1092967689037323, |
|
"learning_rate": 4.428018257864333e-06, |
|
"loss": 1.7154, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.5147058823529411, |
|
"grad_norm": 0.12067247182130814, |
|
"learning_rate": 4.41834962343478e-06, |
|
"loss": 1.7537, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.5183823529411765, |
|
"grad_norm": 0.10798091441392899, |
|
"learning_rate": 4.408610699341634e-06, |
|
"loss": 1.8887, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.5220588235294118, |
|
"grad_norm": 0.12265045195817947, |
|
"learning_rate": 4.398801842425842e-06, |
|
"loss": 1.5765, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.5257352941176471, |
|
"grad_norm": 0.112462118268013, |
|
"learning_rate": 4.38892341209074e-06, |
|
"loss": 1.924, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.5294117647058824, |
|
"grad_norm": 0.11943833529949188, |
|
"learning_rate": 4.378975770288881e-06, |
|
"loss": 1.793, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.5330882352941176, |
|
"grad_norm": 0.13217097520828247, |
|
"learning_rate": 4.368959281508776e-06, |
|
"loss": 1.7578, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.5367647058823529, |
|
"grad_norm": 0.13434284925460815, |
|
"learning_rate": 4.358874312761535e-06, |
|
"loss": 1.8399, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.5404411764705882, |
|
"grad_norm": 0.11146039515733719, |
|
"learning_rate": 4.348721233567424e-06, |
|
"loss": 1.9686, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.5441176470588235, |
|
"grad_norm": 0.11943879723548889, |
|
"learning_rate": 4.3385004159423195e-06, |
|
"loss": 1.7348, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.5477941176470589, |
|
"grad_norm": 0.1157916858792305, |
|
"learning_rate": 4.328212234384085e-06, |
|
"loss": 1.7748, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.5514705882352942, |
|
"grad_norm": 0.1146322712302208, |
|
"learning_rate": 4.317857065858843e-06, |
|
"loss": 1.607, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.5551470588235294, |
|
"grad_norm": 0.11403496563434601, |
|
"learning_rate": 4.307435289787169e-06, |
|
"loss": 1.8049, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.5588235294117647, |
|
"grad_norm": 0.11926942318677902, |
|
"learning_rate": 4.296947288030178e-06, |
|
"loss": 1.8483, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.5625, |
|
"grad_norm": 0.11152566224336624, |
|
"learning_rate": 4.286393444875546e-06, |
|
"loss": 1.8334, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.5661764705882353, |
|
"grad_norm": 0.11430760473012924, |
|
"learning_rate": 4.2757741470234214e-06, |
|
"loss": 1.9025, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.5698529411764706, |
|
"grad_norm": 0.11427745968103409, |
|
"learning_rate": 4.26508978357226e-06, |
|
"loss": 1.787, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.5735294117647058, |
|
"grad_norm": 0.11434771120548248, |
|
"learning_rate": 4.254340746004564e-06, |
|
"loss": 1.868, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.5772058823529411, |
|
"grad_norm": 0.11024492233991623, |
|
"learning_rate": 4.243527428172541e-06, |
|
"loss": 1.7228, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.5808823529411765, |
|
"grad_norm": 0.11791540682315826, |
|
"learning_rate": 4.232650226283672e-06, |
|
"loss": 1.9425, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.5845588235294118, |
|
"grad_norm": 0.1227816492319107, |
|
"learning_rate": 4.221709538886197e-06, |
|
"loss": 1.6622, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.5882352941176471, |
|
"grad_norm": 0.12510444223880768, |
|
"learning_rate": 4.210705766854505e-06, |
|
"loss": 1.7713, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.5919117647058824, |
|
"grad_norm": 0.1140972450375557, |
|
"learning_rate": 4.199639313374451e-06, |
|
"loss": 1.936, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.5955882352941176, |
|
"grad_norm": 0.12299997359514236, |
|
"learning_rate": 4.188510583928583e-06, |
|
"loss": 1.7037, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.5992647058823529, |
|
"grad_norm": 0.13803604245185852, |
|
"learning_rate": 4.177319986281285e-06, |
|
"loss": 1.9056, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.6029411764705882, |
|
"grad_norm": 0.13489902019500732, |
|
"learning_rate": 4.166067930463831e-06, |
|
"loss": 1.6053, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.6066176470588235, |
|
"grad_norm": 0.11227516829967499, |
|
"learning_rate": 4.154754828759368e-06, |
|
"loss": 1.7738, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.6102941176470589, |
|
"grad_norm": 0.1096302792429924, |
|
"learning_rate": 4.143381095687805e-06, |
|
"loss": 1.8948, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.6139705882352942, |
|
"grad_norm": 0.11712092161178589, |
|
"learning_rate": 4.131947147990629e-06, |
|
"loss": 1.8913, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.6176470588235294, |
|
"grad_norm": 0.12525494396686554, |
|
"learning_rate": 4.120453404615628e-06, |
|
"loss": 1.7482, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.6213235294117647, |
|
"grad_norm": 0.11608923971652985, |
|
"learning_rate": 4.108900286701553e-06, |
|
"loss": 1.6711, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.625, |
|
"grad_norm": 0.12490076571702957, |
|
"learning_rate": 4.097288217562669e-06, |
|
"loss": 1.797, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.6286764705882353, |
|
"grad_norm": 0.11467793583869934, |
|
"learning_rate": 4.085617622673265e-06, |
|
"loss": 1.7588, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.6323529411764706, |
|
"grad_norm": 0.11370235681533813, |
|
"learning_rate": 4.073888929652048e-06, |
|
"loss": 1.8153, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.6360294117647058, |
|
"grad_norm": 0.11949747055768967, |
|
"learning_rate": 4.062102568246482e-06, |
|
"loss": 1.6628, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.6397058823529411, |
|
"grad_norm": 0.11381959915161133, |
|
"learning_rate": 4.050258970317042e-06, |
|
"loss": 1.7939, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.6433823529411765, |
|
"grad_norm": 0.12485770136117935, |
|
"learning_rate": 4.0383585698213874e-06, |
|
"loss": 1.9112, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.6470588235294118, |
|
"grad_norm": 0.12126533687114716, |
|
"learning_rate": 4.0264018027984654e-06, |
|
"loss": 1.7908, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.6507352941176471, |
|
"grad_norm": 0.10532236844301224, |
|
"learning_rate": 4.01438910735253e-06, |
|
"loss": 1.857, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.6544117647058824, |
|
"grad_norm": 0.11462371796369553, |
|
"learning_rate": 4.002320923637091e-06, |
|
"loss": 1.961, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.6580882352941176, |
|
"grad_norm": 0.11390509456396103, |
|
"learning_rate": 3.99019769383879e-06, |
|
"loss": 1.7004, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.6617647058823529, |
|
"grad_norm": 0.11384452134370804, |
|
"learning_rate": 3.978019862161191e-06, |
|
"loss": 1.719, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.6654411764705882, |
|
"grad_norm": 0.12574177980422974, |
|
"learning_rate": 3.965787874808513e-06, |
|
"loss": 1.7728, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.6691176470588235, |
|
"grad_norm": 0.11700152605772018, |
|
"learning_rate": 3.953502179969274e-06, |
|
"loss": 1.7115, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.6727941176470589, |
|
"grad_norm": 0.15770043432712555, |
|
"learning_rate": 3.941163227799872e-06, |
|
"loss": 1.711, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.6764705882352942, |
|
"grad_norm": 0.11906450986862183, |
|
"learning_rate": 3.928771470408092e-06, |
|
"loss": 1.7453, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.6801470588235294, |
|
"grad_norm": 0.1260623037815094, |
|
"learning_rate": 3.916327361836536e-06, |
|
"loss": 1.8312, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.6838235294117647, |
|
"grad_norm": 0.11971792578697205, |
|
"learning_rate": 3.903831358045994e-06, |
|
"loss": 1.6005, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.6875, |
|
"grad_norm": 0.12143218517303467, |
|
"learning_rate": 3.891283916898729e-06, |
|
"loss": 1.7532, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.6911764705882353, |
|
"grad_norm": 0.13371998071670532, |
|
"learning_rate": 3.8786854981417064e-06, |
|
"loss": 1.7866, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.6948529411764706, |
|
"grad_norm": 0.13985855877399445, |
|
"learning_rate": 3.866036563389747e-06, |
|
"loss": 1.7175, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.6985294117647058, |
|
"grad_norm": 0.11457841098308563, |
|
"learning_rate": 3.85333757610861e-06, |
|
"loss": 2.0277, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.7022058823529411, |
|
"grad_norm": 0.1197565421462059, |
|
"learning_rate": 3.840589001598018e-06, |
|
"loss": 1.939, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.7058823529411765, |
|
"grad_norm": 0.117172010242939, |
|
"learning_rate": 3.827791306974602e-06, |
|
"loss": 1.8403, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.7095588235294118, |
|
"grad_norm": 0.11386429518461227, |
|
"learning_rate": 3.814944961154788e-06, |
|
"loss": 1.8311, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.7132352941176471, |
|
"grad_norm": 0.11297334730625153, |
|
"learning_rate": 3.802050434837615e-06, |
|
"loss": 1.8393, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.7169117647058824, |
|
"grad_norm": 0.11836741119623184, |
|
"learning_rate": 3.789108200487493e-06, |
|
"loss": 1.7931, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.7205882352941176, |
|
"grad_norm": 0.1100229024887085, |
|
"learning_rate": 3.77611873231688e-06, |
|
"loss": 1.8247, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.7242647058823529, |
|
"grad_norm": 0.13370762765407562, |
|
"learning_rate": 3.763082506268922e-06, |
|
"loss": 1.5297, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.7279411764705882, |
|
"grad_norm": 0.11646929383277893, |
|
"learning_rate": 3.7500000000000005e-06, |
|
"loss": 1.6622, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.7316176470588235, |
|
"grad_norm": 0.17706073820590973, |
|
"learning_rate": 3.736871692862239e-06, |
|
"loss": 1.7438, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.7352941176470589, |
|
"grad_norm": 0.1213546171784401, |
|
"learning_rate": 3.723698065885936e-06, |
|
"loss": 1.5792, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.7389705882352942, |
|
"grad_norm": 0.11523102223873138, |
|
"learning_rate": 3.7104796017619416e-06, |
|
"loss": 1.8361, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.7426470588235294, |
|
"grad_norm": 0.11374305188655853, |
|
"learning_rate": 3.6972167848239677e-06, |
|
"loss": 1.7189, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.7463235294117647, |
|
"grad_norm": 0.2544911503791809, |
|
"learning_rate": 3.683910101030846e-06, |
|
"loss": 1.7848, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.11955133825540543, |
|
"learning_rate": 3.6705600379487204e-06, |
|
"loss": 1.8594, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.7536764705882353, |
|
"grad_norm": 0.11935199052095413, |
|
"learning_rate": 3.6571670847331802e-06, |
|
"loss": 1.9072, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.7573529411764706, |
|
"grad_norm": 0.12341475486755371, |
|
"learning_rate": 3.6437317321113415e-06, |
|
"loss": 1.6725, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.7610294117647058, |
|
"grad_norm": 0.1144358366727829, |
|
"learning_rate": 3.6302544723638623e-06, |
|
"loss": 1.7683, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.7647058823529411, |
|
"grad_norm": 0.11911847442388535, |
|
"learning_rate": 3.6167357993069075e-06, |
|
"loss": 1.7537, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.7683823529411765, |
|
"grad_norm": 0.11903777718544006, |
|
"learning_rate": 3.603176208274054e-06, |
|
"loss": 1.8182, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.7720588235294118, |
|
"grad_norm": 0.1177648976445198, |
|
"learning_rate": 3.5895761960981423e-06, |
|
"loss": 1.8037, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.7757352941176471, |
|
"grad_norm": 0.11667867004871368, |
|
"learning_rate": 3.5759362610930733e-06, |
|
"loss": 1.7944, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.7794117647058824, |
|
"grad_norm": 0.11360645294189453, |
|
"learning_rate": 3.5622569030355434e-06, |
|
"loss": 1.6972, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.7830882352941176, |
|
"grad_norm": 0.12743256986141205, |
|
"learning_rate": 3.5485386231467417e-06, |
|
"loss": 1.6698, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.7867647058823529, |
|
"grad_norm": 0.1186446100473404, |
|
"learning_rate": 3.5347819240739783e-06, |
|
"loss": 1.7351, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.7904411764705882, |
|
"grad_norm": 0.13953393697738647, |
|
"learning_rate": 3.5209873098722693e-06, |
|
"loss": 1.8201, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.7941176470588235, |
|
"grad_norm": 0.11326795816421509, |
|
"learning_rate": 3.507155285985866e-06, |
|
"loss": 1.7309, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.7977941176470589, |
|
"grad_norm": 0.11922699958086014, |
|
"learning_rate": 3.4932863592297393e-06, |
|
"loss": 1.7146, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.8014705882352942, |
|
"grad_norm": 0.13740628957748413, |
|
"learning_rate": 3.4793810377710048e-06, |
|
"loss": 1.7193, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.8051470588235294, |
|
"grad_norm": 0.12394211441278458, |
|
"learning_rate": 3.465439831110306e-06, |
|
"loss": 1.8768, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.8088235294117647, |
|
"grad_norm": 0.12386978417634964, |
|
"learning_rate": 3.451463250063146e-06, |
|
"loss": 1.5901, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.8125, |
|
"grad_norm": 0.12373729050159454, |
|
"learning_rate": 3.4374518067411674e-06, |
|
"loss": 1.712, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.8161764705882353, |
|
"grad_norm": 0.12053066492080688, |
|
"learning_rate": 3.4234060145333937e-06, |
|
"loss": 1.8149, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.8198529411764706, |
|
"grad_norm": 0.1302081197500229, |
|
"learning_rate": 3.409326388087414e-06, |
|
"loss": 1.8146, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.8235294117647058, |
|
"grad_norm": 0.12400317192077637, |
|
"learning_rate": 3.3952134432905275e-06, |
|
"loss": 1.6355, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.8272058823529411, |
|
"grad_norm": 0.1284218281507492, |
|
"learning_rate": 3.3810676972508405e-06, |
|
"loss": 1.8496, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.8308823529411765, |
|
"grad_norm": 0.12382645905017853, |
|
"learning_rate": 3.3668896682783216e-06, |
|
"loss": 1.7146, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.8345588235294118, |
|
"grad_norm": 0.12787872552871704, |
|
"learning_rate": 3.3526798758658062e-06, |
|
"loss": 1.8304, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.8382352941176471, |
|
"grad_norm": 0.16194690763950348, |
|
"learning_rate": 3.338438840669964e-06, |
|
"loss": 1.6469, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.8419117647058824, |
|
"grad_norm": 0.1237102746963501, |
|
"learning_rate": 3.324167084492226e-06, |
|
"loss": 1.7982, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.8455882352941176, |
|
"grad_norm": 0.12435570359230042, |
|
"learning_rate": 3.3098651302596565e-06, |
|
"loss": 1.7429, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.8492647058823529, |
|
"grad_norm": 0.12152646481990814, |
|
"learning_rate": 3.2955335020057994e-06, |
|
"loss": 1.8001, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.8529411764705882, |
|
"grad_norm": 0.11374185979366302, |
|
"learning_rate": 3.281172724851476e-06, |
|
"loss": 1.8071, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.8566176470588235, |
|
"grad_norm": 0.12285105139017105, |
|
"learning_rate": 3.266783324985543e-06, |
|
"loss": 1.6797, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.8602941176470589, |
|
"grad_norm": 0.12639090418815613, |
|
"learning_rate": 3.252365829645612e-06, |
|
"loss": 1.7243, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.8639705882352942, |
|
"grad_norm": 0.11098191887140274, |
|
"learning_rate": 3.2379207670987352e-06, |
|
"loss": 1.8761, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.8676470588235294, |
|
"grad_norm": 0.11968422681093216, |
|
"learning_rate": 3.2234486666220437e-06, |
|
"loss": 1.8813, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.8713235294117647, |
|
"grad_norm": 0.12833014130592346, |
|
"learning_rate": 3.2089500584833577e-06, |
|
"loss": 1.6301, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.875, |
|
"grad_norm": 0.1229129508137703, |
|
"learning_rate": 3.1944254739217584e-06, |
|
"loss": 1.8822, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.8786764705882353, |
|
"grad_norm": 0.12925758957862854, |
|
"learning_rate": 3.179875445128119e-06, |
|
"loss": 1.9632, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.8823529411764706, |
|
"grad_norm": 0.12624742090702057, |
|
"learning_rate": 3.165300505225608e-06, |
|
"loss": 1.7239, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.8860294117647058, |
|
"grad_norm": 0.1159316673874855, |
|
"learning_rate": 3.150701188250153e-06, |
|
"loss": 1.7914, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.8897058823529411, |
|
"grad_norm": 0.12504705786705017, |
|
"learning_rate": 3.136078029130877e-06, |
|
"loss": 1.9467, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.8933823529411765, |
|
"grad_norm": 0.12300896644592285, |
|
"learning_rate": 3.1214315636704928e-06, |
|
"loss": 1.8546, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.8970588235294118, |
|
"grad_norm": 0.15699361264705658, |
|
"learning_rate": 3.106762328525677e-06, |
|
"loss": 1.7644, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.9007352941176471, |
|
"grad_norm": 1.1493163108825684, |
|
"learning_rate": 3.0920708611874006e-06, |
|
"loss": 1.8477, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.9044117647058824, |
|
"grad_norm": 0.1137952208518982, |
|
"learning_rate": 3.0773576999612375e-06, |
|
"loss": 1.7101, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.9080882352941176, |
|
"grad_norm": 0.1225443109869957, |
|
"learning_rate": 3.0626233839476434e-06, |
|
"loss": 1.8754, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.9117647058823529, |
|
"grad_norm": 0.12916752696037292, |
|
"learning_rate": 3.0478684530221977e-06, |
|
"loss": 1.8084, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.9154411764705882, |
|
"grad_norm": 0.12458333373069763, |
|
"learning_rate": 3.033093447815825e-06, |
|
"loss": 1.9428, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.9191176470588235, |
|
"grad_norm": 0.1416483372449875, |
|
"learning_rate": 3.018298909694986e-06, |
|
"loss": 1.8178, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.9227941176470589, |
|
"grad_norm": 0.1401221603155136, |
|
"learning_rate": 3.0034853807418412e-06, |
|
"loss": 1.7829, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.9264705882352942, |
|
"grad_norm": 0.11938274651765823, |
|
"learning_rate": 2.9886534037343872e-06, |
|
"loss": 1.6625, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.9301470588235294, |
|
"grad_norm": 0.1268637776374817, |
|
"learning_rate": 2.973803522126571e-06, |
|
"loss": 1.7527, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.9338235294117647, |
|
"grad_norm": 0.13126443326473236, |
|
"learning_rate": 2.9589362800283774e-06, |
|
"loss": 1.6907, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.9375, |
|
"grad_norm": 0.13425901532173157, |
|
"learning_rate": 2.9440522221858886e-06, |
|
"loss": 1.7868, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.9411764705882353, |
|
"grad_norm": 0.12785860896110535, |
|
"learning_rate": 2.9291518939613317e-06, |
|
"loss": 1.8627, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.9448529411764706, |
|
"grad_norm": 0.12060689181089401, |
|
"learning_rate": 2.914235841313088e-06, |
|
"loss": 1.7925, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.9485294117647058, |
|
"grad_norm": 0.12087797373533249, |
|
"learning_rate": 2.899304610775695e-06, |
|
"loss": 1.741, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.9522058823529411, |
|
"grad_norm": 0.1257392317056656, |
|
"learning_rate": 2.8843587494398177e-06, |
|
"loss": 1.9245, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.9558823529411765, |
|
"grad_norm": 0.13595956563949585, |
|
"learning_rate": 2.869398804932204e-06, |
|
"loss": 1.6015, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.9595588235294118, |
|
"grad_norm": 0.11528779566287994, |
|
"learning_rate": 2.854425325395619e-06, |
|
"loss": 1.8843, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.9632352941176471, |
|
"grad_norm": 0.17899318039417267, |
|
"learning_rate": 2.83943885946876e-06, |
|
"loss": 1.7506, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.9669117647058824, |
|
"grad_norm": 0.12404326349496841, |
|
"learning_rate": 2.824439956266156e-06, |
|
"loss": 1.8862, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.9705882352941176, |
|
"grad_norm": 0.14045919477939606, |
|
"learning_rate": 2.809429165358045e-06, |
|
"loss": 1.6858, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.9742647058823529, |
|
"grad_norm": 0.1323767602443695, |
|
"learning_rate": 2.7944070367502404e-06, |
|
"loss": 1.7981, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.9779411764705882, |
|
"grad_norm": 0.1255924552679062, |
|
"learning_rate": 2.7793741208639746e-06, |
|
"loss": 1.8193, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.9816176470588235, |
|
"grad_norm": 0.13566090166568756, |
|
"learning_rate": 2.7643309685157355e-06, |
|
"loss": 1.8234, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.9852941176470589, |
|
"grad_norm": 0.14218254387378693, |
|
"learning_rate": 2.7492781308970805e-06, |
|
"loss": 1.6932, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.9889705882352942, |
|
"grad_norm": 0.14237117767333984, |
|
"learning_rate": 2.7342161595544443e-06, |
|
"loss": 1.9034, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.9926470588235294, |
|
"grad_norm": 0.1268174946308136, |
|
"learning_rate": 2.7191456063689235e-06, |
|
"loss": 1.7338, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.9963235294117647, |
|
"grad_norm": 0.11195939034223557, |
|
"learning_rate": 2.7040670235360643e-06, |
|
"loss": 1.7169, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.12390360236167908, |
|
"learning_rate": 2.688980963545621e-06, |
|
"loss": 1.5995, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 1.0036764705882353, |
|
"grad_norm": 0.13594205677509308, |
|
"learning_rate": 2.6738879791613183e-06, |
|
"loss": 1.881, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 1.0073529411764706, |
|
"grad_norm": 0.12911252677440643, |
|
"learning_rate": 2.658788623400595e-06, |
|
"loss": 1.96, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 1.0110294117647058, |
|
"grad_norm": 0.12003444135189056, |
|
"learning_rate": 2.6436834495143398e-06, |
|
"loss": 1.8635, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 1.0147058823529411, |
|
"grad_norm": 0.15043824911117554, |
|
"learning_rate": 2.6285730109666245e-06, |
|
"loss": 1.6302, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 1.0183823529411764, |
|
"grad_norm": 0.12194662541151047, |
|
"learning_rate": 2.61345786141442e-06, |
|
"loss": 1.8137, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 1.0220588235294117, |
|
"grad_norm": 0.13448664546012878, |
|
"learning_rate": 2.598338554687312e-06, |
|
"loss": 1.6858, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 1.025735294117647, |
|
"grad_norm": 0.12283740192651749, |
|
"learning_rate": 2.5832156447672074e-06, |
|
"loss": 1.6681, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 1.0294117647058822, |
|
"grad_norm": 0.1438128501176834, |
|
"learning_rate": 2.568089685768038e-06, |
|
"loss": 1.8845, |
|
"step": 280 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 544, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 28, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 5.063239288435507e+18, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |