{
  "best_metric": 0.8919714689254761,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.6756756756756757,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0033783783783783786, "grad_norm": 3.790795087814331, "learning_rate": 5e-06, "loss": 1.4948, "step": 1 },
    { "epoch": 0.0033783783783783786, "eval_loss": 2.366645336151123, "eval_runtime": 45.5726, "eval_samples_per_second": 10.95, "eval_steps_per_second": 5.486, "step": 1 },
    { "epoch": 0.006756756756756757, "grad_norm": 4.27876091003418, "learning_rate": 1e-05, "loss": 1.6839, "step": 2 },
    { "epoch": 0.010135135135135136, "grad_norm": 4.081669807434082, "learning_rate": 1.5e-05, "loss": 1.5767, "step": 3 },
    { "epoch": 0.013513513513513514, "grad_norm": 3.980726480484009, "learning_rate": 2e-05, "loss": 1.5418, "step": 4 },
    { "epoch": 0.016891891891891893, "grad_norm": 3.4310128688812256, "learning_rate": 2.5e-05, "loss": 1.5372, "step": 5 },
    { "epoch": 0.02027027027027027, "grad_norm": 2.8119819164276123, "learning_rate": 3e-05, "loss": 1.3767, "step": 6 },
    { "epoch": 0.02364864864864865, "grad_norm": 2.1277658939361572, "learning_rate": 3.5e-05, "loss": 1.2593, "step": 7 },
    { "epoch": 0.02702702702702703, "grad_norm": 1.5391322374343872, "learning_rate": 4e-05, "loss": 1.2748, "step": 8 },
    { "epoch": 0.030405405405405407, "grad_norm": 1.341151237487793, "learning_rate": 4.5e-05, "loss": 1.1627, "step": 9 },
    { "epoch": 0.033783783783783786, "grad_norm": 1.4433046579360962, "learning_rate": 5e-05, "loss": 1.1408, "step": 10 },
    { "epoch": 0.037162162162162164, "grad_norm": 1.3264684677124023, "learning_rate": 5.500000000000001e-05, "loss": 1.0924, "step": 11 },
    { "epoch": 0.04054054054054054, "grad_norm": 1.5089952945709229, "learning_rate": 6e-05, "loss": 1.1045, "step": 12 },
    { "epoch": 0.04391891891891892, "grad_norm": 1.2900017499923706, "learning_rate": 6.500000000000001e-05, "loss": 1.0468, "step": 13 },
    { "epoch": 0.0472972972972973, "grad_norm": 1.226200819015503, "learning_rate": 7e-05, "loss": 1.0241, "step": 14 },
    { "epoch": 0.05067567567567568, "grad_norm": 1.1079437732696533, "learning_rate": 7.500000000000001e-05, "loss": 1.0797, "step": 15 },
    { "epoch": 0.05405405405405406, "grad_norm": 1.118066430091858, "learning_rate": 8e-05, "loss": 1.064, "step": 16 },
    { "epoch": 0.057432432432432436, "grad_norm": 1.14432954788208, "learning_rate": 8.5e-05, "loss": 1.0531, "step": 17 },
    { "epoch": 0.060810810810810814, "grad_norm": 1.10897696018219, "learning_rate": 9e-05, "loss": 1.1692, "step": 18 },
    { "epoch": 0.06418918918918919, "grad_norm": 1.2004364728927612, "learning_rate": 9.5e-05, "loss": 1.233, "step": 19 },
    { "epoch": 0.06756756756756757, "grad_norm": 1.031334638595581, "learning_rate": 0.0001, "loss": 0.9639, "step": 20 },
    { "epoch": 0.07094594594594594, "grad_norm": 1.0391218662261963, "learning_rate": 9.999238475781957e-05, "loss": 1.0314, "step": 21 },
    { "epoch": 0.07432432432432433, "grad_norm": 1.0851644277572632, "learning_rate": 9.99695413509548e-05, "loss": 0.9859, "step": 22 },
    { "epoch": 0.0777027027027027, "grad_norm": 1.3139114379882812, "learning_rate": 9.99314767377287e-05, "loss": 1.1277, "step": 23 },
    { "epoch": 0.08108108108108109, "grad_norm": 1.143263339996338, "learning_rate": 9.987820251299122e-05, "loss": 1.1157, "step": 24 },
    { "epoch": 0.08445945945945946, "grad_norm": 1.2163656949996948, "learning_rate": 9.980973490458728e-05, "loss": 1.0792, "step": 25 },
    { "epoch": 0.08783783783783784, "grad_norm": 1.844299077987671, "learning_rate": 9.972609476841367e-05, "loss": 1.1085, "step": 26 },
    { "epoch": 0.09121621621621621, "grad_norm": 1.5571035146713257, "learning_rate": 9.962730758206611e-05, "loss": 1.0205, "step": 27 },
    { "epoch": 0.0945945945945946, "grad_norm": 1.331886887550354, "learning_rate": 9.951340343707852e-05, "loss": 1.1869, "step": 28 },
    { "epoch": 0.09797297297297297, "grad_norm": 1.3357473611831665, "learning_rate": 9.938441702975689e-05, "loss": 1.059, "step": 29 },
    { "epoch": 0.10135135135135136, "grad_norm": 1.6065927743911743, "learning_rate": 9.924038765061042e-05, "loss": 1.2931, "step": 30 },
    { "epoch": 0.10472972972972973, "grad_norm": 1.4034627676010132, "learning_rate": 9.908135917238321e-05, "loss": 1.2066, "step": 31 },
    { "epoch": 0.10810810810810811, "grad_norm": 1.4192296266555786, "learning_rate": 9.890738003669029e-05, "loss": 1.216, "step": 32 },
    { "epoch": 0.11148648648648649, "grad_norm": 1.8513731956481934, "learning_rate": 9.871850323926177e-05, "loss": 1.1236, "step": 33 },
    { "epoch": 0.11486486486486487, "grad_norm": 1.535695195198059, "learning_rate": 9.851478631379982e-05, "loss": 1.2842, "step": 34 },
    { "epoch": 0.11824324324324324, "grad_norm": 1.70310640335083, "learning_rate": 9.829629131445342e-05, "loss": 1.0982, "step": 35 },
    { "epoch": 0.12162162162162163, "grad_norm": 1.8845494985580444, "learning_rate": 9.806308479691595e-05, "loss": 1.1928, "step": 36 },
    { "epoch": 0.125, "grad_norm": 1.9401278495788574, "learning_rate": 9.781523779815179e-05, "loss": 1.1353, "step": 37 },
    { "epoch": 0.12837837837837837, "grad_norm": 1.7827504873275757, "learning_rate": 9.755282581475769e-05, "loss": 1.0933, "step": 38 },
    { "epoch": 0.13175675675675674, "grad_norm": 2.116203546524048, "learning_rate": 9.727592877996585e-05, "loss": 1.2047, "step": 39 },
    { "epoch": 0.13513513513513514, "grad_norm": 1.9922860860824585, "learning_rate": 9.698463103929542e-05, "loss": 1.2051, "step": 40 },
    { "epoch": 0.13851351351351351, "grad_norm": 1.739909052848816, "learning_rate": 9.667902132486009e-05, "loss": 1.0593, "step": 41 },
    { "epoch": 0.14189189189189189, "grad_norm": 1.9242291450500488, "learning_rate": 9.635919272833938e-05, "loss": 1.0776, "step": 42 },
    { "epoch": 0.14527027027027026, "grad_norm": 2.0772931575775146, "learning_rate": 9.602524267262203e-05, "loss": 1.0936, "step": 43 },
    { "epoch": 0.14864864864864866, "grad_norm": 2.72910475730896, "learning_rate": 9.567727288213005e-05, "loss": 1.2138, "step": 44 },
    { "epoch": 0.15202702702702703, "grad_norm": 2.3262908458709717, "learning_rate": 9.53153893518325e-05, "loss": 0.8939, "step": 45 },
    { "epoch": 0.1554054054054054, "grad_norm": 2.338627815246582, "learning_rate": 9.493970231495835e-05, "loss": 0.919, "step": 46 },
    { "epoch": 0.15878378378378377, "grad_norm": 2.6159088611602783, "learning_rate": 9.45503262094184e-05, "loss": 0.9041, "step": 47 },
    { "epoch": 0.16216216216216217, "grad_norm": 3.0837512016296387, "learning_rate": 9.414737964294636e-05, "loss": 0.9639, "step": 48 },
    { "epoch": 0.16554054054054054, "grad_norm": 3.6426544189453125, "learning_rate": 9.373098535696979e-05, "loss": 1.1832, "step": 49 },
    { "epoch": 0.16891891891891891, "grad_norm": 3.982973575592041, "learning_rate": 9.330127018922194e-05, "loss": 0.9804, "step": 50 },
    { "epoch": 0.16891891891891891, "eval_loss": 1.1289108991622925, "eval_runtime": 46.3711, "eval_samples_per_second": 10.761, "eval_steps_per_second": 5.391, "step": 50 },
    { "epoch": 0.17229729729729729, "grad_norm": 1.6470714807510376, "learning_rate": 9.285836503510562e-05, "loss": 0.9835, "step": 51 },
    { "epoch": 0.17567567567567569, "grad_norm": 1.65348482131958, "learning_rate": 9.24024048078213e-05, "loss": 1.044, "step": 52 },
    { "epoch": 0.17905405405405406, "grad_norm": 0.9217798709869385, "learning_rate": 9.193352839727121e-05, "loss": 0.9206, "step": 53 },
    { "epoch": 0.18243243243243243, "grad_norm": 0.6984456181526184, "learning_rate": 9.145187862775209e-05, "loss": 0.724, "step": 54 },
    { "epoch": 0.1858108108108108, "grad_norm": 0.8570162057876587, "learning_rate": 9.09576022144496e-05, "loss": 0.9921, "step": 55 },
    { "epoch": 0.1891891891891892, "grad_norm": 0.8727167248725891, "learning_rate": 9.045084971874738e-05, "loss": 0.9956, "step": 56 },
    { "epoch": 0.19256756756756757, "grad_norm": 0.9744833707809448, "learning_rate": 8.993177550236464e-05, "loss": 0.9819, "step": 57 },
    { "epoch": 0.19594594594594594, "grad_norm": 0.98649662733078, "learning_rate": 8.940053768033609e-05, "loss": 0.887, "step": 58 },
    { "epoch": 0.19932432432432431, "grad_norm": 0.8099063634872437, "learning_rate": 8.885729807284856e-05, "loss": 0.8632, "step": 59 },
    { "epoch": 0.20270270270270271, "grad_norm": 0.8015736937522888, "learning_rate": 8.83022221559489e-05, "loss": 0.9062, "step": 60 },
    { "epoch": 0.20608108108108109, "grad_norm": 0.868614137172699, "learning_rate": 8.773547901113862e-05, "loss": 0.9653, "step": 61 },
    { "epoch": 0.20945945945945946, "grad_norm": 0.7105808258056641, "learning_rate": 8.715724127386972e-05, "loss": 0.8513, "step": 62 },
    { "epoch": 0.21283783783783783, "grad_norm": 0.7809450030326843, "learning_rate": 8.656768508095853e-05, "loss": 0.9441, "step": 63 },
    { "epoch": 0.21621621621621623, "grad_norm": 0.910719096660614, "learning_rate": 8.596699001693255e-05, "loss": 0.929, "step": 64 },
    { "epoch": 0.2195945945945946, "grad_norm": 0.8238301873207092, "learning_rate": 8.535533905932738e-05, "loss": 0.8879, "step": 65 },
    { "epoch": 0.22297297297297297, "grad_norm": 0.8302669525146484, "learning_rate": 8.473291852294987e-05, "loss": 0.8487, "step": 66 },
    { "epoch": 0.22635135135135134, "grad_norm": 0.9159395694732666, "learning_rate": 8.409991800312493e-05, "loss": 0.939, "step": 67 },
    { "epoch": 0.22972972972972974, "grad_norm": 0.8282083868980408, "learning_rate": 8.345653031794292e-05, "loss": 0.9193, "step": 68 },
    { "epoch": 0.23310810810810811, "grad_norm": 0.877231240272522, "learning_rate": 8.280295144952536e-05, "loss": 0.9358, "step": 69 },
    { "epoch": 0.23648648648648649, "grad_norm": 1.0099470615386963, "learning_rate": 8.213938048432697e-05, "loss": 0.9245, "step": 70 },
    { "epoch": 0.23986486486486486, "grad_norm": 1.0175057649612427, "learning_rate": 8.146601955249188e-05, "loss": 1.0411, "step": 71 },
    { "epoch": 0.24324324324324326, "grad_norm": 0.9246781468391418, "learning_rate": 8.07830737662829e-05, "loss": 0.9697, "step": 72 },
    { "epoch": 0.24662162162162163, "grad_norm": 0.9959827661514282, "learning_rate": 8.009075115760243e-05, "loss": 0.936, "step": 73 },
    { "epoch": 0.25, "grad_norm": 1.1503347158432007, "learning_rate": 7.938926261462366e-05, "loss": 0.9295, "step": 74 },
    { "epoch": 0.2533783783783784, "grad_norm": 1.245553731918335, "learning_rate": 7.86788218175523e-05, "loss": 1.1697, "step": 75 },
    { "epoch": 0.25675675675675674, "grad_norm": 1.1103030443191528, "learning_rate": 7.795964517353735e-05, "loss": 0.851, "step": 76 },
    { "epoch": 0.26013513513513514, "grad_norm": 1.039109468460083, "learning_rate": 7.723195175075136e-05, "loss": 0.9779, "step": 77 },
    { "epoch": 0.2635135135135135, "grad_norm": 1.1795552968978882, "learning_rate": 7.649596321166024e-05, "loss": 1.0418, "step": 78 },
    { "epoch": 0.2668918918918919, "grad_norm": 1.2785820960998535, "learning_rate": 7.575190374550272e-05, "loss": 1.1016, "step": 79 },
    { "epoch": 0.2702702702702703, "grad_norm": 1.204482913017273, "learning_rate": 7.500000000000001e-05, "loss": 1.136, "step": 80 },
    { "epoch": 0.27364864864864863, "grad_norm": 1.3328748941421509, "learning_rate": 7.424048101231686e-05, "loss": 1.1576, "step": 81 },
    { "epoch": 0.27702702702702703, "grad_norm": 1.5070085525512695, "learning_rate": 7.347357813929454e-05, "loss": 1.2457, "step": 82 },
    { "epoch": 0.28040540540540543, "grad_norm": 1.3167310953140259, "learning_rate": 7.269952498697734e-05, "loss": 1.0597, "step": 83 },
    { "epoch": 0.28378378378378377, "grad_norm": 1.436022400856018, "learning_rate": 7.191855733945387e-05, "loss": 1.0113, "step": 84 },
    { "epoch": 0.28716216216216217, "grad_norm": 1.5455635786056519, "learning_rate": 7.113091308703498e-05, "loss": 1.046, "step": 85 },
    { "epoch": 0.2905405405405405, "grad_norm": 1.286605954170227, "learning_rate": 7.033683215379002e-05, "loss": 0.979, "step": 86 },
    { "epoch": 0.2939189189189189, "grad_norm": 1.3941338062286377, "learning_rate": 6.953655642446368e-05, "loss": 0.9772, "step": 87 },
    { "epoch": 0.2972972972972973, "grad_norm": 1.5202627182006836, "learning_rate": 6.873032967079561e-05, "loss": 1.0439, "step": 88 },
    { "epoch": 0.30067567567567566, "grad_norm": 1.6103655099868774, "learning_rate": 6.7918397477265e-05, "loss": 1.1001, "step": 89 },
    { "epoch": 0.30405405405405406, "grad_norm": 1.8977413177490234, "learning_rate": 6.710100716628344e-05, "loss": 1.1716, "step": 90 },
    { "epoch": 0.30743243243243246, "grad_norm": 1.7053625583648682, "learning_rate": 6.627840772285784e-05, "loss": 0.853, "step": 91 },
    { "epoch": 0.3108108108108108, "grad_norm": 1.7277348041534424, "learning_rate": 6.545084971874738e-05, "loss": 1.0563, "step": 92 },
    { "epoch": 0.3141891891891892, "grad_norm": 1.7474844455718994, "learning_rate": 6.461858523613684e-05, "loss": 0.8952, "step": 93 },
    { "epoch": 0.31756756756756754, "grad_norm": 1.7933131456375122, "learning_rate": 6.378186779084995e-05, "loss": 0.9742, "step": 94 },
    { "epoch": 0.32094594594594594, "grad_norm": 1.790142297744751, "learning_rate": 6.294095225512603e-05, "loss": 0.9332, "step": 95 },
    { "epoch": 0.32432432432432434, "grad_norm": 2.3483188152313232, "learning_rate": 6.209609477998338e-05, "loss": 0.8821, "step": 96 },
    { "epoch": 0.3277027027027027, "grad_norm": 2.0624172687530518, "learning_rate": 6.124755271719325e-05, "loss": 0.7516, "step": 97 },
    { "epoch": 0.3310810810810811, "grad_norm": 2.341597318649292, "learning_rate": 6.0395584540887963e-05, "loss": 0.7419, "step": 98 },
    { "epoch": 0.3344594594594595, "grad_norm": 2.4898014068603516, "learning_rate": 5.9540449768827246e-05, "loss": 0.8187, "step": 99 },
    { "epoch": 0.33783783783783783, "grad_norm": 3.681002140045166, "learning_rate": 5.868240888334653e-05, "loss": 1.0257, "step": 100 },
    { "epoch": 0.33783783783783783, "eval_loss": 0.9962962865829468, "eval_runtime": 46.3235, "eval_samples_per_second": 10.772, "eval_steps_per_second": 5.397, "step": 100 },
    { "epoch": 0.34121621621621623, "grad_norm": 1.0150301456451416, "learning_rate": 5.782172325201155e-05, "loss": 0.8399, "step": 101 },
    { "epoch": 0.34459459459459457, "grad_norm": 0.9101819396018982, "learning_rate": 5.695865504800327e-05, "loss": 0.8332, "step": 102 },
    { "epoch": 0.34797297297297297, "grad_norm": 0.7298635244369507, "learning_rate": 5.6093467170257374e-05, "loss": 0.7701, "step": 103 },
    { "epoch": 0.35135135135135137, "grad_norm": 0.7302980422973633, "learning_rate": 5.522642316338268e-05, "loss": 0.8608, "step": 104 },
    { "epoch": 0.3547297297297297, "grad_norm": 0.7403108477592468, "learning_rate": 5.435778713738292e-05, "loss": 0.8571, "step": 105 },
    { "epoch": 0.3581081081081081, "grad_norm": 0.7119635939598083, "learning_rate": 5.348782368720626e-05, "loss": 0.8834, "step": 106 },
    { "epoch": 0.3614864864864865, "grad_norm": 0.7150499820709229, "learning_rate": 5.26167978121472e-05, "loss": 0.8476, "step": 107 },
    { "epoch": 0.36486486486486486, "grad_norm": 2.773348331451416, "learning_rate": 5.174497483512506e-05, "loss": 0.8951, "step": 108 },
    { "epoch": 0.36824324324324326, "grad_norm": 0.7925100326538086, "learning_rate": 5.0872620321864185e-05, "loss": 0.8753, "step": 109 },
    { "epoch": 0.3716216216216216, "grad_norm": 0.9120268821716309, "learning_rate": 5e-05, "loss": 0.9304, "step": 110 },
    { "epoch": 0.375, "grad_norm": 0.923784613609314, "learning_rate": 4.912737967813583e-05, "loss": 0.9109, "step": 111 },
    { "epoch": 0.3783783783783784, "grad_norm": 0.7621331214904785, "learning_rate": 4.825502516487497e-05, "loss": 0.8805, "step": 112 },
    { "epoch": 0.38175675675675674, "grad_norm": 0.989507794380188, "learning_rate": 4.738320218785281e-05, "loss": 0.9356, "step": 113 },
    { "epoch": 0.38513513513513514, "grad_norm": 1.1281342506408691, "learning_rate": 4.6512176312793736e-05, "loss": 0.9762, "step": 114 },
    { "epoch": 0.3885135135135135, "grad_norm": 1.0303744077682495, "learning_rate": 4.564221286261709e-05, "loss": 0.9971, "step": 115 },
    { "epoch": 0.3918918918918919, "grad_norm": 0.8193778991699219, "learning_rate": 4.477357683661734e-05, "loss": 0.9003, "step": 116 },
    { "epoch": 0.3952702702702703, "grad_norm": 0.8731766939163208, "learning_rate": 4.390653282974264e-05, "loss": 0.9704, "step": 117 },
    { "epoch": 0.39864864864864863, "grad_norm": 1.064092755317688, "learning_rate": 4.3041344951996746e-05, "loss": 1.0779, "step": 118 },
    { "epoch": 0.40202702702702703, "grad_norm": 0.8438169360160828, "learning_rate": 4.2178276747988446e-05, "loss": 0.9074, "step": 119 },
    { "epoch": 0.40540540540540543, "grad_norm": 0.8278259634971619, "learning_rate": 4.131759111665349e-05, "loss": 0.9602, "step": 120 },
    { "epoch": 0.40878378378378377, "grad_norm": 0.8631948232650757, "learning_rate": 4.045955023117276e-05, "loss": 0.8994, "step": 121 },
    { "epoch": 0.41216216216216217, "grad_norm": 0.9589402079582214, "learning_rate": 3.960441545911204e-05, "loss": 1.0671, "step": 122 },
    { "epoch": 0.4155405405405405, "grad_norm": 0.9513259530067444, "learning_rate": 3.875244728280676e-05, "loss": 1.0584, "step": 123 },
    { "epoch": 0.4189189189189189, "grad_norm": 1.0139130353927612, "learning_rate": 3.790390522001662e-05, "loss": 0.9865, "step": 124 },
    { "epoch": 0.4222972972972973, "grad_norm": 0.9505813121795654, "learning_rate": 3.705904774487396e-05, "loss": 0.9182, "step": 125 },
    { "epoch": 0.42567567567567566, "grad_norm": 0.9823381900787354, "learning_rate": 3.6218132209150045e-05, "loss": 0.9159, "step": 126 },
    { "epoch": 0.42905405405405406, "grad_norm": 1.0299394130706787, "learning_rate": 3.5381414763863166e-05, "loss": 0.9573, "step": 127 },
    { "epoch": 0.43243243243243246, "grad_norm": 1.2664399147033691, "learning_rate": 3.4549150281252636e-05, "loss": 1.1676, "step": 128 },
    { "epoch": 0.4358108108108108, "grad_norm": 1.107443928718567, "learning_rate": 3.372159227714218e-05, "loss": 0.9744, "step": 129 },
    { "epoch": 0.4391891891891892, "grad_norm": 1.2630161046981812, "learning_rate": 3.289899283371657e-05, "loss": 1.0434, "step": 130 },
    { "epoch": 0.44256756756756754, "grad_norm": 1.2209265232086182, "learning_rate": 3.2081602522734986e-05, "loss": 0.9721, "step": 131 },
    { "epoch": 0.44594594594594594, "grad_norm": 1.1842795610427856, "learning_rate": 3.12696703292044e-05, "loss": 0.9618, "step": 132 },
    { "epoch": 0.44932432432432434, "grad_norm": 1.22838294506073, "learning_rate": 3.046344357553632e-05, "loss": 0.981, "step": 133 },
    { "epoch": 0.4527027027027027, "grad_norm": 1.260001301765442, "learning_rate": 2.9663167846209998e-05, "loss": 0.9968, "step": 134 },
    { "epoch": 0.4560810810810811, "grad_norm": 1.4171466827392578, "learning_rate": 2.886908691296504e-05, "loss": 1.0873, "step": 135 },
    { "epoch": 0.4594594594594595, "grad_norm": 1.376778244972229, "learning_rate": 2.8081442660546125e-05, "loss": 1.0623, "step": 136 },
    { "epoch": 0.46283783783783783, "grad_norm": 1.6616677045822144, "learning_rate": 2.7300475013022663e-05, "loss": 1.1181, "step": 137 },
    { "epoch": 0.46621621621621623, "grad_norm": 1.489789605140686, "learning_rate": 2.6526421860705473e-05, "loss": 1.0679, "step": 138 },
    { "epoch": 0.46959459459459457, "grad_norm": 1.6120611429214478, "learning_rate": 2.575951898768315e-05, "loss": 1.1404, "step": 139 },
    { "epoch": 0.47297297297297297, "grad_norm": 1.7737764120101929, "learning_rate": 2.500000000000001e-05, "loss": 1.0704, "step": 140 },
    { "epoch": 0.47635135135135137, "grad_norm": 1.4966835975646973, "learning_rate": 2.4248096254497288e-05, "loss": 0.903, "step": 141 },
    { "epoch": 0.4797297297297297, "grad_norm": 1.6529886722564697, "learning_rate": 2.350403678833976e-05, "loss": 0.9047, "step": 142 },
    { "epoch": 0.4831081081081081, "grad_norm": 1.9524998664855957, "learning_rate": 2.2768048249248648e-05, "loss": 0.9717, "step": 143 },
    { "epoch": 0.4864864864864865, "grad_norm": 1.7451844215393066, "learning_rate": 2.2040354826462668e-05, "loss": 0.9651, "step": 144 },
    { "epoch": 0.48986486486486486, "grad_norm": 1.7876458168029785, "learning_rate": 2.132117818244771e-05, "loss": 0.9632, "step": 145 },
    { "epoch": 0.49324324324324326, "grad_norm": 1.8912980556488037, "learning_rate": 2.061073738537635e-05, "loss": 0.9248, "step": 146 },
    { "epoch": 0.4966216216216216, "grad_norm": 2.041308641433716, "learning_rate": 1.9909248842397584e-05, "loss": 0.826, "step": 147 },
    { "epoch": 0.5, "grad_norm": 3.146843433380127, "learning_rate": 1.9216926233717085e-05, "loss": 0.8596, "step": 148 },
    { "epoch": 0.5033783783783784, "grad_norm": 3.079761028289795, "learning_rate": 1.8533980447508137e-05, "loss": 1.0932, "step": 149 },
    { "epoch": 0.5067567567567568, "grad_norm": 4.34760046005249, "learning_rate": 1.7860619515673033e-05, "loss": 0.9517, "step": 150 },
    { "epoch": 0.5067567567567568, "eval_loss": 0.910146951675415, "eval_runtime": 46.335, "eval_samples_per_second": 10.769, "eval_steps_per_second": 5.395, "step": 150 },
    { "epoch": 0.5101351351351351, "grad_norm": 0.617638111114502, "learning_rate": 1.7197048550474643e-05, "loss": 0.7233, "step": 151 },
    { "epoch": 0.5135135135135135, "grad_norm": 0.8564380407333374, "learning_rate": 1.6543469682057106e-05, "loss": 0.9559, "step": 152 },
    { "epoch": 0.5168918918918919, "grad_norm": 0.6549626588821411, "learning_rate": 1.5900081996875083e-05, "loss": 0.7579, "step": 153 },
    { "epoch": 0.5202702702702703, "grad_norm": 0.6548464894294739, "learning_rate": 1.526708147705013e-05, "loss": 0.7968, "step": 154 },
    { "epoch": 0.5236486486486487, "grad_norm": 0.6791085004806519, "learning_rate": 1.4644660940672627e-05, "loss": 0.8266, "step": 155 },
    { "epoch": 0.527027027027027, "grad_norm": 0.6822385191917419, "learning_rate": 1.4033009983067452e-05, "loss": 0.8525, "step": 156 },
    { "epoch": 0.5304054054054054, "grad_norm": 0.699497640132904, "learning_rate": 1.3432314919041478e-05, "loss": 0.8513, "step": 157 },
    { "epoch": 0.5337837837837838, "grad_norm": 0.8761206865310669, "learning_rate": 1.2842758726130283e-05, "loss": 0.8481, "step": 158 },
    { "epoch": 0.5371621621621622, "grad_norm": 0.7131243944168091, "learning_rate": 1.22645209888614e-05, "loss": 0.8898, "step": 159 },
    { "epoch": 0.5405405405405406, "grad_norm": 0.7251644730567932, "learning_rate": 1.1697777844051105e-05, "loss": 0.8904, "step": 160 },
    { "epoch": 0.543918918918919, "grad_norm": 0.7635348439216614, "learning_rate": 1.1142701927151456e-05, "loss": 0.8293, "step": 161 },
    { "epoch": 0.5472972972972973, "grad_norm": 0.796482503414154, "learning_rate": 1.0599462319663905e-05, "loss": 0.9214, "step": 162 },
    { "epoch": 0.5506756756756757, "grad_norm": 0.7892021536827087, "learning_rate": 1.006822449763537e-05, "loss": 0.8489, "step": 163 },
    { "epoch": 0.5540540540540541, "grad_norm": 0.7896630764007568, "learning_rate": 9.549150281252633e-06, "loss": 0.8908, "step": 164 },
    { "epoch": 0.5574324324324325, "grad_norm": 0.9114834070205688, "learning_rate": 9.042397785550405e-06, "loss": 0.8966, "step": 165 },
    { "epoch": 0.5608108108108109, "grad_norm": 0.7687644958496094, "learning_rate": 8.548121372247918e-06, "loss": 0.8819, "step": 166 },
    { "epoch": 0.5641891891891891, "grad_norm": 0.860630214214325, "learning_rate": 8.066471602728803e-06, "loss": 0.923, "step": 167 },
    { "epoch": 0.5675675675675675, "grad_norm": 0.8365357518196106, "learning_rate": 7.597595192178702e-06, "loss": 0.9266, "step": 168 },
    { "epoch": 0.5709459459459459, "grad_norm": 0.8165171146392822, "learning_rate": 7.1416349648943894e-06, "loss": 0.838, "step": 169 },
    { "epoch": 0.5743243243243243, "grad_norm": 0.864069938659668, "learning_rate": 6.698729810778065e-06, "loss": 0.934, "step": 170 },
    { "epoch": 0.5777027027027027, "grad_norm": 0.9135668873786926, "learning_rate": 6.269014643030213e-06, "loss": 0.9902, "step": 171 },
    { "epoch": 0.581081081081081, "grad_norm": 0.8596118688583374, "learning_rate": 5.852620357053651e-06, "loss": 0.8607, "step": 172 },
    { "epoch": 0.5844594594594594, "grad_norm": 1.0075477361679077, "learning_rate": 5.449673790581611e-06, "loss": 1.006, "step": 173 },
    { "epoch": 0.5878378378378378, "grad_norm": 0.9625608325004578, "learning_rate": 5.060297685041659e-06, "loss": 0.9199, "step": 174 },
    { "epoch": 0.5912162162162162, "grad_norm": 1.0007152557373047, "learning_rate": 4.684610648167503e-06, "loss": 0.9741, "step": 175 },
    { "epoch": 0.5945945945945946, "grad_norm": 1.2116408348083496, "learning_rate": 4.322727117869951e-06, "loss": 1.1235, "step": 176 },
    { "epoch": 0.597972972972973, "grad_norm": 1.0656737089157104, "learning_rate": 3.974757327377981e-06, "loss": 0.9497, "step": 177 },
    { "epoch": 0.6013513513513513, "grad_norm": 1.095234751701355, "learning_rate": 3.6408072716606346e-06, "loss": 1.0694, "step": 178 },
    { "epoch": 0.6047297297297297, "grad_norm": 1.0279932022094727, "learning_rate": 3.3209786751399187e-06, "loss": 0.9642, "step": 179 },
    { "epoch": 0.6081081081081081, "grad_norm": 1.1247590780258179, "learning_rate": 3.0153689607045845e-06, "loss": 0.9799, "step": 180 },
    { "epoch": 0.6114864864864865, "grad_norm": 1.228450894355774, "learning_rate": 2.724071220034158e-06, "loss": 0.9902, "step": 181 },
    { "epoch": 0.6148648648648649, "grad_norm": 1.2460905313491821, "learning_rate": 2.4471741852423237e-06, "loss": 1.0458, "step": 182 },
    { "epoch": 0.6182432432432432, "grad_norm": 1.3238197565078735, "learning_rate": 2.1847622018482283e-06, "loss": 1.0183, "step": 183 },
    { "epoch": 0.6216216216216216, "grad_norm": 1.2620553970336914, "learning_rate": 1.9369152030840556e-06, "loss": 1.0289, "step": 184 },
    { "epoch": 0.625, "grad_norm": 1.400741457939148, "learning_rate": 1.70370868554659e-06, "loss": 1.0397, "step": 185 },
    { "epoch": 0.6283783783783784, "grad_norm": 1.3458335399627686, "learning_rate": 1.4852136862001764e-06, "loss": 0.922, "step": 186 },
    { "epoch": 0.6317567567567568, "grad_norm": 1.3344112634658813, "learning_rate": 1.2814967607382432e-06, "loss": 0.9703, "step": 187 },
    { "epoch": 0.6351351351351351, "grad_norm": 1.5010539293289185, "learning_rate": 1.0926199633097157e-06, "loss": 1.0322, "step": 188 },
    { "epoch": 0.6385135135135135, "grad_norm": 1.4925593137741089, "learning_rate": 9.186408276168013e-07, "loss": 1.0375, "step": 189 },
    { "epoch": 0.6418918918918919, "grad_norm": 1.4849199056625366, "learning_rate": 7.596123493895991e-07, "loss": 0.9365, "step": 190 },
    { "epoch": 0.6452702702702703, "grad_norm": 1.4886884689331055, "learning_rate": 6.15582970243117e-07, "loss": 0.9195, "step": 191 },
    { "epoch": 0.6486486486486487, "grad_norm": 1.625990390777588, "learning_rate": 4.865965629214819e-07, "loss": 0.9385, "step": 192 },
    { "epoch": 0.652027027027027, "grad_norm": 1.832520842552185, "learning_rate": 3.7269241793390085e-07, "loss": 0.8738, "step": 193 },
    { "epoch": 0.6554054054054054, "grad_norm": 2.0530941486358643, "learning_rate": 2.7390523158633554e-07, "loss": 1.035, "step": 194 },
    { "epoch": 0.6587837837837838, "grad_norm": 2.198253870010376, "learning_rate": 1.9026509541272275e-07, "loss": 0.955, "step": 195 },
    { "epoch": 0.6621621621621622, "grad_norm": 1.8382649421691895, "learning_rate": 1.2179748700879012e-07, "loss": 0.8156, "step": 196 },
    { "epoch": 0.6655405405405406, "grad_norm": 1.9348886013031006, "learning_rate": 6.852326227130834e-08, "loss": 0.8991, "step": 197 },
    { "epoch": 0.668918918918919, "grad_norm": 2.128573417663574, "learning_rate": 3.04586490452119e-08, "loss": 0.7261, "step": 198 },
    { "epoch": 0.6722972972972973, "grad_norm": 2.5201845169067383, "learning_rate": 7.615242180436522e-09, "loss": 0.921, "step": 199 },
    { "epoch": 0.6756756756756757, "grad_norm": 3.6946332454681396, "learning_rate": 0.0, "loss": 0.9138, "step": 200 },
    { "epoch": 0.6756756756756757, "eval_loss": 0.8919714689254761, "eval_runtime": 46.3883, "eval_samples_per_second": 10.757, "eval_steps_per_second": 5.389, "step": 200 }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.1421973496489574e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}