{
  "best_metric": 0.6730939745903015,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.039615727443795185,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00039615727443795186,
      "grad_norm": 14.314478874206543,
      "learning_rate": 5e-06,
      "loss": 2.5987,
      "step": 1
    },
    {
      "epoch": 0.00039615727443795186,
      "eval_loss": 1.2088950872421265,
      "eval_runtime": 338.2836,
      "eval_samples_per_second": 12.569,
      "eval_steps_per_second": 6.285,
      "step": 1
    },
    {
      "epoch": 0.0007923145488759037,
      "grad_norm": 16.324403762817383,
      "learning_rate": 1e-05,
      "loss": 3.394,
      "step": 2
    },
    {
      "epoch": 0.0011884718233138556,
      "grad_norm": 18.80040168762207,
      "learning_rate": 1.5e-05,
      "loss": 3.0517,
      "step": 3
    },
    {
      "epoch": 0.0015846290977518075,
      "grad_norm": 17.43671989440918,
      "learning_rate": 2e-05,
      "loss": 3.2647,
      "step": 4
    },
    {
      "epoch": 0.0019807863721897595,
      "grad_norm": 14.420022010803223,
      "learning_rate": 2.5e-05,
      "loss": 2.9988,
      "step": 5
    },
    {
      "epoch": 0.0023769436466277113,
      "grad_norm": 13.038010597229004,
      "learning_rate": 3e-05,
      "loss": 2.9589,
      "step": 6
    },
    {
      "epoch": 0.002773100921065663,
      "grad_norm": 11.008463859558105,
      "learning_rate": 3.5e-05,
      "loss": 2.6601,
      "step": 7
    },
    {
      "epoch": 0.003169258195503615,
      "grad_norm": 11.67863941192627,
      "learning_rate": 4e-05,
      "loss": 2.6742,
      "step": 8
    },
    {
      "epoch": 0.0035654154699415667,
      "grad_norm": 12.343779563903809,
      "learning_rate": 4.5e-05,
      "loss": 2.9556,
      "step": 9
    },
    {
      "epoch": 0.003961572744379519,
      "grad_norm": 10.546838760375977,
      "learning_rate": 5e-05,
      "loss": 2.6015,
      "step": 10
    },
    {
      "epoch": 0.004357730018817471,
      "grad_norm": 11.106729507446289,
      "learning_rate": 5.500000000000001e-05,
      "loss": 2.5997,
      "step": 11
    },
    {
      "epoch": 0.004753887293255423,
      "grad_norm": 10.504631996154785,
      "learning_rate": 6e-05,
      "loss": 3.0646,
      "step": 12
    },
    {
      "epoch": 0.005150044567693374,
      "grad_norm": 9.306614875793457,
      "learning_rate": 6.500000000000001e-05,
      "loss": 2.5052,
      "step": 13
    },
    {
      "epoch": 0.005546201842131326,
      "grad_norm": 9.76103401184082,
      "learning_rate": 7e-05,
      "loss": 2.272,
      "step": 14
    },
    {
      "epoch": 0.005942359116569278,
      "grad_norm": 10.521080017089844,
      "learning_rate": 7.500000000000001e-05,
      "loss": 2.7484,
      "step": 15
    },
    {
      "epoch": 0.00633851639100723,
      "grad_norm": 10.383809089660645,
      "learning_rate": 8e-05,
      "loss": 2.4941,
      "step": 16
    },
    {
      "epoch": 0.006734673665445182,
      "grad_norm": 11.771020889282227,
      "learning_rate": 8.5e-05,
      "loss": 2.7839,
      "step": 17
    },
    {
      "epoch": 0.0071308309398831335,
      "grad_norm": 10.846237182617188,
      "learning_rate": 9e-05,
      "loss": 2.4915,
      "step": 18
    },
    {
      "epoch": 0.007526988214321085,
      "grad_norm": 9.68586254119873,
      "learning_rate": 9.5e-05,
      "loss": 2.4326,
      "step": 19
    },
    {
      "epoch": 0.007923145488759038,
      "grad_norm": 9.134086608886719,
      "learning_rate": 0.0001,
      "loss": 2.2999,
      "step": 20
    },
    {
      "epoch": 0.008319302763196989,
      "grad_norm": 10.715767860412598,
      "learning_rate": 9.999238475781957e-05,
      "loss": 2.4463,
      "step": 21
    },
    {
      "epoch": 0.008715460037634942,
      "grad_norm": 12.497233390808105,
      "learning_rate": 9.99695413509548e-05,
      "loss": 2.6965,
      "step": 22
    },
    {
      "epoch": 0.009111617312072893,
      "grad_norm": 11.588178634643555,
      "learning_rate": 9.99314767377287e-05,
      "loss": 2.6387,
      "step": 23
    },
    {
      "epoch": 0.009507774586510845,
      "grad_norm": 10.239289283752441,
      "learning_rate": 9.987820251299122e-05,
      "loss": 2.3716,
      "step": 24
    },
    {
      "epoch": 0.009903931860948796,
      "grad_norm": 10.348134994506836,
      "learning_rate": 9.980973490458728e-05,
      "loss": 2.3706,
      "step": 25
    },
    {
      "epoch": 0.010300089135386749,
      "grad_norm": 12.288321495056152,
      "learning_rate": 9.972609476841367e-05,
      "loss": 2.8137,
      "step": 26
    },
    {
      "epoch": 0.0106962464098247,
      "grad_norm": 10.109082221984863,
      "learning_rate": 9.962730758206611e-05,
      "loss": 2.5793,
      "step": 27
    },
    {
      "epoch": 0.011092403684262652,
      "grad_norm": 10.935514450073242,
      "learning_rate": 9.951340343707852e-05,
      "loss": 2.5139,
      "step": 28
    },
    {
      "epoch": 0.011488560958700603,
      "grad_norm": 11.004712104797363,
      "learning_rate": 9.938441702975689e-05,
      "loss": 2.4287,
      "step": 29
    },
    {
      "epoch": 0.011884718233138556,
      "grad_norm": 11.961386680603027,
      "learning_rate": 9.924038765061042e-05,
      "loss": 2.885,
      "step": 30
    },
    {
      "epoch": 0.012280875507576509,
      "grad_norm": 10.97961139678955,
      "learning_rate": 9.908135917238321e-05,
      "loss": 2.7134,
      "step": 31
    },
    {
      "epoch": 0.01267703278201446,
      "grad_norm": 13.087514877319336,
      "learning_rate": 9.890738003669029e-05,
      "loss": 3.0466,
      "step": 32
    },
    {
      "epoch": 0.013073190056452412,
      "grad_norm": 9.89000129699707,
      "learning_rate": 9.871850323926177e-05,
      "loss": 2.0578,
      "step": 33
    },
    {
      "epoch": 0.013469347330890363,
      "grad_norm": 13.183517456054688,
      "learning_rate": 9.851478631379982e-05,
      "loss": 2.8413,
      "step": 34
    },
    {
      "epoch": 0.013865504605328316,
      "grad_norm": 10.432723999023438,
      "learning_rate": 9.829629131445342e-05,
      "loss": 2.3153,
      "step": 35
    },
    {
      "epoch": 0.014261661879766267,
      "grad_norm": 10.83343505859375,
      "learning_rate": 9.806308479691595e-05,
      "loss": 2.3221,
      "step": 36
    },
    {
      "epoch": 0.01465781915420422,
      "grad_norm": 11.4821138381958,
      "learning_rate": 9.781523779815179e-05,
      "loss": 2.7535,
      "step": 37
    },
    {
      "epoch": 0.01505397642864217,
      "grad_norm": 13.579030990600586,
      "learning_rate": 9.755282581475769e-05,
      "loss": 3.1641,
      "step": 38
    },
    {
      "epoch": 0.015450133703080123,
      "grad_norm": 12.277050018310547,
      "learning_rate": 9.727592877996585e-05,
      "loss": 2.5429,
      "step": 39
    },
    {
      "epoch": 0.015846290977518076,
      "grad_norm": 11.074421882629395,
      "learning_rate": 9.698463103929542e-05,
      "loss": 2.3702,
      "step": 40
    },
    {
      "epoch": 0.016242448251956025,
      "grad_norm": 14.457468032836914,
      "learning_rate": 9.667902132486009e-05,
      "loss": 3.2263,
      "step": 41
    },
    {
      "epoch": 0.016638605526393978,
      "grad_norm": 12.959149360656738,
      "learning_rate": 9.635919272833938e-05,
      "loss": 2.4348,
      "step": 42
    },
    {
      "epoch": 0.01703476280083193,
      "grad_norm": 17.107925415039062,
      "learning_rate": 9.602524267262203e-05,
      "loss": 3.3204,
      "step": 43
    },
    {
      "epoch": 0.017430920075269883,
      "grad_norm": 14.488329887390137,
      "learning_rate": 9.567727288213005e-05,
      "loss": 2.845,
      "step": 44
    },
    {
      "epoch": 0.017827077349707832,
      "grad_norm": 12.97142505645752,
      "learning_rate": 9.53153893518325e-05,
      "loss": 3.277,
      "step": 45
    },
    {
      "epoch": 0.018223234624145785,
      "grad_norm": 16.733470916748047,
      "learning_rate": 9.493970231495835e-05,
      "loss": 3.6097,
      "step": 46
    },
    {
      "epoch": 0.018619391898583738,
      "grad_norm": 13.195300102233887,
      "learning_rate": 9.45503262094184e-05,
      "loss": 2.9709,
      "step": 47
    },
    {
      "epoch": 0.01901554917302169,
      "grad_norm": 13.836097717285156,
      "learning_rate": 9.414737964294636e-05,
      "loss": 2.7804,
      "step": 48
    },
    {
      "epoch": 0.019411706447459643,
      "grad_norm": 12.61435317993164,
      "learning_rate": 9.373098535696979e-05,
      "loss": 2.2369,
      "step": 49
    },
    {
      "epoch": 0.019807863721897592,
      "grad_norm": 17.996479034423828,
      "learning_rate": 9.330127018922194e-05,
      "loss": 3.2496,
      "step": 50
    },
    {
      "epoch": 0.019807863721897592,
      "eval_loss": 0.7593479752540588,
      "eval_runtime": 340.1536,
      "eval_samples_per_second": 12.5,
      "eval_steps_per_second": 6.25,
      "step": 50
    },
    {
      "epoch": 0.020204020996335545,
      "grad_norm": 14.763004302978516,
      "learning_rate": 9.285836503510562e-05,
      "loss": 3.4166,
      "step": 51
    },
    {
      "epoch": 0.020600178270773498,
      "grad_norm": 8.999519348144531,
      "learning_rate": 9.24024048078213e-05,
      "loss": 2.2951,
      "step": 52
    },
    {
      "epoch": 0.02099633554521145,
      "grad_norm": 7.654372215270996,
      "learning_rate": 9.193352839727121e-05,
      "loss": 2.9339,
      "step": 53
    },
    {
      "epoch": 0.0213924928196494,
      "grad_norm": 6.517328262329102,
      "learning_rate": 9.145187862775209e-05,
      "loss": 2.4162,
      "step": 54
    },
    {
      "epoch": 0.021788650094087352,
      "grad_norm": 7.003291130065918,
      "learning_rate": 9.09576022144496e-05,
      "loss": 2.2779,
      "step": 55
    },
    {
      "epoch": 0.022184807368525305,
      "grad_norm": 8.182329177856445,
      "learning_rate": 9.045084971874738e-05,
      "loss": 2.9609,
      "step": 56
    },
    {
      "epoch": 0.022580964642963258,
      "grad_norm": 7.554233074188232,
      "learning_rate": 8.993177550236464e-05,
      "loss": 2.6803,
      "step": 57
    },
    {
      "epoch": 0.022977121917401207,
      "grad_norm": 7.749049186706543,
      "learning_rate": 8.940053768033609e-05,
      "loss": 2.5795,
      "step": 58
    },
    {
      "epoch": 0.02337327919183916,
      "grad_norm": 7.0886149406433105,
      "learning_rate": 8.885729807284856e-05,
      "loss": 2.3511,
      "step": 59
    },
    {
      "epoch": 0.023769436466277112,
      "grad_norm": 6.84679651260376,
      "learning_rate": 8.83022221559489e-05,
      "loss": 2.1122,
      "step": 60
    },
    {
      "epoch": 0.024165593740715065,
      "grad_norm": 7.1356329917907715,
      "learning_rate": 8.773547901113862e-05,
      "loss": 2.3196,
      "step": 61
    },
    {
      "epoch": 0.024561751015153017,
      "grad_norm": 7.6530046463012695,
      "learning_rate": 8.715724127386972e-05,
      "loss": 2.4788,
      "step": 62
    },
    {
      "epoch": 0.024957908289590967,
      "grad_norm": 8.369495391845703,
      "learning_rate": 8.656768508095853e-05,
      "loss": 2.3375,
      "step": 63
    },
    {
      "epoch": 0.02535406556402892,
      "grad_norm": 8.36403751373291,
      "learning_rate": 8.596699001693255e-05,
      "loss": 2.0959,
      "step": 64
    },
    {
      "epoch": 0.025750222838466872,
      "grad_norm": 10.229287147521973,
      "learning_rate": 8.535533905932738e-05,
      "loss": 3.0627,
      "step": 65
    },
    {
      "epoch": 0.026146380112904825,
      "grad_norm": 8.87305736541748,
      "learning_rate": 8.473291852294987e-05,
      "loss": 2.5706,
      "step": 66
    },
    {
      "epoch": 0.026542537387342774,
      "grad_norm": 9.638481140136719,
      "learning_rate": 8.409991800312493e-05,
      "loss": 2.7759,
      "step": 67
    },
    {
      "epoch": 0.026938694661780727,
      "grad_norm": 9.509964942932129,
      "learning_rate": 8.345653031794292e-05,
      "loss": 2.5595,
      "step": 68
    },
    {
      "epoch": 0.02733485193621868,
      "grad_norm": 9.560708045959473,
      "learning_rate": 8.280295144952536e-05,
      "loss": 2.6391,
      "step": 69
    },
    {
      "epoch": 0.027731009210656632,
      "grad_norm": 8.581311225891113,
      "learning_rate": 8.213938048432697e-05,
      "loss": 2.784,
      "step": 70
    },
    {
      "epoch": 0.02812716648509458,
      "grad_norm": 7.63805627822876,
      "learning_rate": 8.146601955249188e-05,
      "loss": 2.2859,
      "step": 71
    },
    {
      "epoch": 0.028523323759532534,
      "grad_norm": 8.626289367675781,
      "learning_rate": 8.07830737662829e-05,
      "loss": 2.7975,
      "step": 72
    },
    {
      "epoch": 0.028919481033970486,
      "grad_norm": 11.270418167114258,
      "learning_rate": 8.009075115760243e-05,
      "loss": 2.5275,
      "step": 73
    },
    {
      "epoch": 0.02931563830840844,
      "grad_norm": 9.097280502319336,
      "learning_rate": 7.938926261462366e-05,
      "loss": 2.696,
      "step": 74
    },
    {
      "epoch": 0.02971179558284639,
      "grad_norm": 7.862690448760986,
      "learning_rate": 7.86788218175523e-05,
      "loss": 2.2284,
      "step": 75
    },
    {
      "epoch": 0.03010795285728434,
      "grad_norm": 7.738744735717773,
      "learning_rate": 7.795964517353735e-05,
      "loss": 2.3424,
      "step": 76
    },
    {
      "epoch": 0.030504110131722294,
      "grad_norm": 9.578076362609863,
      "learning_rate": 7.723195175075136e-05,
      "loss": 2.8572,
      "step": 77
    },
    {
      "epoch": 0.030900267406160246,
      "grad_norm": 9.798004150390625,
      "learning_rate": 7.649596321166024e-05,
      "loss": 2.9643,
      "step": 78
    },
    {
      "epoch": 0.0312964246805982,
      "grad_norm": 9.317380905151367,
      "learning_rate": 7.575190374550272e-05,
      "loss": 3.2559,
      "step": 79
    },
    {
      "epoch": 0.03169258195503615,
      "grad_norm": 9.332989692687988,
      "learning_rate": 7.500000000000001e-05,
      "loss": 2.6451,
      "step": 80
    },
    {
      "epoch": 0.032088739229474104,
      "grad_norm": 10.110783576965332,
      "learning_rate": 7.424048101231686e-05,
      "loss": 3.095,
      "step": 81
    },
    {
      "epoch": 0.03248489650391205,
      "grad_norm": 9.339815139770508,
      "learning_rate": 7.347357813929454e-05,
      "loss": 2.3127,
      "step": 82
    },
    {
      "epoch": 0.03288105377835,
      "grad_norm": 10.58879280090332,
      "learning_rate": 7.269952498697734e-05,
      "loss": 2.5449,
      "step": 83
    },
    {
      "epoch": 0.033277211052787956,
      "grad_norm": 10.540449142456055,
      "learning_rate": 7.191855733945387e-05,
      "loss": 3.362,
      "step": 84
    },
    {
      "epoch": 0.03367336832722591,
      "grad_norm": 12.278244972229004,
      "learning_rate": 7.113091308703498e-05,
      "loss": 2.5295,
      "step": 85
    },
    {
      "epoch": 0.03406952560166386,
      "grad_norm": 9.675426483154297,
      "learning_rate": 7.033683215379002e-05,
      "loss": 2.4279,
      "step": 86
    },
    {
      "epoch": 0.034465682876101814,
      "grad_norm": 8.322236061096191,
      "learning_rate": 6.953655642446368e-05,
      "loss": 2.1438,
      "step": 87
    },
    {
      "epoch": 0.034861840150539766,
      "grad_norm": 15.984248161315918,
      "learning_rate": 6.873032967079561e-05,
      "loss": 2.4826,
      "step": 88
    },
    {
      "epoch": 0.03525799742497772,
      "grad_norm": 11.099123001098633,
      "learning_rate": 6.7918397477265e-05,
      "loss": 2.8121,
      "step": 89
    },
    {
      "epoch": 0.035654154699415665,
      "grad_norm": 10.237126350402832,
      "learning_rate": 6.710100716628344e-05,
      "loss": 2.6013,
      "step": 90
    },
    {
      "epoch": 0.03605031197385362,
      "grad_norm": 12.89112663269043,
      "learning_rate": 6.627840772285784e-05,
      "loss": 2.3039,
      "step": 91
    },
    {
      "epoch": 0.03644646924829157,
      "grad_norm": 8.39738655090332,
      "learning_rate": 6.545084971874738e-05,
      "loss": 2.0425,
      "step": 92
    },
    {
      "epoch": 0.03684262652272952,
      "grad_norm": 10.789386749267578,
      "learning_rate": 6.461858523613684e-05,
      "loss": 2.7755,
      "step": 93
    },
    {
      "epoch": 0.037238783797167475,
      "grad_norm": 11.95657730102539,
      "learning_rate": 6.378186779084995e-05,
      "loss": 2.5013,
      "step": 94
    },
    {
      "epoch": 0.03763494107160543,
      "grad_norm": 9.726419448852539,
      "learning_rate": 6.294095225512603e-05,
      "loss": 2.6235,
      "step": 95
    },
    {
      "epoch": 0.03803109834604338,
      "grad_norm": 12.768197059631348,
      "learning_rate": 6.209609477998338e-05,
      "loss": 2.7799,
      "step": 96
    },
    {
      "epoch": 0.03842725562048133,
      "grad_norm": 11.749434471130371,
      "learning_rate": 6.124755271719325e-05,
      "loss": 2.9625,
      "step": 97
    },
    {
      "epoch": 0.038823412894919286,
      "grad_norm": 14.08253002166748,
      "learning_rate": 6.0395584540887963e-05,
      "loss": 2.8971,
      "step": 98
    },
    {
      "epoch": 0.03921957016935723,
      "grad_norm": 11.256560325622559,
      "learning_rate": 5.9540449768827246e-05,
      "loss": 2.3274,
      "step": 99
    },
    {
      "epoch": 0.039615727443795185,
      "grad_norm": 12.625090599060059,
      "learning_rate": 5.868240888334653e-05,
      "loss": 2.6474,
      "step": 100
    },
    {
      "epoch": 0.039615727443795185,
      "eval_loss": 0.6730939745903015,
      "eval_runtime": 340.0276,
      "eval_samples_per_second": 12.505,
      "eval_steps_per_second": 6.252,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.430998008987648e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}