{
  "best_metric": 0.6193411350250244,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.07923145488759037,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00039615727443795186,
      "grad_norm": 14.495563507080078,
      "learning_rate": 5e-06,
      "loss": 2.5987,
      "step": 1
    },
    {
      "epoch": 0.00039615727443795186,
      "eval_loss": 1.2088950872421265,
      "eval_runtime": 337.0637,
      "eval_samples_per_second": 12.615,
      "eval_steps_per_second": 6.307,
      "step": 1
    },
    {
      "epoch": 0.0007923145488759037,
      "grad_norm": 16.614559173583984,
      "learning_rate": 1e-05,
      "loss": 3.394,
      "step": 2
    },
    {
      "epoch": 0.0011884718233138556,
      "grad_norm": 18.835763931274414,
      "learning_rate": 1.5e-05,
      "loss": 3.0506,
      "step": 3
    },
    {
      "epoch": 0.0015846290977518075,
      "grad_norm": 18.204811096191406,
      "learning_rate": 2e-05,
      "loss": 3.2621,
      "step": 4
    },
    {
      "epoch": 0.0019807863721897595,
      "grad_norm": 14.541211128234863,
      "learning_rate": 2.5e-05,
      "loss": 2.9941,
      "step": 5
    },
    {
      "epoch": 0.0023769436466277113,
      "grad_norm": 13.049044609069824,
      "learning_rate": 3e-05,
      "loss": 2.9559,
      "step": 6
    },
    {
      "epoch": 0.002773100921065663,
      "grad_norm": 11.005773544311523,
      "learning_rate": 3.5e-05,
      "loss": 2.655,
      "step": 7
    },
    {
      "epoch": 0.003169258195503615,
      "grad_norm": 11.66140365600586,
      "learning_rate": 4e-05,
      "loss": 2.6704,
      "step": 8
    },
    {
      "epoch": 0.0035654154699415667,
      "grad_norm": 12.335329055786133,
      "learning_rate": 4.5e-05,
      "loss": 2.9587,
      "step": 9
    },
    {
      "epoch": 0.003961572744379519,
      "grad_norm": 10.606803894042969,
      "learning_rate": 5e-05,
      "loss": 2.5995,
      "step": 10
    },
    {
      "epoch": 0.004357730018817471,
      "grad_norm": 11.140325546264648,
      "learning_rate": 5.500000000000001e-05,
      "loss": 2.5927,
      "step": 11
    },
    {
      "epoch": 0.004753887293255423,
      "grad_norm": 10.353896141052246,
      "learning_rate": 6e-05,
      "loss": 3.0605,
      "step": 12
    },
    {
      "epoch": 0.005150044567693374,
      "grad_norm": 9.193838119506836,
      "learning_rate": 6.500000000000001e-05,
      "loss": 2.5009,
      "step": 13
    },
    {
      "epoch": 0.005546201842131326,
      "grad_norm": 9.763623237609863,
      "learning_rate": 7e-05,
      "loss": 2.2701,
      "step": 14
    },
    {
      "epoch": 0.005942359116569278,
      "grad_norm": 10.643813133239746,
      "learning_rate": 7.500000000000001e-05,
      "loss": 2.7406,
      "step": 15
    },
    {
      "epoch": 0.00633851639100723,
      "grad_norm": 10.669593811035156,
      "learning_rate": 8e-05,
      "loss": 2.4921,
      "step": 16
    },
    {
      "epoch": 0.006734673665445182,
      "grad_norm": 11.960747718811035,
      "learning_rate": 8.5e-05,
      "loss": 2.7845,
      "step": 17
    },
    {
      "epoch": 0.0071308309398831335,
      "grad_norm": 10.707191467285156,
      "learning_rate": 9e-05,
      "loss": 2.4889,
      "step": 18
    },
    {
      "epoch": 0.007526988214321085,
      "grad_norm": 9.775712966918945,
      "learning_rate": 9.5e-05,
      "loss": 2.4272,
      "step": 19
    },
    {
      "epoch": 0.007923145488759038,
      "grad_norm": 9.274944305419922,
      "learning_rate": 0.0001,
      "loss": 2.3026,
      "step": 20
    },
    {
      "epoch": 0.008319302763196989,
      "grad_norm": 10.755288124084473,
      "learning_rate": 9.999238475781957e-05,
      "loss": 2.449,
      "step": 21
    },
    {
      "epoch": 0.008715460037634942,
      "grad_norm": 12.539787292480469,
      "learning_rate": 9.99695413509548e-05,
      "loss": 2.6936,
      "step": 22
    },
    {
      "epoch": 0.009111617312072893,
      "grad_norm": 11.043819427490234,
      "learning_rate": 9.99314767377287e-05,
      "loss": 2.6237,
      "step": 23
    },
    {
      "epoch": 0.009507774586510845,
      "grad_norm": 10.186410903930664,
      "learning_rate": 9.987820251299122e-05,
      "loss": 2.3617,
      "step": 24
    },
    {
      "epoch": 0.009903931860948796,
      "grad_norm": 10.530620574951172,
      "learning_rate": 9.980973490458728e-05,
      "loss": 2.3778,
      "step": 25
    },
    {
      "epoch": 0.010300089135386749,
      "grad_norm": 13.25385856628418,
      "learning_rate": 9.972609476841367e-05,
      "loss": 2.8179,
      "step": 26
    },
    {
      "epoch": 0.0106962464098247,
      "grad_norm": 10.420865058898926,
      "learning_rate": 9.962730758206611e-05,
      "loss": 2.5846,
      "step": 27
    },
    {
      "epoch": 0.011092403684262652,
      "grad_norm": 11.93872356414795,
      "learning_rate": 9.951340343707852e-05,
      "loss": 2.5176,
      "step": 28
    },
    {
      "epoch": 0.011488560958700603,
      "grad_norm": 11.279356002807617,
      "learning_rate": 9.938441702975689e-05,
      "loss": 2.4314,
      "step": 29
    },
    {
      "epoch": 0.011884718233138556,
      "grad_norm": 11.756826400756836,
      "learning_rate": 9.924038765061042e-05,
      "loss": 2.8805,
      "step": 30
    },
    {
      "epoch": 0.012280875507576509,
      "grad_norm": 11.12096881866455,
      "learning_rate": 9.908135917238321e-05,
      "loss": 2.7252,
      "step": 31
    },
    {
      "epoch": 0.01267703278201446,
      "grad_norm": 12.88371467590332,
      "learning_rate": 9.890738003669029e-05,
      "loss": 3.0314,
      "step": 32
    },
    {
      "epoch": 0.013073190056452412,
      "grad_norm": 10.017961502075195,
      "learning_rate": 9.871850323926177e-05,
      "loss": 2.0577,
      "step": 33
    },
    {
      "epoch": 0.013469347330890363,
      "grad_norm": 13.735207557678223,
      "learning_rate": 9.851478631379982e-05,
      "loss": 2.8325,
      "step": 34
    },
    {
      "epoch": 0.013865504605328316,
      "grad_norm": 10.726877212524414,
      "learning_rate": 9.829629131445342e-05,
      "loss": 2.3005,
      "step": 35
    },
    {
      "epoch": 0.014261661879766267,
      "grad_norm": 10.604691505432129,
      "learning_rate": 9.806308479691595e-05,
      "loss": 2.341,
      "step": 36
    },
    {
      "epoch": 0.01465781915420422,
      "grad_norm": 11.629590034484863,
      "learning_rate": 9.781523779815179e-05,
      "loss": 2.7549,
      "step": 37
    },
    {
      "epoch": 0.01505397642864217,
      "grad_norm": 14.335546493530273,
      "learning_rate": 9.755282581475769e-05,
      "loss": 3.1741,
      "step": 38
    },
    {
      "epoch": 0.015450133703080123,
      "grad_norm": 11.522375106811523,
      "learning_rate": 9.727592877996585e-05,
      "loss": 2.5413,
      "step": 39
    },
    {
      "epoch": 0.015846290977518076,
      "grad_norm": 11.158613204956055,
      "learning_rate": 9.698463103929542e-05,
      "loss": 2.4075,
      "step": 40
    },
    {
      "epoch": 0.016242448251956025,
      "grad_norm": 15.320098876953125,
      "learning_rate": 9.667902132486009e-05,
      "loss": 3.231,
      "step": 41
    },
    {
      "epoch": 0.016638605526393978,
      "grad_norm": 13.03093147277832,
      "learning_rate": 9.635919272833938e-05,
      "loss": 2.4665,
      "step": 42
    },
    {
      "epoch": 0.01703476280083193,
      "grad_norm": 16.613243103027344,
      "learning_rate": 9.602524267262203e-05,
      "loss": 3.294,
      "step": 43
    },
    {
      "epoch": 0.017430920075269883,
      "grad_norm": 14.335494995117188,
      "learning_rate": 9.567727288213005e-05,
      "loss": 2.815,
      "step": 44
    },
    {
      "epoch": 0.017827077349707832,
      "grad_norm": 13.271525382995605,
      "learning_rate": 9.53153893518325e-05,
      "loss": 3.2896,
      "step": 45
    },
    {
      "epoch": 0.018223234624145785,
      "grad_norm": 20.365318298339844,
      "learning_rate": 9.493970231495835e-05,
      "loss": 3.6564,
      "step": 46
    },
    {
      "epoch": 0.018619391898583738,
      "grad_norm": 13.140840530395508,
      "learning_rate": 9.45503262094184e-05,
      "loss": 2.9866,
      "step": 47
    },
    {
      "epoch": 0.01901554917302169,
      "grad_norm": 13.794374465942383,
      "learning_rate": 9.414737964294636e-05,
      "loss": 2.8268,
      "step": 48
    },
    {
      "epoch": 0.019411706447459643,
      "grad_norm": 13.193543434143066,
      "learning_rate": 9.373098535696979e-05,
      "loss": 2.2512,
      "step": 49
    },
    {
      "epoch": 0.019807863721897592,
      "grad_norm": 17.28192138671875,
      "learning_rate": 9.330127018922194e-05,
      "loss": 3.2413,
      "step": 50
    },
    {
      "epoch": 0.019807863721897592,
      "eval_loss": 0.7558191418647766,
      "eval_runtime": 338.9865,
      "eval_samples_per_second": 12.543,
      "eval_steps_per_second": 6.272,
      "step": 50
    },
    {
      "epoch": 0.020204020996335545,
      "grad_norm": 13.773219108581543,
      "learning_rate": 9.285836503510562e-05,
      "loss": 3.3845,
      "step": 51
    },
    {
      "epoch": 0.020600178270773498,
      "grad_norm": 9.051798820495605,
      "learning_rate": 9.24024048078213e-05,
      "loss": 2.2873,
      "step": 52
    },
    {
      "epoch": 0.02099633554521145,
      "grad_norm": 7.766826629638672,
      "learning_rate": 9.193352839727121e-05,
      "loss": 2.9136,
      "step": 53
    },
    {
      "epoch": 0.0213924928196494,
      "grad_norm": 6.526791572570801,
      "learning_rate": 9.145187862775209e-05,
      "loss": 2.4049,
      "step": 54
    },
    {
      "epoch": 0.021788650094087352,
      "grad_norm": 7.026968479156494,
      "learning_rate": 9.09576022144496e-05,
      "loss": 2.2845,
      "step": 55
    },
    {
      "epoch": 0.022184807368525305,
      "grad_norm": 8.329981803894043,
      "learning_rate": 9.045084971874738e-05,
      "loss": 2.9457,
      "step": 56
    },
    {
      "epoch": 0.022580964642963258,
      "grad_norm": 7.5735626220703125,
      "learning_rate": 8.993177550236464e-05,
      "loss": 2.7011,
      "step": 57
    },
    {
      "epoch": 0.022977121917401207,
      "grad_norm": 7.662240028381348,
      "learning_rate": 8.940053768033609e-05,
      "loss": 2.574,
      "step": 58
    },
    {
      "epoch": 0.02337327919183916,
      "grad_norm": 7.075723171234131,
      "learning_rate": 8.885729807284856e-05,
      "loss": 2.3364,
      "step": 59
    },
    {
      "epoch": 0.023769436466277112,
      "grad_norm": 6.765683174133301,
      "learning_rate": 8.83022221559489e-05,
      "loss": 2.1123,
      "step": 60
    },
    {
      "epoch": 0.024165593740715065,
      "grad_norm": 7.3614397048950195,
      "learning_rate": 8.773547901113862e-05,
      "loss": 2.3193,
      "step": 61
    },
    {
      "epoch": 0.024561751015153017,
      "grad_norm": 8.243191719055176,
      "learning_rate": 8.715724127386972e-05,
      "loss": 2.4855,
      "step": 62
    },
    {
      "epoch": 0.024957908289590967,
      "grad_norm": 7.8791680335998535,
      "learning_rate": 8.656768508095853e-05,
      "loss": 2.3414,
      "step": 63
    },
    {
      "epoch": 0.02535406556402892,
      "grad_norm": 8.094986915588379,
      "learning_rate": 8.596699001693255e-05,
      "loss": 2.0803,
      "step": 64
    },
    {
      "epoch": 0.025750222838466872,
      "grad_norm": 10.040471076965332,
      "learning_rate": 8.535533905932738e-05,
      "loss": 3.0746,
      "step": 65
    },
    {
      "epoch": 0.026146380112904825,
      "grad_norm": 8.993818283081055,
      "learning_rate": 8.473291852294987e-05,
      "loss": 2.556,
      "step": 66
    },
    {
      "epoch": 0.026542537387342774,
      "grad_norm": 14.81818962097168,
      "learning_rate": 8.409991800312493e-05,
      "loss": 2.764,
      "step": 67
    },
    {
      "epoch": 0.026938694661780727,
      "grad_norm": 9.128986358642578,
      "learning_rate": 8.345653031794292e-05,
      "loss": 2.5565,
      "step": 68
    },
    {
      "epoch": 0.02733485193621868,
      "grad_norm": 8.368912696838379,
      "learning_rate": 8.280295144952536e-05,
      "loss": 2.6037,
      "step": 69
    },
    {
      "epoch": 0.027731009210656632,
      "grad_norm": 9.154232025146484,
      "learning_rate": 8.213938048432697e-05,
      "loss": 2.8112,
      "step": 70
    },
    {
      "epoch": 0.02812716648509458,
      "grad_norm": 7.73675012588501,
      "learning_rate": 8.146601955249188e-05,
      "loss": 2.2716,
      "step": 71
    },
    {
      "epoch": 0.028523323759532534,
      "grad_norm": 8.645244598388672,
      "learning_rate": 8.07830737662829e-05,
      "loss": 2.7805,
      "step": 72
    },
    {
      "epoch": 0.028919481033970486,
      "grad_norm": 8.736540794372559,
      "learning_rate": 8.009075115760243e-05,
      "loss": 2.494,
      "step": 73
    },
    {
      "epoch": 0.02931563830840844,
      "grad_norm": 9.59007453918457,
      "learning_rate": 7.938926261462366e-05,
      "loss": 2.7142,
      "step": 74
    },
    {
      "epoch": 0.02971179558284639,
      "grad_norm": 8.552069664001465,
      "learning_rate": 7.86788218175523e-05,
      "loss": 2.2465,
      "step": 75
    },
    {
      "epoch": 0.03010795285728434,
      "grad_norm": 7.933364391326904,
      "learning_rate": 7.795964517353735e-05,
      "loss": 2.3576,
      "step": 76
    },
    {
      "epoch": 0.030504110131722294,
      "grad_norm": 9.648405075073242,
      "learning_rate": 7.723195175075136e-05,
      "loss": 2.8443,
      "step": 77
    },
    {
      "epoch": 0.030900267406160246,
      "grad_norm": 10.026429176330566,
      "learning_rate": 7.649596321166024e-05,
      "loss": 2.994,
      "step": 78
    },
    {
      "epoch": 0.0312964246805982,
      "grad_norm": 9.404814720153809,
      "learning_rate": 7.575190374550272e-05,
      "loss": 3.2357,
      "step": 79
    },
    {
      "epoch": 0.03169258195503615,
      "grad_norm": 9.66026782989502,
      "learning_rate": 7.500000000000001e-05,
      "loss": 2.6634,
      "step": 80
    },
    {
      "epoch": 0.032088739229474104,
      "grad_norm": 10.365447998046875,
      "learning_rate": 7.424048101231686e-05,
      "loss": 3.1117,
      "step": 81
    },
    {
      "epoch": 0.03248489650391205,
      "grad_norm": 8.9589262008667,
      "learning_rate": 7.347357813929454e-05,
      "loss": 2.3134,
      "step": 82
    },
    {
      "epoch": 0.03288105377835,
      "grad_norm": 11.305326461791992,
      "learning_rate": 7.269952498697734e-05,
      "loss": 2.5728,
      "step": 83
    },
    {
      "epoch": 0.033277211052787956,
      "grad_norm": 10.456574440002441,
      "learning_rate": 7.191855733945387e-05,
      "loss": 3.3556,
      "step": 84
    },
    {
      "epoch": 0.03367336832722591,
      "grad_norm": 11.175398826599121,
      "learning_rate": 7.113091308703498e-05,
      "loss": 2.5528,
      "step": 85
    },
    {
      "epoch": 0.03406952560166386,
      "grad_norm": 9.353387832641602,
      "learning_rate": 7.033683215379002e-05,
      "loss": 2.4417,
      "step": 86
    },
    {
      "epoch": 0.034465682876101814,
      "grad_norm": 8.356178283691406,
      "learning_rate": 6.953655642446368e-05,
      "loss": 2.166,
      "step": 87
    },
    {
      "epoch": 0.034861840150539766,
      "grad_norm": 9.713857650756836,
      "learning_rate": 6.873032967079561e-05,
      "loss": 2.4152,
      "step": 88
    },
    {
      "epoch": 0.03525799742497772,
      "grad_norm": 10.957006454467773,
      "learning_rate": 6.7918397477265e-05,
      "loss": 2.8269,
      "step": 89
    },
    {
      "epoch": 0.035654154699415665,
      "grad_norm": 10.82498836517334,
      "learning_rate": 6.710100716628344e-05,
      "loss": 2.5913,
      "step": 90
    },
    {
      "epoch": 0.03605031197385362,
      "grad_norm": 13.92136287689209,
      "learning_rate": 6.627840772285784e-05,
      "loss": 2.3097,
      "step": 91
    },
    {
      "epoch": 0.03644646924829157,
      "grad_norm": 8.223499298095703,
      "learning_rate": 6.545084971874738e-05,
      "loss": 2.0382,
      "step": 92
    },
    {
      "epoch": 0.03684262652272952,
      "grad_norm": 13.45029354095459,
      "learning_rate": 6.461858523613684e-05,
      "loss": 2.8246,
      "step": 93
    },
    {
      "epoch": 0.037238783797167475,
      "grad_norm": 12.594712257385254,
      "learning_rate": 6.378186779084995e-05,
      "loss": 2.4397,
      "step": 94
    },
    {
      "epoch": 0.03763494107160543,
      "grad_norm": 9.746724128723145,
      "learning_rate": 6.294095225512603e-05,
      "loss": 2.6255,
      "step": 95
    },
    {
      "epoch": 0.03803109834604338,
      "grad_norm": 12.217853546142578,
      "learning_rate": 6.209609477998338e-05,
      "loss": 2.7994,
      "step": 96
    },
    {
      "epoch": 0.03842725562048133,
      "grad_norm": 11.843147277832031,
      "learning_rate": 6.124755271719325e-05,
      "loss": 2.9499,
      "step": 97
    },
    {
      "epoch": 0.038823412894919286,
      "grad_norm": 12.877901077270508,
      "learning_rate": 6.0395584540887963e-05,
      "loss": 2.9359,
      "step": 98
    },
    {
      "epoch": 0.03921957016935723,
      "grad_norm": 11.579832077026367,
      "learning_rate": 5.9540449768827246e-05,
      "loss": 2.3228,
      "step": 99
    },
    {
      "epoch": 0.039615727443795185,
      "grad_norm": 12.583760261535645,
      "learning_rate": 5.868240888334653e-05,
      "loss": 2.6903,
      "step": 100
    },
    {
      "epoch": 0.039615727443795185,
      "eval_loss": 0.6697877645492554,
      "eval_runtime": 339.4624,
      "eval_samples_per_second": 12.526,
      "eval_steps_per_second": 6.263,
      "step": 100
    },
    {
      "epoch": 0.04001188471823314,
      "grad_norm": 6.360177040100098,
      "learning_rate": 5.782172325201155e-05,
      "loss": 2.5962,
      "step": 101
    },
    {
      "epoch": 0.04040804199267109,
      "grad_norm": 6.526253700256348,
      "learning_rate": 5.695865504800327e-05,
      "loss": 2.5529,
      "step": 102
    },
    {
      "epoch": 0.04080419926710904,
      "grad_norm": 6.883899211883545,
      "learning_rate": 5.6093467170257374e-05,
      "loss": 2.5643,
      "step": 103
    },
    {
      "epoch": 0.041200356541546995,
      "grad_norm": 5.620289325714111,
      "learning_rate": 5.522642316338268e-05,
      "loss": 2.2494,
      "step": 104
    },
    {
      "epoch": 0.04159651381598495,
      "grad_norm": 7.3505401611328125,
      "learning_rate": 5.435778713738292e-05,
      "loss": 2.278,
      "step": 105
    },
    {
      "epoch": 0.0419926710904229,
      "grad_norm": 6.482987880706787,
      "learning_rate": 5.348782368720626e-05,
      "loss": 2.6903,
      "step": 106
    },
    {
      "epoch": 0.042388828364860846,
      "grad_norm": 6.914910793304443,
      "learning_rate": 5.26167978121472e-05,
      "loss": 2.7104,
      "step": 107
    },
    {
      "epoch": 0.0427849856392988,
      "grad_norm": 6.627212047576904,
      "learning_rate": 5.174497483512506e-05,
      "loss": 2.5426,
      "step": 108
    },
    {
      "epoch": 0.04318114291373675,
      "grad_norm": 6.562288761138916,
      "learning_rate": 5.0872620321864185e-05,
      "loss": 2.5501,
      "step": 109
    },
    {
      "epoch": 0.043577300188174704,
      "grad_norm": 7.48563814163208,
      "learning_rate": 5e-05,
      "loss": 2.5545,
      "step": 110
    },
    {
      "epoch": 0.04397345746261266,
      "grad_norm": 6.828021049499512,
      "learning_rate": 4.912737967813583e-05,
      "loss": 2.6011,
      "step": 111
    },
    {
      "epoch": 0.04436961473705061,
      "grad_norm": 6.753318786621094,
      "learning_rate": 4.825502516487497e-05,
      "loss": 2.1348,
      "step": 112
    },
    {
      "epoch": 0.04476577201148856,
      "grad_norm": 8.458637237548828,
      "learning_rate": 4.738320218785281e-05,
      "loss": 2.8946,
      "step": 113
    },
    {
      "epoch": 0.045161929285926515,
      "grad_norm": 6.85118293762207,
      "learning_rate": 4.6512176312793736e-05,
      "loss": 2.3202,
      "step": 114
    },
    {
      "epoch": 0.04555808656036447,
      "grad_norm": 7.629461288452148,
      "learning_rate": 4.564221286261709e-05,
      "loss": 2.3985,
      "step": 115
    },
    {
      "epoch": 0.045954243834802413,
      "grad_norm": 6.77186393737793,
      "learning_rate": 4.477357683661734e-05,
      "loss": 1.931,
      "step": 116
    },
    {
      "epoch": 0.046350401109240366,
      "grad_norm": 6.728777885437012,
      "learning_rate": 4.390653282974264e-05,
      "loss": 2.3199,
      "step": 117
    },
    {
      "epoch": 0.04674655838367832,
      "grad_norm": 6.741183280944824,
      "learning_rate": 4.3041344951996746e-05,
      "loss": 1.9732,
      "step": 118
    },
    {
      "epoch": 0.04714271565811627,
      "grad_norm": 7.823344707489014,
      "learning_rate": 4.2178276747988446e-05,
      "loss": 2.5642,
      "step": 119
    },
    {
      "epoch": 0.047538872932554224,
      "grad_norm": 9.227714538574219,
      "learning_rate": 4.131759111665349e-05,
      "loss": 2.8144,
      "step": 120
    },
    {
      "epoch": 0.04793503020699218,
      "grad_norm": 7.38051176071167,
      "learning_rate": 4.045955023117276e-05,
      "loss": 2.5403,
      "step": 121
    },
    {
      "epoch": 0.04833118748143013,
      "grad_norm": 8.060673713684082,
      "learning_rate": 3.960441545911204e-05,
      "loss": 2.9318,
      "step": 122
    },
    {
      "epoch": 0.04872734475586808,
      "grad_norm": 8.677541732788086,
      "learning_rate": 3.875244728280676e-05,
      "loss": 2.8636,
      "step": 123
    },
    {
      "epoch": 0.049123502030306035,
      "grad_norm": 8.290096282958984,
      "learning_rate": 3.790390522001662e-05,
      "loss": 2.9682,
      "step": 124
    },
    {
      "epoch": 0.04951965930474398,
      "grad_norm": 7.013187408447266,
      "learning_rate": 3.705904774487396e-05,
      "loss": 2.0986,
      "step": 125
    },
    {
      "epoch": 0.04991581657918193,
      "grad_norm": 7.830003261566162,
      "learning_rate": 3.6218132209150045e-05,
      "loss": 2.6328,
      "step": 126
    },
    {
      "epoch": 0.050311973853619886,
      "grad_norm": 8.967572212219238,
      "learning_rate": 3.5381414763863166e-05,
      "loss": 2.7992,
      "step": 127
    },
    {
      "epoch": 0.05070813112805784,
      "grad_norm": 7.2473907470703125,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 2.2991,
      "step": 128
    },
    {
      "epoch": 0.05110428840249579,
      "grad_norm": 8.087238311767578,
      "learning_rate": 3.372159227714218e-05,
      "loss": 2.2725,
      "step": 129
    },
    {
      "epoch": 0.051500445676933744,
      "grad_norm": 6.30985164642334,
      "learning_rate": 3.289899283371657e-05,
      "loss": 2.2416,
      "step": 130
    },
    {
      "epoch": 0.0518966029513717,
      "grad_norm": 9.02031421661377,
      "learning_rate": 3.2081602522734986e-05,
      "loss": 2.7646,
      "step": 131
    },
    {
      "epoch": 0.05229276022580965,
      "grad_norm": 9.004183769226074,
      "learning_rate": 3.12696703292044e-05,
      "loss": 2.6726,
      "step": 132
    },
    {
      "epoch": 0.052688917500247595,
      "grad_norm": 8.092021942138672,
      "learning_rate": 3.046344357553632e-05,
      "loss": 2.2824,
      "step": 133
    },
    {
      "epoch": 0.05308507477468555,
      "grad_norm": 7.0188703536987305,
      "learning_rate": 2.9663167846209998e-05,
      "loss": 2.0659,
      "step": 134
    },
    {
      "epoch": 0.0534812320491235,
      "grad_norm": 9.969956398010254,
      "learning_rate": 2.886908691296504e-05,
      "loss": 3.0098,
      "step": 135
    },
    {
      "epoch": 0.05387738932356145,
      "grad_norm": 9.049392700195312,
      "learning_rate": 2.8081442660546125e-05,
      "loss": 2.6349,
      "step": 136
    },
    {
      "epoch": 0.054273546597999406,
      "grad_norm": 10.317535400390625,
      "learning_rate": 2.7300475013022663e-05,
      "loss": 2.5721,
      "step": 137
    },
    {
      "epoch": 0.05466970387243736,
      "grad_norm": 10.092238426208496,
      "learning_rate": 2.6526421860705473e-05,
      "loss": 2.8833,
      "step": 138
    },
    {
      "epoch": 0.05506586114687531,
      "grad_norm": 10.784772872924805,
      "learning_rate": 2.575951898768315e-05,
      "loss": 2.398,
      "step": 139
    },
    {
      "epoch": 0.055462018421313264,
      "grad_norm": 9.963539123535156,
      "learning_rate": 2.500000000000001e-05,
      "loss": 2.9027,
      "step": 140
    },
    {
      "epoch": 0.05585817569575122,
      "grad_norm": 8.670623779296875,
      "learning_rate": 2.4248096254497288e-05,
      "loss": 2.4363,
      "step": 141
    },
    {
      "epoch": 0.05625433297018916,
      "grad_norm": 8.980132102966309,
      "learning_rate": 2.350403678833976e-05,
      "loss": 2.4414,
      "step": 142
    },
    {
      "epoch": 0.056650490244627115,
      "grad_norm": 10.55542278289795,
      "learning_rate": 2.2768048249248648e-05,
      "loss": 2.432,
      "step": 143
    },
    {
      "epoch": 0.05704664751906507,
      "grad_norm": 9.629323959350586,
      "learning_rate": 2.2040354826462668e-05,
      "loss": 2.451,
      "step": 144
    },
    {
      "epoch": 0.05744280479350302,
      "grad_norm": 11.688613891601562,
      "learning_rate": 2.132117818244771e-05,
      "loss": 2.9462,
      "step": 145
    },
    {
      "epoch": 0.05783896206794097,
      "grad_norm": 9.729354858398438,
      "learning_rate": 2.061073738537635e-05,
      "loss": 2.1626,
      "step": 146
    },
    {
      "epoch": 0.058235119342378926,
      "grad_norm": 12.347620964050293,
      "learning_rate": 1.9909248842397584e-05,
      "loss": 2.7457,
      "step": 147
    },
    {
      "epoch": 0.05863127661681688,
      "grad_norm": 9.605571746826172,
      "learning_rate": 1.9216926233717085e-05,
      "loss": 2.2582,
      "step": 148
    },
    {
      "epoch": 0.05902743389125483,
      "grad_norm": 13.267195701599121,
      "learning_rate": 1.8533980447508137e-05,
      "loss": 3.0407,
      "step": 149
    },
    {
      "epoch": 0.05942359116569278,
      "grad_norm": 14.605932235717773,
      "learning_rate": 1.7860619515673033e-05,
      "loss": 2.9647,
      "step": 150
    },
    {
      "epoch": 0.05942359116569278,
      "eval_loss": 0.6294466853141785,
      "eval_runtime": 340.6566,
      "eval_samples_per_second": 12.482,
      "eval_steps_per_second": 6.241,
      "step": 150
    },
    {
      "epoch": 0.05981974844013073,
      "grad_norm": 4.8937788009643555,
      "learning_rate": 1.7197048550474643e-05,
      "loss": 2.1533,
      "step": 151
    },
    {
      "epoch": 0.06021590571456868,
      "grad_norm": 5.875320911407471,
      "learning_rate": 1.6543469682057106e-05,
      "loss": 2.0878,
      "step": 152
    },
    {
      "epoch": 0.060612062989006635,
      "grad_norm": 6.488078594207764,
      "learning_rate": 1.5900081996875083e-05,
      "loss": 2.6752,
      "step": 153
    },
    {
      "epoch": 0.06100822026344459,
      "grad_norm": 6.426373481750488,
      "learning_rate": 1.526708147705013e-05,
      "loss": 2.639,
      "step": 154
    },
    {
      "epoch": 0.06140437753788254,
      "grad_norm": 6.61283540725708,
      "learning_rate": 1.4644660940672627e-05,
      "loss": 2.8925,
      "step": 155
    },
    {
      "epoch": 0.06180053481232049,
      "grad_norm": 6.746737957000732,
      "learning_rate": 1.4033009983067452e-05,
      "loss": 2.5429,
      "step": 156
    },
    {
      "epoch": 0.062196692086758446,
      "grad_norm": 5.992364883422852,
      "learning_rate": 1.3432314919041478e-05,
      "loss": 2.3126,
      "step": 157
    },
    {
      "epoch": 0.0625928493611964,
      "grad_norm": 6.37977933883667,
      "learning_rate": 1.2842758726130283e-05,
      "loss": 2.2988,
      "step": 158
    },
    {
      "epoch": 0.06298900663563435,
      "grad_norm": 6.413293838500977,
      "learning_rate": 1.22645209888614e-05,
      "loss": 2.6317,
      "step": 159
    },
    {
      "epoch": 0.0633851639100723,
      "grad_norm": 6.493679046630859,
      "learning_rate": 1.1697777844051105e-05,
      "loss": 2.4086,
      "step": 160
    },
    {
      "epoch": 0.06378132118451026,
      "grad_norm": 7.268621921539307,
      "learning_rate": 1.1142701927151456e-05,
      "loss": 2.534,
      "step": 161
    },
    {
      "epoch": 0.06417747845894821,
      "grad_norm": 6.6669230461120605,
      "learning_rate": 1.0599462319663905e-05,
      "loss": 2.3878,
      "step": 162
    },
    {
      "epoch": 0.06457363573338615,
      "grad_norm": 6.02770471572876,
      "learning_rate": 1.006822449763537e-05,
      "loss": 1.9933,
      "step": 163
    },
    {
      "epoch": 0.0649697930078241,
      "grad_norm": 7.1139326095581055,
      "learning_rate": 9.549150281252633e-06,
      "loss": 2.3498,
      "step": 164
    },
    {
      "epoch": 0.06536595028226205,
      "grad_norm": 7.1752610206604,
      "learning_rate": 9.042397785550405e-06,
      "loss": 2.7669,
      "step": 165
    },
    {
      "epoch": 0.0657621075567,
      "grad_norm": 8.256402015686035,
      "learning_rate": 8.548121372247918e-06,
      "loss": 2.7502,
      "step": 166
    },
    {
      "epoch": 0.06615826483113796,
      "grad_norm": 7.215768814086914,
      "learning_rate": 8.066471602728803e-06,
      "loss": 2.2185,
      "step": 167
    },
    {
      "epoch": 0.06655442210557591,
      "grad_norm": 7.52031946182251,
      "learning_rate": 7.597595192178702e-06,
      "loss": 2.2662,
      "step": 168
    },
    {
      "epoch": 0.06695057938001386,
      "grad_norm": 7.943612575531006,
      "learning_rate": 7.1416349648943894e-06,
      "loss": 2.4433,
      "step": 169
    },
    {
      "epoch": 0.06734673665445182,
      "grad_norm": 8.064732551574707,
      "learning_rate": 6.698729810778065e-06,
      "loss": 2.7827,
      "step": 170
    },
    {
      "epoch": 0.06774289392888977,
      "grad_norm": 8.597424507141113,
      "learning_rate": 6.269014643030213e-06,
      "loss": 2.8361,
      "step": 171
    },
    {
      "epoch": 0.06813905120332772,
      "grad_norm": 7.607199668884277,
      "learning_rate": 5.852620357053651e-06,
      "loss": 2.032,
      "step": 172
    },
    {
      "epoch": 0.06853520847776567,
      "grad_norm": 7.377221584320068,
      "learning_rate": 5.449673790581611e-06,
      "loss": 2.5357,
      "step": 173
    },
    {
      "epoch": 0.06893136575220363,
      "grad_norm": 7.95181941986084,
      "learning_rate": 5.060297685041659e-06,
      "loss": 2.6041,
      "step": 174
    },
    {
      "epoch": 0.06932752302664158,
      "grad_norm": 6.826475620269775,
      "learning_rate": 4.684610648167503e-06,
      "loss": 2.0748,
      "step": 175
    },
    {
      "epoch": 0.06972368030107953,
      "grad_norm": 7.466386795043945,
      "learning_rate": 4.322727117869951e-06,
      "loss": 2.5444,
      "step": 176
    },
    {
      "epoch": 0.07011983757551749,
      "grad_norm": 9.77598762512207,
      "learning_rate": 3.974757327377981e-06,
      "loss": 3.4488,
      "step": 177
    },
    {
      "epoch": 0.07051599484995544,
      "grad_norm": 8.368354797363281,
      "learning_rate": 3.6408072716606346e-06,
      "loss": 2.3909,
      "step": 178
    },
    {
      "epoch": 0.07091215212439339,
      "grad_norm": 7.990671157836914,
      "learning_rate": 3.3209786751399187e-06,
      "loss": 2.2496,
      "step": 179
    },
    {
      "epoch": 0.07130830939883133,
      "grad_norm": 8.505362510681152,
      "learning_rate": 3.0153689607045845e-06,
      "loss": 2.3256,
      "step": 180
    },
    {
      "epoch": 0.07170446667326928,
      "grad_norm": 7.670304298400879,
      "learning_rate": 2.724071220034158e-06,
      "loss": 2.0108,
      "step": 181
    },
    {
      "epoch": 0.07210062394770723,
      "grad_norm": 7.144657135009766,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 2.1987,
      "step": 182
    },
    {
      "epoch": 0.07249678122214519,
      "grad_norm": 7.975318908691406,
      "learning_rate": 2.1847622018482283e-06,
      "loss": 2.2561,
      "step": 183
    },
    {
      "epoch": 0.07289293849658314,
      "grad_norm": 11.282790184020996,
      "learning_rate": 1.9369152030840556e-06,
      "loss": 2.6531,
      "step": 184
    },
    {
      "epoch": 0.07328909577102109,
      "grad_norm": 7.126767158508301,
      "learning_rate": 1.70370868554659e-06,
      "loss": 2.111,
      "step": 185
    },
    {
      "epoch": 0.07368525304545905,
      "grad_norm": 9.32459545135498,
      "learning_rate": 1.4852136862001764e-06,
      "loss": 2.8113,
      "step": 186
    },
    {
      "epoch": 0.074081410319897,
      "grad_norm": 9.883260726928711,
      "learning_rate": 1.2814967607382432e-06,
      "loss": 2.2595,
      "step": 187
    },
    {
      "epoch": 0.07447756759433495,
      "grad_norm": 8.952421188354492,
      "learning_rate": 1.0926199633097157e-06,
      "loss": 2.1888,
      "step": 188
    },
    {
      "epoch": 0.0748737248687729,
      "grad_norm": 8.629685401916504,
      "learning_rate": 9.186408276168013e-07,
      "loss": 2.0509,
      "step": 189
    },
    {
      "epoch": 0.07526988214321086,
      "grad_norm": 8.945446968078613,
      "learning_rate": 7.596123493895991e-07,
      "loss": 2.0993,
      "step": 190
    },
    {
      "epoch": 0.07566603941764881,
      "grad_norm": 9.784154891967773,
      "learning_rate": 6.15582970243117e-07,
      "loss": 2.693,
      "step": 191
    },
    {
      "epoch": 0.07606219669208676,
      "grad_norm": 12.141247749328613,
      "learning_rate": 4.865965629214819e-07,
      "loss": 3.0066,
      "step": 192
    },
    {
      "epoch": 0.07645835396652471,
      "grad_norm": 9.189563751220703,
      "learning_rate": 3.7269241793390085e-07,
      "loss": 2.3949,
      "step": 193
    },
    {
      "epoch": 0.07685451124096267,
      "grad_norm": 10.456135749816895,
      "learning_rate": 2.7390523158633554e-07,
      "loss": 2.578,
      "step": 194
    },
    {
      "epoch": 0.07725066851540062,
      "grad_norm": 11.396419525146484,
      "learning_rate": 1.9026509541272275e-07,
      "loss": 2.6849,
      "step": 195
    },
    {
      "epoch": 0.07764682578983857,
      "grad_norm": 12.435629844665527,
      "learning_rate": 1.2179748700879012e-07,
      "loss": 3.1103,
      "step": 196
    },
    {
      "epoch": 0.07804298306427651,
      "grad_norm": 11.277499198913574,
      "learning_rate": 6.852326227130834e-08,
      "loss": 3.2427,
      "step": 197
    },
    {
      "epoch": 0.07843914033871446,
      "grad_norm": 11.502141952514648,
      "learning_rate": 3.04586490452119e-08,
      "loss": 3.1574,
      "step": 198
    },
    {
      "epoch": 0.07883529761315242,
      "grad_norm": 11.261180877685547,
      "learning_rate": 7.615242180436522e-09,
      "loss": 2.6461,
      "step": 199
    },
    {
      "epoch": 0.07923145488759037,
      "grad_norm": 11.87040901184082,
      "learning_rate": 0.0,
      "loss": 3.0392,
      "step": 200
    },
    {
      "epoch": 0.07923145488759037,
      "eval_loss": 0.6193411350250244,
      "eval_runtime": 340.4464,
      "eval_samples_per_second": 12.489,
      "eval_steps_per_second": 6.245,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.861996017975296e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}