{
  "best_metric": 0.3114449679851532,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.0506906602458497,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0002534533012292485,
      "grad_norm": 0.5637602210044861,
      "learning_rate": 7e-06,
      "loss": 0.7307,
      "step": 1
    },
    {
      "epoch": 0.0002534533012292485,
      "eval_loss": 0.9512640833854675,
      "eval_runtime": 532.1115,
      "eval_samples_per_second": 12.49,
      "eval_steps_per_second": 3.123,
      "step": 1
    },
    {
      "epoch": 0.000506906602458497,
      "grad_norm": 0.6604697704315186,
      "learning_rate": 1.4e-05,
      "loss": 0.6945,
      "step": 2
    },
    {
      "epoch": 0.0007603599036877456,
      "grad_norm": 0.7113285660743713,
      "learning_rate": 2.1e-05,
      "loss": 0.6647,
      "step": 3
    },
    {
      "epoch": 0.001013813204916994,
      "grad_norm": 0.5982671976089478,
      "learning_rate": 2.8e-05,
      "loss": 0.7014,
      "step": 4
    },
    {
      "epoch": 0.0012672665061462425,
      "grad_norm": 0.6680089235305786,
      "learning_rate": 3.5e-05,
      "loss": 0.6862,
      "step": 5
    },
    {
      "epoch": 0.0015207198073754911,
      "grad_norm": 0.693647027015686,
      "learning_rate": 4.2e-05,
      "loss": 0.6805,
      "step": 6
    },
    {
      "epoch": 0.0017741731086047396,
      "grad_norm": 0.6376394033432007,
      "learning_rate": 4.899999999999999e-05,
      "loss": 0.6006,
      "step": 7
    },
    {
      "epoch": 0.002027626409833988,
      "grad_norm": 0.6353195905685425,
      "learning_rate": 5.6e-05,
      "loss": 0.6495,
      "step": 8
    },
    {
      "epoch": 0.0022810797110632366,
      "grad_norm": 0.5442299842834473,
      "learning_rate": 6.3e-05,
      "loss": 0.5904,
      "step": 9
    },
    {
      "epoch": 0.002534533012292485,
      "grad_norm": 0.5597531795501709,
      "learning_rate": 7e-05,
      "loss": 0.6404,
      "step": 10
    },
    {
      "epoch": 0.0027879863135217334,
      "grad_norm": 0.5673569440841675,
      "learning_rate": 6.999521567473641e-05,
      "loss": 0.5773,
      "step": 11
    },
    {
      "epoch": 0.0030414396147509823,
      "grad_norm": 0.5594638586044312,
      "learning_rate": 6.998086400693241e-05,
      "loss": 0.5848,
      "step": 12
    },
    {
      "epoch": 0.0032948929159802307,
      "grad_norm": 0.5175847411155701,
      "learning_rate": 6.995694892019065e-05,
      "loss": 0.5737,
      "step": 13
    },
    {
      "epoch": 0.003548346217209479,
      "grad_norm": 0.5732212662696838,
      "learning_rate": 6.99234769526571e-05,
      "loss": 0.4932,
      "step": 14
    },
    {
      "epoch": 0.0038017995184387275,
      "grad_norm": 0.6050289869308472,
      "learning_rate": 6.988045725523343e-05,
      "loss": 0.4876,
      "step": 15
    },
    {
      "epoch": 0.004055252819667976,
      "grad_norm": 0.6290268898010254,
      "learning_rate": 6.982790158907539e-05,
      "loss": 0.4673,
      "step": 16
    },
    {
      "epoch": 0.004308706120897224,
      "grad_norm": 0.8223831653594971,
      "learning_rate": 6.976582432237733e-05,
      "loss": 0.4756,
      "step": 17
    },
    {
      "epoch": 0.004562159422126473,
      "grad_norm": 0.633542537689209,
      "learning_rate": 6.969424242644413e-05,
      "loss": 0.4682,
      "step": 18
    },
    {
      "epoch": 0.004815612723355722,
      "grad_norm": 0.6778817772865295,
      "learning_rate": 6.961317547105138e-05,
      "loss": 0.4258,
      "step": 19
    },
    {
      "epoch": 0.00506906602458497,
      "grad_norm": 0.7767919898033142,
      "learning_rate": 6.952264561909527e-05,
      "loss": 0.4529,
      "step": 20
    },
    {
      "epoch": 0.005322519325814219,
      "grad_norm": 0.7921961545944214,
      "learning_rate": 6.942267762053337e-05,
      "loss": 0.4602,
      "step": 21
    },
    {
      "epoch": 0.005575972627043467,
      "grad_norm": 0.6767591834068298,
      "learning_rate": 6.931329880561832e-05,
      "loss": 0.3301,
      "step": 22
    },
    {
      "epoch": 0.005829425928272716,
      "grad_norm": 0.7602695226669312,
      "learning_rate": 6.919453907742597e-05,
      "loss": 0.3753,
      "step": 23
    },
    {
      "epoch": 0.0060828792295019645,
      "grad_norm": 1.0894685983657837,
      "learning_rate": 6.90664309036802e-05,
      "loss": 0.4138,
      "step": 24
    },
    {
      "epoch": 0.0063363325307312125,
      "grad_norm": 0.7795313596725464,
      "learning_rate": 6.892900930787656e-05,
      "loss": 0.3878,
      "step": 25
    },
    {
      "epoch": 0.006589785831960461,
      "grad_norm": 0.797916829586029,
      "learning_rate": 6.87823118597072e-05,
      "loss": 0.4445,
      "step": 26
    },
    {
      "epoch": 0.00684323913318971,
      "grad_norm": 0.8681225776672363,
      "learning_rate": 6.862637866478969e-05,
      "loss": 0.3423,
      "step": 27
    },
    {
      "epoch": 0.007096692434418958,
      "grad_norm": 0.7283143401145935,
      "learning_rate": 6.846125235370252e-05,
      "loss": 0.3813,
      "step": 28
    },
    {
      "epoch": 0.007350145735648207,
      "grad_norm": 0.6523985266685486,
      "learning_rate": 6.828697807033038e-05,
      "loss": 0.3282,
      "step": 29
    },
    {
      "epoch": 0.007603599036877455,
      "grad_norm": 0.7353911399841309,
      "learning_rate": 6.81036034595222e-05,
      "loss": 0.291,
      "step": 30
    },
    {
      "epoch": 0.007857052338106704,
      "grad_norm": 0.9172014594078064,
      "learning_rate": 6.791117865406564e-05,
      "loss": 0.3509,
      "step": 31
    },
    {
      "epoch": 0.008110505639335953,
      "grad_norm": 0.9462753534317017,
      "learning_rate": 6.770975626098112e-05,
      "loss": 0.4434,
      "step": 32
    },
    {
      "epoch": 0.008363958940565202,
      "grad_norm": 1.9650354385375977,
      "learning_rate": 6.749939134713974e-05,
      "loss": 0.3918,
      "step": 33
    },
    {
      "epoch": 0.008617412241794449,
      "grad_norm": 0.9554181694984436,
      "learning_rate": 6.728014142420846e-05,
      "loss": 0.3665,
      "step": 34
    },
    {
      "epoch": 0.008870865543023698,
      "grad_norm": 1.0999562740325928,
      "learning_rate": 6.7052066432927e-05,
      "loss": 0.3027,
      "step": 35
    },
    {
      "epoch": 0.009124318844252946,
      "grad_norm": 0.9678001999855042,
      "learning_rate": 6.681522872672069e-05,
      "loss": 0.4272,
      "step": 36
    },
    {
      "epoch": 0.009377772145482195,
      "grad_norm": 3.732379913330078,
      "learning_rate": 6.656969305465356e-05,
      "loss": 0.3871,
      "step": 37
    },
    {
      "epoch": 0.009631225446711444,
      "grad_norm": 1.7614824771881104,
      "learning_rate": 6.631552654372672e-05,
      "loss": 0.4311,
      "step": 38
    },
    {
      "epoch": 0.009884678747940691,
      "grad_norm": 0.8726099133491516,
      "learning_rate": 6.60527986805264e-05,
      "loss": 0.3094,
      "step": 39
    },
    {
      "epoch": 0.01013813204916994,
      "grad_norm": 0.9563500881195068,
      "learning_rate": 6.578158129222711e-05,
      "loss": 0.3162,
      "step": 40
    },
    {
      "epoch": 0.010391585350399189,
      "grad_norm": 0.8088050484657288,
      "learning_rate": 6.550194852695469e-05,
      "loss": 0.3227,
      "step": 41
    },
    {
      "epoch": 0.010645038651628438,
      "grad_norm": 0.749538004398346,
      "learning_rate": 6.521397683351509e-05,
      "loss": 0.3582,
      "step": 42
    },
    {
      "epoch": 0.010898491952857687,
      "grad_norm": 0.8230732679367065,
      "learning_rate": 6.491774494049386e-05,
      "loss": 0.3914,
      "step": 43
    },
    {
      "epoch": 0.011151945254086934,
      "grad_norm": 0.6536163687705994,
      "learning_rate": 6.461333383473272e-05,
      "loss": 0.2768,
      "step": 44
    },
    {
      "epoch": 0.011405398555316183,
      "grad_norm": 0.8638445138931274,
      "learning_rate": 6.430082673918849e-05,
      "loss": 0.4332,
      "step": 45
    },
    {
      "epoch": 0.011658851856545431,
      "grad_norm": 1.0308754444122314,
      "learning_rate": 6.398030909018069e-05,
      "loss": 0.2682,
      "step": 46
    },
    {
      "epoch": 0.01191230515777468,
      "grad_norm": 0.7687364816665649,
      "learning_rate": 6.365186851403423e-05,
      "loss": 0.341,
      "step": 47
    },
    {
      "epoch": 0.012165758459003929,
      "grad_norm": 0.7668923139572144,
      "learning_rate": 6.331559480312315e-05,
      "loss": 0.3038,
      "step": 48
    },
    {
      "epoch": 0.012419211760233176,
      "grad_norm": 1.1220157146453857,
      "learning_rate": 6.297157989132236e-05,
      "loss": 0.3223,
      "step": 49
    },
    {
      "epoch": 0.012672665061462425,
      "grad_norm": 1.1568130254745483,
      "learning_rate": 6.261991782887377e-05,
      "loss": 0.442,
      "step": 50
    },
    {
      "epoch": 0.012672665061462425,
      "eval_loss": 0.3776959776878357,
      "eval_runtime": 534.8367,
      "eval_samples_per_second": 12.426,
      "eval_steps_per_second": 3.107,
      "step": 50
    },
    {
      "epoch": 0.012926118362691674,
      "grad_norm": 0.4836099445819855,
      "learning_rate": 6.226070475667393e-05,
      "loss": 0.5211,
      "step": 51
    },
    {
      "epoch": 0.013179571663920923,
      "grad_norm": 0.4805043935775757,
      "learning_rate": 6.189403887999006e-05,
      "loss": 0.4534,
      "step": 52
    },
    {
      "epoch": 0.013433024965150172,
      "grad_norm": 0.6214394569396973,
      "learning_rate": 6.152002044161171e-05,
      "loss": 0.4429,
      "step": 53
    },
    {
      "epoch": 0.01368647826637942,
      "grad_norm": 0.6329332590103149,
      "learning_rate": 6.113875169444539e-05,
      "loss": 0.4009,
      "step": 54
    },
    {
      "epoch": 0.013939931567608668,
      "grad_norm": 0.49672988057136536,
      "learning_rate": 6.0750336873559605e-05,
      "loss": 0.4263,
      "step": 55
    },
    {
      "epoch": 0.014193384868837916,
      "grad_norm": 0.4916054606437683,
      "learning_rate": 6.035488216768811e-05,
      "loss": 0.3832,
      "step": 56
    },
    {
      "epoch": 0.014446838170067165,
      "grad_norm": 0.6797816753387451,
      "learning_rate": 5.9952495690198894e-05,
      "loss": 0.4623,
      "step": 57
    },
    {
      "epoch": 0.014700291471296414,
      "grad_norm": 0.4841528534889221,
      "learning_rate": 5.954328744953709e-05,
      "loss": 0.3384,
      "step": 58
    },
    {
      "epoch": 0.014953744772525663,
      "grad_norm": 0.49140116572380066,
      "learning_rate": 5.91273693191498e-05,
      "loss": 0.3551,
      "step": 59
    },
    {
      "epoch": 0.01520719807375491,
      "grad_norm": 0.39423346519470215,
      "learning_rate": 5.870485500690094e-05,
      "loss": 0.4004,
      "step": 60
    },
    {
      "epoch": 0.015460651374984159,
      "grad_norm": 0.5048249959945679,
      "learning_rate": 5.827586002398468e-05,
      "loss": 0.3857,
      "step": 61
    },
    {
      "epoch": 0.015714104676213408,
      "grad_norm": 0.36948123574256897,
      "learning_rate": 5.784050165334589e-05,
      "loss": 0.3812,
      "step": 62
    },
    {
      "epoch": 0.015967557977442655,
      "grad_norm": 0.3576664626598358,
      "learning_rate": 5.739889891761608e-05,
      "loss": 0.2966,
      "step": 63
    },
    {
      "epoch": 0.016221011278671905,
      "grad_norm": 0.79672771692276,
      "learning_rate": 5.6951172546573794e-05,
      "loss": 0.3761,
      "step": 64
    },
    {
      "epoch": 0.016474464579901153,
      "grad_norm": 0.521947979927063,
      "learning_rate": 5.6497444944138376e-05,
      "loss": 0.3655,
      "step": 65
    },
    {
      "epoch": 0.016727917881130403,
      "grad_norm": 0.5354653000831604,
      "learning_rate": 5.603784015490587e-05,
      "loss": 0.4187,
      "step": 66
    },
    {
      "epoch": 0.01698137118235965,
      "grad_norm": 0.5953655242919922,
      "learning_rate": 5.557248383023655e-05,
      "loss": 0.3345,
      "step": 67
    },
    {
      "epoch": 0.017234824483588897,
      "grad_norm": 0.5908196568489075,
      "learning_rate": 5.510150319390302e-05,
      "loss": 0.3566,
      "step": 68
    },
    {
      "epoch": 0.017488277784818148,
      "grad_norm": 0.5005892515182495,
      "learning_rate": 5.4625027007308546e-05,
      "loss": 0.354,
      "step": 69
    },
    {
      "epoch": 0.017741731086047395,
      "grad_norm": 0.4641432762145996,
      "learning_rate": 5.414318553428494e-05,
      "loss": 0.2931,
      "step": 70
    },
    {
      "epoch": 0.017995184387276646,
      "grad_norm": 0.48989179730415344,
      "learning_rate": 5.3656110505479776e-05,
      "loss": 0.3001,
      "step": 71
    },
    {
      "epoch": 0.018248637688505893,
      "grad_norm": 0.525313138961792,
      "learning_rate": 5.316393508234253e-05,
      "loss": 0.3572,
      "step": 72
    },
    {
      "epoch": 0.01850209098973514,
      "grad_norm": 0.4779028594493866,
      "learning_rate": 5.266679382071953e-05,
      "loss": 0.2881,
      "step": 73
    },
    {
      "epoch": 0.01875554429096439,
      "grad_norm": 0.6258348822593689,
      "learning_rate": 5.216482263406778e-05,
      "loss": 0.3244,
      "step": 74
    },
    {
      "epoch": 0.019008997592193638,
      "grad_norm": 0.6550619006156921,
      "learning_rate": 5.1658158756297576e-05,
      "loss": 0.281,
      "step": 75
    },
    {
      "epoch": 0.019262450893422888,
      "grad_norm": 0.5427703857421875,
      "learning_rate": 5.114694070425407e-05,
      "loss": 0.303,
      "step": 76
    },
    {
      "epoch": 0.019515904194652135,
      "grad_norm": 0.6002343893051147,
      "learning_rate": 5.063130823984823e-05,
      "loss": 0.2908,
      "step": 77
    },
    {
      "epoch": 0.019769357495881382,
      "grad_norm": 0.9281688928604126,
      "learning_rate": 5.011140233184724e-05,
      "loss": 0.3231,
      "step": 78
    },
    {
      "epoch": 0.020022810797110633,
      "grad_norm": 0.8118467926979065,
      "learning_rate": 4.958736511733516e-05,
      "loss": 0.3601,
      "step": 79
    },
    {
      "epoch": 0.02027626409833988,
      "grad_norm": 0.5715822577476501,
      "learning_rate": 4.905933986285393e-05,
      "loss": 0.2867,
      "step": 80
    },
    {
      "epoch": 0.02052971739956913,
      "grad_norm": 0.9286966919898987,
      "learning_rate": 4.8527470925235824e-05,
      "loss": 0.3645,
      "step": 81
    },
    {
      "epoch": 0.020783170700798378,
      "grad_norm": 0.620093584060669,
      "learning_rate": 4.799190371213772e-05,
      "loss": 0.2954,
      "step": 82
    },
    {
      "epoch": 0.021036624002027625,
      "grad_norm": 0.6234807372093201,
      "learning_rate": 4.745278464228808e-05,
      "loss": 0.3298,
      "step": 83
    },
    {
      "epoch": 0.021290077303256875,
      "grad_norm": 0.8412039875984192,
      "learning_rate": 4.69102611054575e-05,
      "loss": 0.2926,
      "step": 84
    },
    {
      "epoch": 0.021543530604486123,
      "grad_norm": 0.703763484954834,
      "learning_rate": 4.6364481422163926e-05,
      "loss": 0.3051,
      "step": 85
    },
    {
      "epoch": 0.021796983905715373,
      "grad_norm": 0.6281498670578003,
      "learning_rate": 4.581559480312316e-05,
      "loss": 0.3129,
      "step": 86
    },
    {
      "epoch": 0.02205043720694462,
      "grad_norm": 0.590596616268158,
      "learning_rate": 4.526375130845627e-05,
      "loss": 0.2383,
      "step": 87
    },
    {
      "epoch": 0.022303890508173867,
      "grad_norm": 0.6789559125900269,
      "learning_rate": 4.4709101806664554e-05,
      "loss": 0.2878,
      "step": 88
    },
    {
      "epoch": 0.022557343809403118,
      "grad_norm": 0.5185131430625916,
      "learning_rate": 4.4151797933383685e-05,
      "loss": 0.2255,
      "step": 89
    },
    {
      "epoch": 0.022810797110632365,
      "grad_norm": 0.6621422171592712,
      "learning_rate": 4.359199204992797e-05,
      "loss": 0.3419,
      "step": 90
    },
    {
      "epoch": 0.023064250411861616,
      "grad_norm": 0.8429589867591858,
      "learning_rate": 4.30298372016363e-05,
      "loss": 0.3345,
      "step": 91
    },
    {
      "epoch": 0.023317703713090863,
      "grad_norm": 0.543500542640686,
      "learning_rate": 4.246548707603114e-05,
      "loss": 0.2236,
      "step": 92
    },
    {
      "epoch": 0.02357115701432011,
      "grad_norm": 0.6969960927963257,
      "learning_rate": 4.1899095960801805e-05,
      "loss": 0.28,
      "step": 93
    },
    {
      "epoch": 0.02382461031554936,
      "grad_norm": 0.8858791589736938,
      "learning_rate": 4.133081870162385e-05,
      "loss": 0.3377,
      "step": 94
    },
    {
      "epoch": 0.024078063616778608,
      "grad_norm": 0.7956605553627014,
      "learning_rate": 4.076081065982569e-05,
      "loss": 0.2852,
      "step": 95
    },
    {
      "epoch": 0.024331516918007858,
      "grad_norm": 0.8670409917831421,
      "learning_rate": 4.018922766991447e-05,
      "loss": 0.3319,
      "step": 96
    },
    {
      "epoch": 0.024584970219237105,
      "grad_norm": 0.8773281574249268,
      "learning_rate": 3.961622599697241e-05,
      "loss": 0.2355,
      "step": 97
    },
    {
      "epoch": 0.024838423520466352,
      "grad_norm": 0.641528844833374,
      "learning_rate": 3.9041962293935516e-05,
      "loss": 0.3297,
      "step": 98
    },
    {
      "epoch": 0.025091876821695603,
      "grad_norm": 0.7666037678718567,
      "learning_rate": 3.84665935587662e-05,
      "loss": 0.2192,
      "step": 99
    },
    {
      "epoch": 0.02534533012292485,
      "grad_norm": 1.073132872581482,
      "learning_rate": 3.7890277091531636e-05,
      "loss": 0.3292,
      "step": 100
    },
    {
      "epoch": 0.02534533012292485,
      "eval_loss": 0.33073779940605164,
      "eval_runtime": 535.0717,
      "eval_samples_per_second": 12.421,
      "eval_steps_per_second": 3.106,
      "step": 100
    },
    {
      "epoch": 0.0255987834241541,
      "grad_norm": 0.7879229187965393,
      "learning_rate": 3.7313170451399475e-05,
      "loss": 0.4689,
      "step": 101
    },
    {
      "epoch": 0.025852236725383348,
      "grad_norm": 0.349199116230011,
      "learning_rate": 3.673543141356278e-05,
      "loss": 0.426,
      "step": 102
    },
    {
      "epoch": 0.026105690026612595,
      "grad_norm": 0.3625866770744324,
      "learning_rate": 3.6157217926105783e-05,
      "loss": 0.4304,
      "step": 103
    },
    {
      "epoch": 0.026359143327841845,
      "grad_norm": 0.4031258821487427,
      "learning_rate": 3.557868806682255e-05,
      "loss": 0.4335,
      "step": 104
    },
    {
      "epoch": 0.026612596629071093,
      "grad_norm": 0.4257863461971283,
      "learning_rate": 3.5e-05,
      "loss": 0.3935,
      "step": 105
    },
    {
      "epoch": 0.026866049930300343,
      "grad_norm": 0.44811195135116577,
      "learning_rate": 3.442131193317745e-05,
      "loss": 0.3787,
      "step": 106
    },
    {
      "epoch": 0.02711950323152959,
      "grad_norm": 0.5182667374610901,
      "learning_rate": 3.384278207389421e-05,
      "loss": 0.3766,
      "step": 107
    },
    {
      "epoch": 0.02737295653275884,
      "grad_norm": 0.4968753457069397,
      "learning_rate": 3.3264568586437216e-05,
      "loss": 0.42,
      "step": 108
    },
    {
      "epoch": 0.027626409833988088,
      "grad_norm": 0.5043284296989441,
      "learning_rate": 3.268682954860052e-05,
      "loss": 0.3288,
      "step": 109
    },
    {
      "epoch": 0.027879863135217335,
      "grad_norm": 0.48617568612098694,
      "learning_rate": 3.210972290846837e-05,
      "loss": 0.3118,
      "step": 110
    },
    {
      "epoch": 0.028133316436446586,
      "grad_norm": 0.480871319770813,
      "learning_rate": 3.15334064412338e-05,
      "loss": 0.3519,
      "step": 111
    },
    {
      "epoch": 0.028386769737675833,
      "grad_norm": 0.6641278862953186,
      "learning_rate": 3.0958037706064485e-05,
      "loss": 0.3141,
      "step": 112
    },
    {
      "epoch": 0.028640223038905083,
      "grad_norm": 0.5058858394622803,
      "learning_rate": 3.038377400302758e-05,
      "loss": 0.3153,
      "step": 113
    },
    {
      "epoch": 0.02889367634013433,
      "grad_norm": 0.45486193895339966,
      "learning_rate": 2.9810772330085524e-05,
      "loss": 0.2909,
      "step": 114
    },
    {
      "epoch": 0.029147129641363578,
      "grad_norm": 0.48425379395484924,
      "learning_rate": 2.9239189340174306e-05,
      "loss": 0.2886,
      "step": 115
    },
    {
      "epoch": 0.029400582942592828,
      "grad_norm": 0.4817794859409332,
      "learning_rate": 2.8669181298376163e-05,
      "loss": 0.2781,
      "step": 116
    },
    {
      "epoch": 0.029654036243822075,
      "grad_norm": 0.6188409924507141,
      "learning_rate": 2.8100904039198193e-05,
      "loss": 0.3106,
      "step": 117
    },
    {
      "epoch": 0.029907489545051326,
      "grad_norm": 0.4940548837184906,
      "learning_rate": 2.7534512923968863e-05,
      "loss": 0.3322,
      "step": 118
    },
    {
      "epoch": 0.030160942846280573,
      "grad_norm": 0.506767988204956,
      "learning_rate": 2.6970162798363695e-05,
      "loss": 0.3107,
      "step": 119
    },
    {
      "epoch": 0.03041439614750982,
      "grad_norm": 0.545142650604248,
      "learning_rate": 2.640800795007203e-05,
      "loss": 0.2842,
      "step": 120
    },
    {
      "epoch": 0.03066784944873907,
      "grad_norm": 0.6052780151367188,
      "learning_rate": 2.5848202066616305e-05,
      "loss": 0.3137,
      "step": 121
    },
    {
      "epoch": 0.030921302749968318,
      "grad_norm": 0.5682511329650879,
      "learning_rate": 2.5290898193335446e-05,
      "loss": 0.2916,
      "step": 122
    },
    {
      "epoch": 0.03117475605119757,
      "grad_norm": 0.4890080988407135,
      "learning_rate": 2.4736248691543736e-05,
      "loss": 0.2588,
      "step": 123
    },
    {
      "epoch": 0.031428209352426816,
      "grad_norm": 0.5377240180969238,
      "learning_rate": 2.4184405196876842e-05,
      "loss": 0.245,
      "step": 124
    },
    {
      "epoch": 0.031681662653656066,
      "grad_norm": 0.7997578978538513,
      "learning_rate": 2.363551857783608e-05,
      "loss": 0.2851,
      "step": 125
    },
    {
      "epoch": 0.03193511595488531,
      "grad_norm": 0.5697923302650452,
      "learning_rate": 2.308973889454249e-05,
      "loss": 0.3223,
      "step": 126
    },
    {
      "epoch": 0.03218856925611456,
      "grad_norm": 0.5995197296142578,
      "learning_rate": 2.2547215357711918e-05,
      "loss": 0.2729,
      "step": 127
    },
    {
      "epoch": 0.03244202255734381,
      "grad_norm": 0.5520817041397095,
      "learning_rate": 2.2008096287862266e-05,
      "loss": 0.3105,
      "step": 128
    },
    {
      "epoch": 0.032695475858573055,
      "grad_norm": 0.5432480573654175,
      "learning_rate": 2.1472529074764177e-05,
      "loss": 0.3281,
      "step": 129
    },
    {
      "epoch": 0.032948929159802305,
      "grad_norm": 0.6505929827690125,
      "learning_rate": 2.0940660137146074e-05,
      "loss": 0.3874,
      "step": 130
    },
    {
      "epoch": 0.033202382461031556,
      "grad_norm": 0.689994215965271,
      "learning_rate": 2.041263488266484e-05,
      "loss": 0.3136,
      "step": 131
    },
    {
      "epoch": 0.033455835762260806,
      "grad_norm": 8.160080909729004,
      "learning_rate": 1.988859766815275e-05,
      "loss": 0.3555,
      "step": 132
    },
    {
      "epoch": 0.03370928906349005,
      "grad_norm": 0.6300246715545654,
      "learning_rate": 1.9368691760151773e-05,
      "loss": 0.2766,
      "step": 133
    },
    {
      "epoch": 0.0339627423647193,
      "grad_norm": 0.5888091325759888,
      "learning_rate": 1.885305929574593e-05,
      "loss": 0.2724,
      "step": 134
    },
    {
      "epoch": 0.03421619566594855,
      "grad_norm": 0.7288299798965454,
      "learning_rate": 1.8341841243702424e-05,
      "loss": 0.3592,
      "step": 135
    },
    {
      "epoch": 0.034469648967177795,
      "grad_norm": 0.9315939545631409,
      "learning_rate": 1.7835177365932225e-05,
      "loss": 0.4391,
      "step": 136
    },
    {
      "epoch": 0.034723102268407045,
      "grad_norm": 0.9517157673835754,
      "learning_rate": 1.7333206179280478e-05,
      "loss": 0.2577,
      "step": 137
    },
    {
      "epoch": 0.034976555569636296,
      "grad_norm": 0.5201305747032166,
      "learning_rate": 1.6836064917657478e-05,
      "loss": 0.2471,
      "step": 138
    },
    {
      "epoch": 0.03523000887086554,
      "grad_norm": 0.7326086163520813,
      "learning_rate": 1.6343889494520224e-05,
      "loss": 0.3126,
      "step": 139
    },
    {
      "epoch": 0.03548346217209479,
      "grad_norm": 0.6664350628852844,
      "learning_rate": 1.5856814465715064e-05,
      "loss": 0.2755,
      "step": 140
    },
    {
      "epoch": 0.03573691547332404,
      "grad_norm": 0.6586111187934875,
      "learning_rate": 1.5374972992691458e-05,
      "loss": 0.2913,
      "step": 141
    },
    {
      "epoch": 0.03599036877455329,
      "grad_norm": 0.5519941449165344,
      "learning_rate": 1.4898496806096974e-05,
      "loss": 0.2339,
      "step": 142
    },
    {
      "epoch": 0.036243822075782535,
      "grad_norm": 0.6732093095779419,
      "learning_rate": 1.4427516169763444e-05,
      "loss": 0.3004,
      "step": 143
    },
    {
      "epoch": 0.036497275377011786,
      "grad_norm": 0.671667754650116,
      "learning_rate": 1.396215984509412e-05,
      "loss": 0.3293,
      "step": 144
    },
    {
      "epoch": 0.036750728678241036,
      "grad_norm": 0.5071290135383606,
      "learning_rate": 1.3502555055861625e-05,
      "loss": 0.1972,
      "step": 145
    },
    {
      "epoch": 0.03700418197947028,
      "grad_norm": 0.8010750412940979,
      "learning_rate": 1.3048827453426203e-05,
      "loss": 0.3243,
      "step": 146
    },
    {
      "epoch": 0.03725763528069953,
      "grad_norm": 0.6564486026763916,
      "learning_rate": 1.2601101082383917e-05,
      "loss": 0.2197,
      "step": 147
    },
    {
      "epoch": 0.03751108858192878,
      "grad_norm": 0.9637052416801453,
      "learning_rate": 1.2159498346654094e-05,
      "loss": 0.2946,
      "step": 148
    },
    {
      "epoch": 0.03776454188315803,
      "grad_norm": 0.7391737103462219,
      "learning_rate": 1.1724139976015306e-05,
      "loss": 0.2606,
      "step": 149
    },
    {
      "epoch": 0.038017995184387275,
      "grad_norm": 0.9585252404212952,
      "learning_rate": 1.1295144993099068e-05,
      "loss": 0.2699,
      "step": 150
    },
    {
      "epoch": 0.038017995184387275,
      "eval_loss": 0.3155807852745056,
      "eval_runtime": 535.2039,
      "eval_samples_per_second": 12.418,
      "eval_steps_per_second": 3.105,
      "step": 150
    },
    {
      "epoch": 0.038271448485616526,
      "grad_norm": 0.36455297470092773,
      "learning_rate": 1.0872630680850196e-05,
      "loss": 0.4006,
      "step": 151
    },
    {
      "epoch": 0.038524901786845776,
      "grad_norm": 0.31821301579475403,
      "learning_rate": 1.0456712550462898e-05,
      "loss": 0.3745,
      "step": 152
    },
    {
      "epoch": 0.03877835508807502,
      "grad_norm": 0.3565686047077179,
      "learning_rate": 1.0047504309801104e-05,
      "loss": 0.3645,
      "step": 153
    },
    {
      "epoch": 0.03903180838930427,
      "grad_norm": 0.3877963721752167,
      "learning_rate": 9.645117832311886e-06,
      "loss": 0.4005,
      "step": 154
    },
    {
      "epoch": 0.03928526169053352,
      "grad_norm": 0.40037497878074646,
      "learning_rate": 9.249663126440394e-06,
      "loss": 0.3768,
      "step": 155
    },
    {
      "epoch": 0.039538714991762765,
      "grad_norm": 0.4870663583278656,
      "learning_rate": 8.861248305554624e-06,
      "loss": 0.3838,
      "step": 156
    },
    {
      "epoch": 0.039792168292992015,
      "grad_norm": 0.37471503019332886,
      "learning_rate": 8.47997955838829e-06,
      "loss": 0.261,
      "step": 157
    },
    {
      "epoch": 0.040045621594221266,
      "grad_norm": 0.37314358353614807,
      "learning_rate": 8.10596112000994e-06,
      "loss": 0.3119,
      "step": 158
    },
    {
      "epoch": 0.040299074895450517,
      "grad_norm": 0.5217383503913879,
      "learning_rate": 7.739295243326067e-06,
      "loss": 0.3344,
      "step": 159
    },
    {
      "epoch": 0.04055252819667976,
      "grad_norm": 0.3944564461708069,
      "learning_rate": 7.380082171126228e-06,
      "loss": 0.316,
      "step": 160
    },
    {
      "epoch": 0.04080598149790901,
      "grad_norm": 0.4504675567150116,
      "learning_rate": 7.028420108677635e-06,
      "loss": 0.3101,
      "step": 161
    },
    {
      "epoch": 0.04105943479913826,
      "grad_norm": 0.5876880288124084,
      "learning_rate": 6.684405196876842e-06,
      "loss": 0.3112,
      "step": 162
    },
    {
      "epoch": 0.041312888100367505,
      "grad_norm": 0.39890074729919434,
      "learning_rate": 6.3481314859657675e-06,
      "loss": 0.2746,
      "step": 163
    },
    {
      "epoch": 0.041566341401596756,
      "grad_norm": 0.3916687071323395,
      "learning_rate": 6.019690909819298e-06,
      "loss": 0.3408,
      "step": 164
    },
    {
      "epoch": 0.041819794702826006,
      "grad_norm": 0.41850560903549194,
      "learning_rate": 5.6991732608115e-06,
      "loss": 0.3479,
      "step": 165
    },
    {
      "epoch": 0.04207324800405525,
      "grad_norm": 0.5424418449401855,
      "learning_rate": 5.386666165267256e-06,
      "loss": 0.3619,
      "step": 166
    },
    {
      "epoch": 0.0423267013052845,
      "grad_norm": 0.4991215765476227,
      "learning_rate": 5.08225505950613e-06,
      "loss": 0.3204,
      "step": 167
    },
    {
      "epoch": 0.04258015460651375,
      "grad_norm": 0.5841875672340393,
      "learning_rate": 4.786023166484913e-06,
      "loss": 0.3074,
      "step": 168
    },
    {
      "epoch": 0.042833607907743,
      "grad_norm": 0.4832156002521515,
      "learning_rate": 4.498051473045291e-06,
      "loss": 0.4109,
      "step": 169
    },
    {
      "epoch": 0.043087061208972245,
      "grad_norm": 0.41757968068122864,
      "learning_rate": 4.218418707772886e-06,
      "loss": 0.2469,
      "step": 170
    },
    {
      "epoch": 0.043340514510201496,
      "grad_norm": 0.4750998020172119,
      "learning_rate": 3.947201319473587e-06,
      "loss": 0.2775,
      "step": 171
    },
    {
      "epoch": 0.043593967811430746,
      "grad_norm": 0.5711901783943176,
      "learning_rate": 3.684473456273278e-06,
      "loss": 0.2939,
      "step": 172
    },
    {
      "epoch": 0.04384742111265999,
      "grad_norm": 0.5536498427391052,
      "learning_rate": 3.4303069453464383e-06,
      "loss": 0.3898,
      "step": 173
    },
    {
      "epoch": 0.04410087441388924,
      "grad_norm": 0.6635736227035522,
      "learning_rate": 3.184771273279312e-06,
      "loss": 0.3395,
      "step": 174
    },
    {
      "epoch": 0.04435432771511849,
      "grad_norm": 0.45959383249282837,
      "learning_rate": 2.947933567072987e-06,
      "loss": 0.3143,
      "step": 175
    },
    {
      "epoch": 0.044607781016347735,
      "grad_norm": 0.5077531933784485,
      "learning_rate": 2.719858575791534e-06,
      "loss": 0.2469,
      "step": 176
    },
    {
      "epoch": 0.044861234317576985,
      "grad_norm": 0.5000652074813843,
      "learning_rate": 2.500608652860256e-06,
      "loss": 0.3236,
      "step": 177
    },
    {
      "epoch": 0.045114687618806236,
      "grad_norm": 0.528018057346344,
      "learning_rate": 2.2902437390188737e-06,
      "loss": 0.3417,
      "step": 178
    },
    {
      "epoch": 0.04536814092003549,
      "grad_norm": 0.5613098740577698,
      "learning_rate": 2.0888213459343587e-06,
      "loss": 0.3168,
      "step": 179
    },
    {
      "epoch": 0.04562159422126473,
      "grad_norm": 0.48844948410987854,
      "learning_rate": 1.8963965404777875e-06,
      "loss": 0.272,
      "step": 180
    },
    {
      "epoch": 0.04587504752249398,
      "grad_norm": 0.6144419312477112,
      "learning_rate": 1.7130219296696263e-06,
      "loss": 0.2605,
      "step": 181
    },
    {
      "epoch": 0.04612850082372323,
      "grad_norm": 0.5530086159706116,
      "learning_rate": 1.5387476462974824e-06,
      "loss": 0.3317,
      "step": 182
    },
    {
      "epoch": 0.046381954124952475,
      "grad_norm": 0.5514850616455078,
      "learning_rate": 1.3736213352103147e-06,
      "loss": 0.2773,
      "step": 183
    },
    {
      "epoch": 0.046635407426181726,
      "grad_norm": 0.6471708416938782,
      "learning_rate": 1.2176881402928002e-06,
      "loss": 0.3462,
      "step": 184
    },
    {
      "epoch": 0.046888860727410976,
      "grad_norm": 0.8579723238945007,
      "learning_rate": 1.0709906921234367e-06,
      "loss": 0.2975,
      "step": 185
    },
    {
      "epoch": 0.04714231402864022,
      "grad_norm": 0.7083173990249634,
      "learning_rate": 9.33569096319799e-07,
      "loss": 0.3132,
      "step": 186
    },
    {
      "epoch": 0.04739576732986947,
      "grad_norm": 0.6655422449111938,
      "learning_rate": 8.054609225740255e-07,
      "loss": 0.2419,
      "step": 187
    },
    {
      "epoch": 0.04764922063109872,
      "grad_norm": 0.896893322467804,
      "learning_rate": 6.867011943816724e-07,
      "loss": 0.3688,
      "step": 188
    },
    {
      "epoch": 0.04790267393232797,
      "grad_norm": 0.6923823952674866,
      "learning_rate": 5.77322379466617e-07,
      "loss": 0.3454,
      "step": 189
    },
    {
      "epoch": 0.048156127233557215,
      "grad_norm": 0.5786012411117554,
      "learning_rate": 4.773543809047186e-07,
      "loss": 0.2916,
      "step": 190
    },
    {
      "epoch": 0.048409580534786466,
      "grad_norm": 0.5328980684280396,
      "learning_rate": 3.868245289486027e-07,
      "loss": 0.264,
      "step": 191
    },
    {
      "epoch": 0.048663033836015716,
      "grad_norm": 0.4660666286945343,
      "learning_rate": 3.0575757355586817e-07,
      "loss": 0.2526,
      "step": 192
    },
    {
      "epoch": 0.04891648713724496,
      "grad_norm": 0.7175193428993225,
      "learning_rate": 2.3417567762266497e-07,
      "loss": 0.3126,
      "step": 193
    },
    {
      "epoch": 0.04916994043847421,
      "grad_norm": 0.6442475318908691,
      "learning_rate": 1.7209841092460043e-07,
      "loss": 0.2952,
      "step": 194
    },
    {
      "epoch": 0.04942339373970346,
      "grad_norm": 0.6591751575469971,
      "learning_rate": 1.1954274476655534e-07,
      "loss": 0.2414,
      "step": 195
    },
    {
      "epoch": 0.049676847040932705,
      "grad_norm": 0.8419310450553894,
      "learning_rate": 7.652304734289127e-08,
      "loss": 0.2862,
      "step": 196
    },
    {
      "epoch": 0.049930300342161955,
      "grad_norm": 0.6237998604774475,
      "learning_rate": 4.30510798093342e-08,
      "loss": 0.2748,
      "step": 197
    },
    {
      "epoch": 0.050183753643391206,
      "grad_norm": 0.8555868864059448,
      "learning_rate": 1.9135993067588284e-08,
      "loss": 0.2559,
      "step": 198
    },
    {
      "epoch": 0.05043720694462046,
      "grad_norm": 1.0285953283309937,
      "learning_rate": 4.784325263584854e-09,
      "loss": 0.2633,
      "step": 199
    },
    {
      "epoch": 0.0506906602458497,
      "grad_norm": 0.818027913570404,
      "learning_rate": 0.0,
      "loss": 0.2162,
      "step": 200
    },
    {
      "epoch": 0.0506906602458497,
      "eval_loss": 0.3114449679851532,
      "eval_runtime": 535.4173,
      "eval_samples_per_second": 12.413,
      "eval_steps_per_second": 3.104,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 4,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.9889466058683187e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}