{
  "best_metric": 0.8263623714447021,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.4434589800443459,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0022172949002217295, "grad_norm": 43.49946975708008, "learning_rate": 1.004e-05, "loss": 2.8648, "step": 1 },
    { "epoch": 0.0022172949002217295, "eval_loss": 2.7855024337768555, "eval_runtime": 20.2353, "eval_samples_per_second": 9.39, "eval_steps_per_second": 2.372, "step": 1 },
    { "epoch": 0.004434589800443459, "grad_norm": 49.54320526123047, "learning_rate": 2.008e-05, "loss": 3.479, "step": 2 },
    { "epoch": 0.0066518847006651885, "grad_norm": 43.55087661743164, "learning_rate": 3.012e-05, "loss": 3.174, "step": 3 },
    { "epoch": 0.008869179600886918, "grad_norm": 34.47139358520508, "learning_rate": 4.016e-05, "loss": 2.9292, "step": 4 },
    { "epoch": 0.011086474501108648, "grad_norm": 33.695804595947266, "learning_rate": 5.02e-05, "loss": 2.9573, "step": 5 },
    { "epoch": 0.013303769401330377, "grad_norm": 35.9599609375, "learning_rate": 6.024e-05, "loss": 3.0289, "step": 6 },
    { "epoch": 0.015521064301552107, "grad_norm": 30.871747970581055, "learning_rate": 7.028e-05, "loss": 2.7663, "step": 7 },
    { "epoch": 0.017738359201773836, "grad_norm": 21.825246810913086, "learning_rate": 8.032e-05, "loss": 2.2641, "step": 8 },
    { "epoch": 0.019955654101995565, "grad_norm": 26.116636276245117, "learning_rate": 9.036000000000001e-05, "loss": 2.573, "step": 9 },
    { "epoch": 0.022172949002217297, "grad_norm": 22.22761344909668, "learning_rate": 0.0001004, "loss": 2.4794, "step": 10 },
    { "epoch": 0.024390243902439025, "grad_norm": 26.01641082763672, "learning_rate": 9.987157894736842e-05, "loss": 2.447, "step": 11 },
    { "epoch": 0.026607538802660754, "grad_norm": 21.385372161865234, "learning_rate": 9.934315789473684e-05, "loss": 2.0634, "step": 12 },
    { "epoch": 0.028824833702882482, "grad_norm": 34.55962371826172, "learning_rate": 9.881473684210525e-05, "loss": 2.431, "step": 13 },
    { "epoch": 0.031042128603104215, "grad_norm": 29.898178100585938, "learning_rate": 9.828631578947369e-05, "loss": 2.6574, "step": 14 },
    { "epoch": 0.03325942350332594, "grad_norm": 27.43971824645996, "learning_rate": 9.77578947368421e-05, "loss": 1.9155, "step": 15 },
    { "epoch": 0.03547671840354767, "grad_norm": 27.087106704711914, "learning_rate": 9.722947368421052e-05, "loss": 2.1793, "step": 16 },
    { "epoch": 0.037694013303769404, "grad_norm": 30.93043327331543, "learning_rate": 9.670105263157895e-05, "loss": 2.4952, "step": 17 },
    { "epoch": 0.03991130820399113, "grad_norm": 27.66971206665039, "learning_rate": 9.617263157894737e-05, "loss": 2.2107, "step": 18 },
    { "epoch": 0.04212860310421286, "grad_norm": 37.52210235595703, "learning_rate": 9.564421052631579e-05, "loss": 2.235, "step": 19 },
    { "epoch": 0.04434589800443459, "grad_norm": 23.63651466369629, "learning_rate": 9.511578947368421e-05, "loss": 2.1324, "step": 20 },
    { "epoch": 0.04656319290465632, "grad_norm": 24.720478057861328, "learning_rate": 9.458736842105264e-05, "loss": 2.5987, "step": 21 },
    { "epoch": 0.04878048780487805, "grad_norm": 23.012195587158203, "learning_rate": 9.405894736842106e-05, "loss": 2.3515, "step": 22 },
    { "epoch": 0.050997782705099776, "grad_norm": 24.576961517333984, "learning_rate": 9.353052631578947e-05, "loss": 2.2289, "step": 23 },
    { "epoch": 0.05321507760532151, "grad_norm": 24.782127380371094, "learning_rate": 9.300210526315789e-05, "loss": 2.2446, "step": 24 },
    { "epoch": 0.05543237250554324, "grad_norm": 29.085662841796875, "learning_rate": 9.247368421052631e-05, "loss": 2.4364, "step": 25 },
    { "epoch": 0.057649667405764965, "grad_norm": 26.606853485107422, "learning_rate": 9.194526315789473e-05, "loss": 2.2234, "step": 26 },
    { "epoch": 0.0598669623059867, "grad_norm": 30.282766342163086, "learning_rate": 9.141684210526316e-05, "loss": 2.5798, "step": 27 },
    { "epoch": 0.06208425720620843, "grad_norm": 29.041223526000977, "learning_rate": 9.088842105263158e-05, "loss": 2.4436, "step": 28 },
    { "epoch": 0.06430155210643015, "grad_norm": 32.80604553222656, "learning_rate": 9.036000000000001e-05, "loss": 2.8015, "step": 29 },
    { "epoch": 0.06651884700665188, "grad_norm": 28.190860748291016, "learning_rate": 8.983157894736843e-05, "loss": 2.359, "step": 30 },
    { "epoch": 0.06873614190687362, "grad_norm": 35.894405364990234, "learning_rate": 8.930315789473684e-05, "loss": 1.9933, "step": 31 },
    { "epoch": 0.07095343680709534, "grad_norm": 27.286930084228516, "learning_rate": 8.877473684210526e-05, "loss": 2.4784, "step": 32 },
    { "epoch": 0.07317073170731707, "grad_norm": 31.852373123168945, "learning_rate": 8.824631578947368e-05, "loss": 2.2698, "step": 33 },
    { "epoch": 0.07538802660753881, "grad_norm": 30.357845306396484, "learning_rate": 8.771789473684211e-05, "loss": 2.2334, "step": 34 },
    { "epoch": 0.07760532150776053, "grad_norm": 34.58502197265625, "learning_rate": 8.718947368421053e-05, "loss": 2.6314, "step": 35 },
    { "epoch": 0.07982261640798226, "grad_norm": 29.154176712036133, "learning_rate": 8.666105263157895e-05, "loss": 2.1524, "step": 36 },
    { "epoch": 0.082039911308204, "grad_norm": 28.38983917236328, "learning_rate": 8.613263157894737e-05, "loss": 1.7586, "step": 37 },
    { "epoch": 0.08425720620842572, "grad_norm": 22.648962020874023, "learning_rate": 8.560421052631578e-05, "loss": 1.4079, "step": 38 },
    { "epoch": 0.08647450110864745, "grad_norm": 43.15308380126953, "learning_rate": 8.50757894736842e-05, "loss": 2.1336, "step": 39 },
    { "epoch": 0.08869179600886919, "grad_norm": 41.409969329833984, "learning_rate": 8.454736842105263e-05, "loss": 2.5629, "step": 40 },
    { "epoch": 0.09090909090909091, "grad_norm": 32.42283248901367, "learning_rate": 8.401894736842106e-05, "loss": 2.0355, "step": 41 },
    { "epoch": 0.09312638580931264, "grad_norm": 64.17134094238281, "learning_rate": 8.349052631578948e-05, "loss": 2.8994, "step": 42 },
    { "epoch": 0.09534368070953436, "grad_norm": 51.92913055419922, "learning_rate": 8.29621052631579e-05, "loss": 1.9425, "step": 43 },
    { "epoch": 0.0975609756097561, "grad_norm": 60.37343215942383, "learning_rate": 8.243368421052632e-05, "loss": 2.5374, "step": 44 },
    { "epoch": 0.09977827050997783, "grad_norm": 56.61979675292969, "learning_rate": 8.190526315789474e-05, "loss": 2.7696, "step": 45 },
    { "epoch": 0.10199556541019955, "grad_norm": 45.54286575317383, "learning_rate": 8.137684210526315e-05, "loss": 2.2621, "step": 46 },
    { "epoch": 0.10421286031042129, "grad_norm": 48.23731231689453, "learning_rate": 8.084842105263157e-05, "loss": 3.2684, "step": 47 },
    { "epoch": 0.10643015521064302, "grad_norm": 52.1467399597168, "learning_rate": 8.032e-05, "loss": 3.0954, "step": 48 },
    { "epoch": 0.10864745011086474, "grad_norm": 57.875450134277344, "learning_rate": 7.979157894736842e-05, "loss": 2.946, "step": 49 },
    { "epoch": 0.11086474501108648, "grad_norm": 88.02739715576172, "learning_rate": 7.926315789473684e-05, "loss": 4.3883, "step": 50 },
    { "epoch": 0.11086474501108648, "eval_loss": 1.270225167274475, "eval_runtime": 20.2194, "eval_samples_per_second": 9.397, "eval_steps_per_second": 2.374, "step": 50 },
    { "epoch": 0.1130820399113082, "grad_norm": 26.53325653076172, "learning_rate": 7.873473684210526e-05, "loss": 3.0032, "step": 51 },
    { "epoch": 0.11529933481152993, "grad_norm": 17.52619743347168, "learning_rate": 7.820631578947369e-05, "loss": 2.7827, "step": 52 },
    { "epoch": 0.11751662971175167, "grad_norm": 14.779261589050293, "learning_rate": 7.76778947368421e-05, "loss": 2.2784, "step": 53 },
    { "epoch": 0.1197339246119734, "grad_norm": 12.997217178344727, "learning_rate": 7.714947368421052e-05, "loss": 2.2414, "step": 54 },
    { "epoch": 0.12195121951219512, "grad_norm": 14.368338584899902, "learning_rate": 7.662105263157896e-05, "loss": 2.2074, "step": 55 },
    { "epoch": 0.12416851441241686, "grad_norm": 15.52922534942627, "learning_rate": 7.609263157894737e-05, "loss": 2.1736, "step": 56 },
    { "epoch": 0.12638580931263857, "grad_norm": 18.17425537109375, "learning_rate": 7.556421052631579e-05, "loss": 2.3588, "step": 57 },
    { "epoch": 0.1286031042128603, "grad_norm": 16.153467178344727, "learning_rate": 7.503578947368421e-05, "loss": 2.4205, "step": 58 },
    { "epoch": 0.13082039911308205, "grad_norm": 15.452371597290039, "learning_rate": 7.450736842105263e-05, "loss": 2.2599, "step": 59 },
    { "epoch": 0.13303769401330376, "grad_norm": 18.882034301757812, "learning_rate": 7.397894736842105e-05, "loss": 1.9239, "step": 60 },
    { "epoch": 0.1352549889135255, "grad_norm": 15.408071517944336, "learning_rate": 7.345052631578948e-05, "loss": 2.0407, "step": 61 },
    { "epoch": 0.13747228381374724, "grad_norm": 18.57291603088379, "learning_rate": 7.29221052631579e-05, "loss": 2.1355, "step": 62 },
    { "epoch": 0.13968957871396895, "grad_norm": 14.49044418334961, "learning_rate": 7.239368421052631e-05, "loss": 1.6433, "step": 63 },
    { "epoch": 0.1419068736141907, "grad_norm": 21.082347869873047, "learning_rate": 7.186526315789474e-05, "loss": 2.5801, "step": 64 },
    { "epoch": 0.14412416851441243, "grad_norm": 18.55680274963379, "learning_rate": 7.133684210526316e-05, "loss": 1.7756, "step": 65 },
    { "epoch": 0.14634146341463414, "grad_norm": 17.898927688598633, "learning_rate": 7.080842105263158e-05, "loss": 1.6243, "step": 66 },
    { "epoch": 0.14855875831485588, "grad_norm": 17.05344581604004, "learning_rate": 7.028e-05, "loss": 1.7034, "step": 67 },
    { "epoch": 0.15077605321507762, "grad_norm": 19.20818328857422, "learning_rate": 6.975157894736843e-05, "loss": 1.8613, "step": 68 },
    { "epoch": 0.15299334811529933, "grad_norm": 14.604522705078125, "learning_rate": 6.922315789473685e-05, "loss": 1.3704, "step": 69 },
    { "epoch": 0.15521064301552107, "grad_norm": 18.43328285217285, "learning_rate": 6.869473684210527e-05, "loss": 1.9582, "step": 70 },
    { "epoch": 0.1574279379157428, "grad_norm": 19.990154266357422, "learning_rate": 6.816631578947368e-05, "loss": 1.7685, "step": 71 },
    { "epoch": 0.15964523281596452, "grad_norm": 17.83521842956543, "learning_rate": 6.76378947368421e-05, "loss": 2.1192, "step": 72 },
    { "epoch": 0.16186252771618626, "grad_norm": 21.896644592285156, "learning_rate": 6.710947368421052e-05, "loss": 1.6801, "step": 73 },
    { "epoch": 0.164079822616408, "grad_norm": 17.964820861816406, "learning_rate": 6.658105263157894e-05, "loss": 1.7591, "step": 74 },
    { "epoch": 0.1662971175166297, "grad_norm": 22.256559371948242, "learning_rate": 6.605263157894737e-05, "loss": 2.5407, "step": 75 },
    { "epoch": 0.16851441241685144, "grad_norm": 19.16011619567871, "learning_rate": 6.55242105263158e-05, "loss": 1.7553, "step": 76 },
    { "epoch": 0.17073170731707318, "grad_norm": 24.629257202148438, "learning_rate": 6.499578947368422e-05, "loss": 2.3935, "step": 77 },
    { "epoch": 0.1729490022172949, "grad_norm": 19.385765075683594, "learning_rate": 6.446736842105264e-05, "loss": 1.8258, "step": 78 },
    { "epoch": 0.17516629711751663, "grad_norm": 20.863401412963867, "learning_rate": 6.393894736842105e-05, "loss": 1.9962, "step": 79 },
    { "epoch": 0.17738359201773837, "grad_norm": 24.57994842529297, "learning_rate": 6.341052631578947e-05, "loss": 2.5787, "step": 80 },
    { "epoch": 0.17960088691796008, "grad_norm": 23.363492965698242, "learning_rate": 6.288210526315789e-05, "loss": 1.577, "step": 81 },
    { "epoch": 0.18181818181818182, "grad_norm": 22.762670516967773, "learning_rate": 6.235368421052632e-05, "loss": 2.2144, "step": 82 },
    { "epoch": 0.18403547671840353, "grad_norm": 19.128597259521484, "learning_rate": 6.182526315789474e-05, "loss": 1.6379, "step": 83 },
    { "epoch": 0.18625277161862527, "grad_norm": 20.36613655090332, "learning_rate": 6.129684210526316e-05, "loss": 2.1041, "step": 84 },
    { "epoch": 0.188470066518847, "grad_norm": 30.103256225585938, "learning_rate": 6.076842105263158e-05, "loss": 1.8926, "step": 85 },
    { "epoch": 0.19068736141906872, "grad_norm": 25.986953735351562, "learning_rate": 6.024e-05, "loss": 1.9419, "step": 86 },
    { "epoch": 0.19290465631929046, "grad_norm": 29.100027084350586, "learning_rate": 5.971157894736842e-05, "loss": 2.3349, "step": 87 },
    { "epoch": 0.1951219512195122, "grad_norm": 27.938316345214844, "learning_rate": 5.9183157894736835e-05, "loss": 1.8313, "step": 88 },
    { "epoch": 0.1973392461197339, "grad_norm": 36.821903228759766, "learning_rate": 5.8654736842105267e-05, "loss": 2.5949, "step": 89 },
    { "epoch": 0.19955654101995565, "grad_norm": 28.251705169677734, "learning_rate": 5.8126315789473684e-05, "loss": 2.038, "step": 90 },
    { "epoch": 0.2017738359201774, "grad_norm": 20.87628936767578, "learning_rate": 5.759789473684211e-05, "loss": 1.4964, "step": 91 },
    { "epoch": 0.2039911308203991, "grad_norm": 36.82621383666992, "learning_rate": 5.706947368421053e-05, "loss": 2.1786, "step": 92 },
    { "epoch": 0.20620842572062084, "grad_norm": 34.26662826538086, "learning_rate": 5.6541052631578945e-05, "loss": 2.3302, "step": 93 },
    { "epoch": 0.20842572062084258, "grad_norm": 29.61810874938965, "learning_rate": 5.601263157894736e-05, "loss": 2.0587, "step": 94 },
    { "epoch": 0.2106430155210643, "grad_norm": 31.78813934326172, "learning_rate": 5.5484210526315794e-05, "loss": 2.5443, "step": 95 },
    { "epoch": 0.21286031042128603, "grad_norm": 37.09660339355469, "learning_rate": 5.495578947368421e-05, "loss": 1.9665, "step": 96 },
    { "epoch": 0.21507760532150777, "grad_norm": 24.321231842041016, "learning_rate": 5.442736842105264e-05, "loss": 2.1167, "step": 97 },
    { "epoch": 0.21729490022172948, "grad_norm": 28.0638484954834, "learning_rate": 5.3898947368421055e-05, "loss": 1.2507, "step": 98 },
    { "epoch": 0.21951219512195122, "grad_norm": 43.129005432128906, "learning_rate": 5.337052631578947e-05, "loss": 3.1423, "step": 99 },
    { "epoch": 0.22172949002217296, "grad_norm": 90.48893737792969, "learning_rate": 5.284210526315789e-05, "loss": 2.6743, "step": 100 },
    { "epoch": 0.22172949002217296, "eval_loss": 1.0482261180877686, "eval_runtime": 20.243, "eval_samples_per_second": 9.386, "eval_steps_per_second": 2.371, "step": 100 },
    { "epoch": 0.22394678492239467, "grad_norm": 17.120187759399414, "learning_rate": 5.231368421052631e-05, "loss": 2.3941, "step": 101 },
    { "epoch": 0.2261640798226164, "grad_norm": 16.493423461914062, "learning_rate": 5.178526315789474e-05, "loss": 2.3262, "step": 102 },
    { "epoch": 0.22838137472283815, "grad_norm": 12.841573715209961, "learning_rate": 5.1256842105263165e-05, "loss": 2.0664, "step": 103 },
    { "epoch": 0.23059866962305986, "grad_norm": 12.463582992553711, "learning_rate": 5.072842105263158e-05, "loss": 2.0229, "step": 104 },
    { "epoch": 0.2328159645232816, "grad_norm": 13.76339054107666, "learning_rate": 5.02e-05, "loss": 2.0176, "step": 105 },
    { "epoch": 0.23503325942350334, "grad_norm": 12.230353355407715, "learning_rate": 4.967157894736842e-05, "loss": 1.6626, "step": 106 },
    { "epoch": 0.23725055432372505, "grad_norm": 14.192876815795898, "learning_rate": 4.914315789473684e-05, "loss": 2.0265, "step": 107 },
    { "epoch": 0.2394678492239468, "grad_norm": 13.952037811279297, "learning_rate": 4.861473684210526e-05, "loss": 1.7241, "step": 108 },
    { "epoch": 0.24168514412416853, "grad_norm": 14.577288627624512, "learning_rate": 4.8086315789473686e-05, "loss": 1.7365, "step": 109 },
    { "epoch": 0.24390243902439024, "grad_norm": 19.91164779663086, "learning_rate": 4.7557894736842104e-05, "loss": 1.8842, "step": 110 },
    { "epoch": 0.24611973392461198, "grad_norm": 14.9652681350708, "learning_rate": 4.702947368421053e-05, "loss": 1.7975, "step": 111 },
    { "epoch": 0.24833702882483372, "grad_norm": 13.924230575561523, "learning_rate": 4.6501052631578946e-05, "loss": 1.9894, "step": 112 },
    { "epoch": 0.25055432372505543, "grad_norm": 13.089156150817871, "learning_rate": 4.5972631578947364e-05, "loss": 1.758, "step": 113 },
    { "epoch": 0.25277161862527714, "grad_norm": 12.60175895690918, "learning_rate": 4.544421052631579e-05, "loss": 1.6666, "step": 114 },
    { "epoch": 0.2549889135254989, "grad_norm": 17.490257263183594, "learning_rate": 4.4915789473684213e-05, "loss": 1.9925, "step": 115 },
    { "epoch": 0.2572062084257206, "grad_norm": 12.952994346618652, "learning_rate": 4.438736842105263e-05, "loss": 1.6062, "step": 116 },
    { "epoch": 0.25942350332594233, "grad_norm": 15.763453483581543, "learning_rate": 4.3858947368421056e-05, "loss": 2.0694, "step": 117 },
    { "epoch": 0.2616407982261641, "grad_norm": 14.717920303344727, "learning_rate": 4.3330526315789474e-05, "loss": 1.7913, "step": 118 },
    { "epoch": 0.2638580931263858, "grad_norm": 16.53207778930664, "learning_rate": 4.280210526315789e-05, "loss": 1.7959, "step": 119 },
    { "epoch": 0.2660753880266075, "grad_norm": 13.253047943115234, "learning_rate": 4.2273684210526317e-05, "loss": 1.6143, "step": 120 },
    { "epoch": 0.2682926829268293, "grad_norm": 15.786408424377441, "learning_rate": 4.174526315789474e-05, "loss": 1.6721, "step": 121 },
    { "epoch": 0.270509977827051, "grad_norm": 12.185956001281738, "learning_rate": 4.121684210526316e-05, "loss": 1.5245, "step": 122 },
    { "epoch": 0.2727272727272727, "grad_norm": 21.057693481445312, "learning_rate": 4.068842105263158e-05, "loss": 2.1359, "step": 123 },
    { "epoch": 0.2749445676274945, "grad_norm": 13.455188751220703, "learning_rate": 4.016e-05, "loss": 2.0307, "step": 124 },
    { "epoch": 0.2771618625277162, "grad_norm": 15.558540344238281, "learning_rate": 3.963157894736842e-05, "loss": 1.7351, "step": 125 },
    { "epoch": 0.2793791574279379, "grad_norm": 16.883926391601562, "learning_rate": 3.9103157894736844e-05, "loss": 2.1596, "step": 126 },
    { "epoch": 0.28159645232815966, "grad_norm": 20.178613662719727, "learning_rate": 3.857473684210526e-05, "loss": 1.8029, "step": 127 },
    { "epoch": 0.2838137472283814, "grad_norm": 18.442106246948242, "learning_rate": 3.804631578947369e-05, "loss": 2.0837, "step": 128 },
    { "epoch": 0.2860310421286031, "grad_norm": 19.457265853881836, "learning_rate": 3.7517894736842105e-05, "loss": 1.6284, "step": 129 },
    { "epoch": 0.28824833702882485, "grad_norm": 26.571035385131836, "learning_rate": 3.698947368421052e-05, "loss": 1.7662, "step": 130 },
    { "epoch": 0.29046563192904656, "grad_norm": 19.368440628051758, "learning_rate": 3.646105263157895e-05, "loss": 1.8661, "step": 131 },
    { "epoch": 0.2926829268292683, "grad_norm": 22.966577529907227, "learning_rate": 3.593263157894737e-05, "loss": 2.619, "step": 132 },
    { "epoch": 0.29490022172949004, "grad_norm": 24.9805965423584, "learning_rate": 3.540421052631579e-05, "loss": 1.5081, "step": 133 },
    { "epoch": 0.29711751662971175, "grad_norm": 19.5284366607666, "learning_rate": 3.4875789473684215e-05, "loss": 1.7447, "step": 134 },
    { "epoch": 0.29933481152993346, "grad_norm": 21.54673194885254, "learning_rate": 3.434736842105263e-05, "loss": 1.6988, "step": 135 },
    { "epoch": 0.30155210643015523, "grad_norm": 23.316730499267578, "learning_rate": 3.381894736842105e-05, "loss": 2.2736, "step": 136 },
    { "epoch": 0.30376940133037694, "grad_norm": 22.004526138305664, "learning_rate": 3.329052631578947e-05, "loss": 1.9549, "step": 137 },
    { "epoch": 0.30598669623059865, "grad_norm": 23.052682876586914, "learning_rate": 3.27621052631579e-05, "loss": 2.0963, "step": 138 },
    { "epoch": 0.3082039911308204, "grad_norm": 20.599315643310547, "learning_rate": 3.223368421052632e-05, "loss": 1.2949, "step": 139 },
    { "epoch": 0.31042128603104213, "grad_norm": 20.93349266052246, "learning_rate": 3.1705263157894736e-05, "loss": 1.9277, "step": 140 },
    { "epoch": 0.31263858093126384, "grad_norm": 27.307830810546875, "learning_rate": 3.117684210526316e-05, "loss": 1.122, "step": 141 },
    { "epoch": 0.3148558758314856, "grad_norm": 17.325700759887695, "learning_rate": 3.064842105263158e-05, "loss": 1.2689, "step": 142 },
    { "epoch": 0.3170731707317073, "grad_norm": 30.8470458984375, "learning_rate": 3.012e-05, "loss": 2.1764, "step": 143 },
    { "epoch": 0.31929046563192903, "grad_norm": 19.84284210205078, "learning_rate": 2.9591578947368418e-05, "loss": 1.059, "step": 144 },
    { "epoch": 0.3215077605321508, "grad_norm": 23.532106399536133, "learning_rate": 2.9063157894736842e-05, "loss": 1.9666, "step": 145 },
    { "epoch": 0.3237250554323725, "grad_norm": 17.46700668334961, "learning_rate": 2.8534736842105264e-05, "loss": 1.5324, "step": 146 },
    { "epoch": 0.3259423503325942, "grad_norm": 23.52845001220703, "learning_rate": 2.800631578947368e-05, "loss": 1.3784, "step": 147 },
    { "epoch": 0.328159645232816, "grad_norm": 27.91365623474121, "learning_rate": 2.7477894736842106e-05, "loss": 1.6899, "step": 148 },
    { "epoch": 0.3303769401330377, "grad_norm": 32.38272476196289, "learning_rate": 2.6949473684210527e-05, "loss": 1.6625, "step": 149 },
    { "epoch": 0.3325942350332594, "grad_norm": 51.69731140136719, "learning_rate": 2.6421052631578945e-05, "loss": 1.9981, "step": 150 },
    { "epoch": 0.3325942350332594, "eval_loss": 0.8841075897216797, "eval_runtime": 20.2428, "eval_samples_per_second": 9.386, "eval_steps_per_second": 2.371, "step": 150 },
    { "epoch": 0.3348115299334812, "grad_norm": 9.736676216125488, "learning_rate": 2.589263157894737e-05, "loss": 1.6967, "step": 151 },
    { "epoch": 0.3370288248337029, "grad_norm": 10.335311889648438, "learning_rate": 2.536421052631579e-05, "loss": 1.6221, "step": 152 },
    { "epoch": 0.3392461197339246, "grad_norm": 10.948195457458496, "learning_rate": 2.483578947368421e-05, "loss": 1.891, "step": 153 },
    { "epoch": 0.34146341463414637, "grad_norm": 10.740894317626953, "learning_rate": 2.430736842105263e-05, "loss": 1.5576, "step": 154 },
    { "epoch": 0.3436807095343681, "grad_norm": 10.873139381408691, "learning_rate": 2.3778947368421052e-05, "loss": 1.5878, "step": 155 },
    { "epoch": 0.3458980044345898, "grad_norm": 14.580072402954102, "learning_rate": 2.3250526315789473e-05, "loss": 1.9496, "step": 156 },
    { "epoch": 0.34811529933481156, "grad_norm": 12.087544441223145, "learning_rate": 2.2722105263157894e-05, "loss": 1.8294, "step": 157 },
    { "epoch": 0.35033259423503327, "grad_norm": 14.1340913772583, "learning_rate": 2.2193684210526316e-05, "loss": 1.8763, "step": 158 },
    { "epoch": 0.352549889135255, "grad_norm": 11.832399368286133, "learning_rate": 2.1665263157894737e-05, "loss": 1.771, "step": 159 },
    { "epoch": 0.35476718403547675, "grad_norm": 14.125495910644531, "learning_rate": 2.1136842105263158e-05, "loss": 2.1433, "step": 160 },
    { "epoch": 0.35698447893569846, "grad_norm": 16.098318099975586, "learning_rate": 2.060842105263158e-05, "loss": 1.5098, "step": 161 },
    { "epoch": 0.35920177383592017, "grad_norm": 11.606100082397461, "learning_rate": 2.008e-05, "loss": 1.531, "step": 162 },
    { "epoch": 0.3614190687361419, "grad_norm": 14.126666069030762, "learning_rate": 1.9551578947368422e-05, "loss": 2.1956, "step": 163 },
    { "epoch": 0.36363636363636365, "grad_norm": 13.657944679260254, "learning_rate": 1.9023157894736843e-05, "loss": 2.0216, "step": 164 },
    { "epoch": 0.36585365853658536, "grad_norm": 16.890853881835938, "learning_rate": 1.849473684210526e-05, "loss": 2.0006, "step": 165 },
    { "epoch": 0.36807095343680707, "grad_norm": 12.908790588378906, "learning_rate": 1.7966315789473686e-05, "loss": 1.6173, "step": 166 },
    { "epoch": 0.37028824833702884, "grad_norm": 17.241039276123047, "learning_rate": 1.7437894736842107e-05, "loss": 1.9784, "step": 167 },
    { "epoch": 0.37250554323725055, "grad_norm": 15.207599639892578, "learning_rate": 1.6909473684210525e-05, "loss": 1.5988, "step": 168 },
    { "epoch": 0.37472283813747226, "grad_norm": 15.888737678527832, "learning_rate": 1.638105263157895e-05, "loss": 1.4648, "step": 169 },
    { "epoch": 0.376940133037694, "grad_norm": 12.339811325073242, "learning_rate": 1.5852631578947368e-05, "loss": 1.5447, "step": 170 },
    { "epoch": 0.37915742793791574, "grad_norm": 12.044404983520508, "learning_rate": 1.532421052631579e-05, "loss": 1.3339, "step": 171 },
    { "epoch": 0.38137472283813745, "grad_norm": 11.459868431091309, "learning_rate": 1.4795789473684209e-05, "loss": 1.4563, "step": 172 },
    { "epoch": 0.3835920177383592, "grad_norm": 14.248141288757324, "learning_rate": 1.4267368421052632e-05, "loss": 1.5893, "step": 173 },
    { "epoch": 0.3858093126385809, "grad_norm": 19.118242263793945, "learning_rate": 1.3738947368421053e-05, "loss": 1.9648, "step": 174 },
    { "epoch": 0.38802660753880264, "grad_norm": 15.491469383239746, "learning_rate": 1.3210526315789473e-05, "loss": 1.5081, "step": 175 },
    { "epoch": 0.3902439024390244, "grad_norm": 13.472190856933594, "learning_rate": 1.2682105263157896e-05, "loss": 1.2014, "step": 176 },
    { "epoch": 0.3924611973392461, "grad_norm": 16.698389053344727, "learning_rate": 1.2153684210526315e-05, "loss": 1.6185, "step": 177 },
    { "epoch": 0.3946784922394678, "grad_norm": 14.444684028625488, "learning_rate": 1.1625263157894737e-05, "loss": 1.663, "step": 178 },
    { "epoch": 0.3968957871396896, "grad_norm": 12.130651473999023, "learning_rate": 1.1096842105263158e-05, "loss": 1.1374, "step": 179 },
    { "epoch": 0.3991130820399113, "grad_norm": 13.900895118713379, "learning_rate": 1.0568421052631579e-05, "loss": 1.3243, "step": 180 },
    { "epoch": 0.401330376940133, "grad_norm": 15.653491020202637, "learning_rate": 1.004e-05, "loss": 1.7645, "step": 181 },
    { "epoch": 0.4035476718403548, "grad_norm": 15.8237886428833, "learning_rate": 9.511578947368422e-06, "loss": 1.6201, "step": 182 },
    { "epoch": 0.4057649667405765, "grad_norm": 32.045345306396484, "learning_rate": 8.983157894736843e-06, "loss": 1.7167, "step": 183 },
    { "epoch": 0.4079822616407982, "grad_norm": 13.554454803466797, "learning_rate": 8.454736842105263e-06, "loss": 1.1857, "step": 184 },
    { "epoch": 0.41019955654101997, "grad_norm": 13.749832153320312, "learning_rate": 7.926315789473684e-06, "loss": 1.0891, "step": 185 },
    { "epoch": 0.4124168514412417, "grad_norm": 16.378047943115234, "learning_rate": 7.397894736842104e-06, "loss": 1.244, "step": 186 },
    { "epoch": 0.4146341463414634, "grad_norm": 20.462013244628906, "learning_rate": 6.8694736842105265e-06, "loss": 1.659, "step": 187 },
    { "epoch": 0.41685144124168516, "grad_norm": 18.432472229003906, "learning_rate": 6.341052631578948e-06, "loss": 1.8659, "step": 188 },
    { "epoch": 0.4190687361419069, "grad_norm": 21.487030029296875, "learning_rate": 5.812631578947368e-06, "loss": 1.7793, "step": 189 },
    { "epoch": 0.4212860310421286, "grad_norm": 27.553062438964844, "learning_rate": 5.2842105263157896e-06, "loss": 2.1433, "step": 190 },
    { "epoch": 0.42350332594235035, "grad_norm": 20.972902297973633, "learning_rate": 4.755789473684211e-06, "loss": 1.3501, "step": 191 },
    { "epoch": 0.42572062084257206, "grad_norm": 21.430740356445312, "learning_rate": 4.227368421052631e-06, "loss": 1.2867, "step": 192 },
    { "epoch": 0.4279379157427938, "grad_norm": 21.622007369995117, "learning_rate": 3.698947368421052e-06, "loss": 1.8264, "step": 193 },
    { "epoch": 0.43015521064301554, "grad_norm": 18.00780487060547, "learning_rate": 3.170526315789474e-06, "loss": 1.5396, "step": 194 },
    { "epoch": 0.43237250554323725, "grad_norm": 21.456939697265625, "learning_rate": 2.6421052631578948e-06, "loss": 0.8305, "step": 195 },
    { "epoch": 0.43458980044345896, "grad_norm": 37.9657096862793, "learning_rate": 2.1136842105263157e-06, "loss": 2.433, "step": 196 },
    { "epoch": 0.43680709534368073, "grad_norm": 31.422948837280273, "learning_rate": 1.585263157894737e-06, "loss": 1.5014, "step": 197 },
    { "epoch": 0.43902439024390244, "grad_norm": 22.746129989624023, "learning_rate": 1.0568421052631578e-06, "loss": 1.9673, "step": 198 },
    { "epoch": 0.44124168514412415, "grad_norm": 45.2837028503418, "learning_rate": 5.284210526315789e-07, "loss": 1.036, "step": 199 },
    { "epoch": 0.4434589800443459, "grad_norm": 56.693782806396484, "learning_rate": 0.0, "loss": 2.1921, "step": 200 },
    { "epoch": 0.4434589800443459, "eval_loss": 0.8263623714447021, "eval_runtime": 20.2153, "eval_samples_per_second": 9.399, "eval_steps_per_second": 2.374, "step": 200 }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 7.15499809800192e+16,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}