{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9983642311886587,
  "eval_steps": 500,
  "global_step": 229,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008724100327153763,
      "grad_norm": 6.035512972360855,
      "learning_rate": 1.4035087719298246e-06,
      "loss": 1.1029,
      "step": 1
    },
    {
      "epoch": 0.017448200654307525,
      "grad_norm": 6.042841036030158,
      "learning_rate": 2.8070175438596493e-06,
      "loss": 1.1024,
      "step": 2
    },
    {
      "epoch": 0.026172300981461286,
      "grad_norm": 5.910306635602092,
      "learning_rate": 4.210526315789474e-06,
      "loss": 1.0986,
      "step": 3
    },
    {
      "epoch": 0.03489640130861505,
      "grad_norm": 4.536951510086868,
      "learning_rate": 5.6140350877192985e-06,
      "loss": 1.0538,
      "step": 4
    },
    {
      "epoch": 0.04362050163576881,
      "grad_norm": 2.3773466328483686,
      "learning_rate": 7.017543859649123e-06,
      "loss": 0.9941,
      "step": 5
    },
    {
      "epoch": 0.05234460196292257,
      "grad_norm": 1.999261332126332,
      "learning_rate": 8.421052631578948e-06,
      "loss": 0.999,
      "step": 6
    },
    {
      "epoch": 0.061068702290076333,
      "grad_norm": 4.369629342726399,
      "learning_rate": 9.824561403508772e-06,
      "loss": 0.9889,
      "step": 7
    },
    {
      "epoch": 0.0697928026172301,
      "grad_norm": 4.899527942424933,
      "learning_rate": 1.1228070175438597e-05,
      "loss": 0.9631,
      "step": 8
    },
    {
      "epoch": 0.07851690294438386,
      "grad_norm": 5.748165018609449,
      "learning_rate": 1.263157894736842e-05,
      "loss": 0.9524,
      "step": 9
    },
    {
      "epoch": 0.08724100327153762,
      "grad_norm": 4.763126150981281,
      "learning_rate": 1.4035087719298246e-05,
      "loss": 0.952,
      "step": 10
    },
    {
      "epoch": 0.09596510359869138,
      "grad_norm": 3.167454561583767,
      "learning_rate": 1.543859649122807e-05,
      "loss": 0.9095,
      "step": 11
    },
    {
      "epoch": 0.10468920392584515,
      "grad_norm": 3.3676041736516433,
      "learning_rate": 1.6842105263157896e-05,
      "loss": 0.89,
      "step": 12
    },
    {
      "epoch": 0.1134133042529989,
      "grad_norm": 2.8110117483164676,
      "learning_rate": 1.824561403508772e-05,
      "loss": 0.8538,
      "step": 13
    },
    {
      "epoch": 0.12213740458015267,
      "grad_norm": 2.0519859086879286,
      "learning_rate": 1.9649122807017544e-05,
      "loss": 0.85,
      "step": 14
    },
    {
      "epoch": 0.13086150490730644,
      "grad_norm": 1.8373157533967603,
      "learning_rate": 2.105263157894737e-05,
      "loss": 0.8349,
      "step": 15
    },
    {
      "epoch": 0.1395856052344602,
      "grad_norm": 1.6156997409278058,
      "learning_rate": 2.2456140350877194e-05,
      "loss": 0.829,
      "step": 16
    },
    {
      "epoch": 0.14830970556161396,
      "grad_norm": 1.2400567097625959,
      "learning_rate": 2.385964912280702e-05,
      "loss": 0.8108,
      "step": 17
    },
    {
      "epoch": 0.15703380588876772,
      "grad_norm": 1.1069155525681142,
      "learning_rate": 2.526315789473684e-05,
      "loss": 0.8029,
      "step": 18
    },
    {
      "epoch": 0.16575790621592149,
      "grad_norm": 1.0769626887248518,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 0.7858,
      "step": 19
    },
    {
      "epoch": 0.17448200654307525,
      "grad_norm": 0.7016586049675165,
      "learning_rate": 2.8070175438596492e-05,
      "loss": 0.7816,
      "step": 20
    },
    {
      "epoch": 0.183206106870229,
      "grad_norm": 0.8460028260574683,
      "learning_rate": 2.9473684210526317e-05,
      "loss": 0.7811,
      "step": 21
    },
    {
      "epoch": 0.19193020719738277,
      "grad_norm": 0.6598083455923888,
      "learning_rate": 3.087719298245614e-05,
      "loss": 0.7643,
      "step": 22
    },
    {
      "epoch": 0.20065430752453653,
      "grad_norm": 0.7215762522174539,
      "learning_rate": 3.228070175438597e-05,
      "loss": 0.7617,
      "step": 23
    },
    {
      "epoch": 0.2093784078516903,
      "grad_norm": 0.5554406103693584,
      "learning_rate": 3.368421052631579e-05,
      "loss": 0.7576,
      "step": 24
    },
    {
      "epoch": 0.21810250817884405,
      "grad_norm": 0.6258543895787715,
      "learning_rate": 3.508771929824562e-05,
      "loss": 0.7629,
      "step": 25
    },
    {
      "epoch": 0.2268266085059978,
      "grad_norm": 0.7676778822568288,
      "learning_rate": 3.649122807017544e-05,
      "loss": 0.7521,
      "step": 26
    },
    {
      "epoch": 0.23555070883315157,
      "grad_norm": 1.0032530689824866,
      "learning_rate": 3.789473684210526e-05,
      "loss": 0.7384,
      "step": 27
    },
    {
      "epoch": 0.24427480916030533,
      "grad_norm": 1.2535316239757839,
      "learning_rate": 3.929824561403509e-05,
      "loss": 0.7474,
      "step": 28
    },
    {
      "epoch": 0.2529989094874591,
      "grad_norm": 0.9953077887554728,
      "learning_rate": 4.070175438596492e-05,
      "loss": 0.7412,
      "step": 29
    },
    {
      "epoch": 0.2617230098146129,
      "grad_norm": 1.3600760898966002,
      "learning_rate": 4.210526315789474e-05,
      "loss": 0.7257,
      "step": 30
    },
    {
      "epoch": 0.27044711014176664,
      "grad_norm": 0.6326945767090465,
      "learning_rate": 4.350877192982457e-05,
      "loss": 0.7247,
      "step": 31
    },
    {
      "epoch": 0.2791712104689204,
      "grad_norm": 1.0488194881380388,
      "learning_rate": 4.491228070175439e-05,
      "loss": 0.7247,
      "step": 32
    },
    {
      "epoch": 0.28789531079607417,
      "grad_norm": 1.1854260719924898,
      "learning_rate": 4.6315789473684214e-05,
      "loss": 0.7265,
      "step": 33
    },
    {
      "epoch": 0.2966194111232279,
      "grad_norm": 1.231800504978088,
      "learning_rate": 4.771929824561404e-05,
      "loss": 0.724,
      "step": 34
    },
    {
      "epoch": 0.3053435114503817,
      "grad_norm": 1.1471910683800164,
      "learning_rate": 4.9122807017543864e-05,
      "loss": 0.7245,
      "step": 35
    },
    {
      "epoch": 0.31406761177753545,
      "grad_norm": 1.2650375204215663,
      "learning_rate": 5.052631578947368e-05,
      "loss": 0.7173,
      "step": 36
    },
    {
      "epoch": 0.3227917121046892,
      "grad_norm": 1.581708517443491,
      "learning_rate": 5.1929824561403515e-05,
      "loss": 0.7146,
      "step": 37
    },
    {
      "epoch": 0.33151581243184297,
      "grad_norm": 0.6899231907687099,
      "learning_rate": 5.333333333333333e-05,
      "loss": 0.7039,
      "step": 38
    },
    {
      "epoch": 0.34023991275899673,
      "grad_norm": 1.9659650918585876,
      "learning_rate": 5.4736842105263165e-05,
      "loss": 0.7129,
      "step": 39
    },
    {
      "epoch": 0.3489640130861505,
      "grad_norm": 0.9602545263207618,
      "learning_rate": 5.6140350877192984e-05,
      "loss": 0.7229,
      "step": 40
    },
    {
      "epoch": 0.35768811341330425,
      "grad_norm": 2.019396688493347,
      "learning_rate": 5.7543859649122816e-05,
      "loss": 0.7173,
      "step": 41
    },
    {
      "epoch": 0.366412213740458,
      "grad_norm": 1.3667216071947406,
      "learning_rate": 5.8947368421052634e-05,
      "loss": 0.7088,
      "step": 42
    },
    {
      "epoch": 0.3751363140676118,
      "grad_norm": 1.6739220492126983,
      "learning_rate": 6.035087719298246e-05,
      "loss": 0.7126,
      "step": 43
    },
    {
      "epoch": 0.38386041439476554,
      "grad_norm": 1.406631076781101,
      "learning_rate": 6.175438596491228e-05,
      "loss": 0.7211,
      "step": 44
    },
    {
      "epoch": 0.3925845147219193,
      "grad_norm": 1.232815581243198,
      "learning_rate": 6.315789473684212e-05,
      "loss": 0.7114,
      "step": 45
    },
    {
      "epoch": 0.40130861504907306,
      "grad_norm": 1.3716611381630202,
      "learning_rate": 6.456140350877194e-05,
      "loss": 0.7174,
      "step": 46
    },
    {
      "epoch": 0.4100327153762268,
      "grad_norm": 1.1415702503019296,
      "learning_rate": 6.596491228070175e-05,
      "loss": 0.7005,
      "step": 47
    },
    {
      "epoch": 0.4187568157033806,
      "grad_norm": 1.2074860027919303,
      "learning_rate": 6.736842105263159e-05,
      "loss": 0.7098,
      "step": 48
    },
    {
      "epoch": 0.42748091603053434,
      "grad_norm": 1.6609425790213797,
      "learning_rate": 6.87719298245614e-05,
      "loss": 0.7098,
      "step": 49
    },
    {
      "epoch": 0.4362050163576881,
      "grad_norm": 0.9769451874458346,
      "learning_rate": 7.017543859649124e-05,
      "loss": 0.7079,
      "step": 50
    },
    {
      "epoch": 0.44492911668484186,
      "grad_norm": 1.7666355593904801,
      "learning_rate": 7.157894736842105e-05,
      "loss": 0.6967,
      "step": 51
    },
    {
      "epoch": 0.4536532170119956,
      "grad_norm": 1.014343915032852,
      "learning_rate": 7.298245614035087e-05,
      "loss": 0.6961,
      "step": 52
    },
    {
      "epoch": 0.4623773173391494,
      "grad_norm": 1.292912081295997,
      "learning_rate": 7.43859649122807e-05,
      "loss": 0.6888,
      "step": 53
    },
    {
      "epoch": 0.47110141766630315,
      "grad_norm": 1.33549845587438,
      "learning_rate": 7.578947368421052e-05,
      "loss": 0.6989,
      "step": 54
    },
    {
      "epoch": 0.4798255179934569,
      "grad_norm": 1.6285853466115705,
      "learning_rate": 7.719298245614036e-05,
      "loss": 0.7016,
      "step": 55
    },
    {
      "epoch": 0.48854961832061067,
      "grad_norm": 0.9433375871219883,
      "learning_rate": 7.859649122807017e-05,
      "loss": 0.6824,
      "step": 56
    },
    {
      "epoch": 0.49727371864776443,
      "grad_norm": 1.2489894514867836,
      "learning_rate": 8e-05,
      "loss": 0.6898,
      "step": 57
    },
    {
      "epoch": 0.5059978189749182,
      "grad_norm": 1.60283780700127,
      "learning_rate": 7.99992499440621e-05,
      "loss": 0.7019,
      "step": 58
    },
    {
      "epoch": 0.514721919302072,
      "grad_norm": 1.349411337094158,
      "learning_rate": 7.999699980437755e-05,
      "loss": 0.692,
      "step": 59
    },
    {
      "epoch": 0.5234460196292258,
      "grad_norm": 1.4027794489135887,
      "learning_rate": 7.999324966533291e-05,
      "loss": 0.6739,
      "step": 60
    },
    {
      "epoch": 0.5321701199563795,
      "grad_norm": 0.8999831714962346,
      "learning_rate": 7.998799966756889e-05,
      "loss": 0.6925,
      "step": 61
    },
    {
      "epoch": 0.5408942202835333,
      "grad_norm": 1.2349556182219792,
      "learning_rate": 7.998125000797506e-05,
      "loss": 0.6811,
      "step": 62
    },
    {
      "epoch": 0.549618320610687,
      "grad_norm": 0.9953482513645127,
      "learning_rate": 7.997300093968255e-05,
      "loss": 0.6919,
      "step": 63
    },
    {
      "epoch": 0.5583424209378408,
      "grad_norm": 1.7631278928848257,
      "learning_rate": 7.99632527720545e-05,
      "loss": 0.7076,
      "step": 64
    },
    {
      "epoch": 0.5670665212649946,
      "grad_norm": 0.9802682486455654,
      "learning_rate": 7.995200587067445e-05,
      "loss": 0.6836,
      "step": 65
    },
    {
      "epoch": 0.5757906215921483,
      "grad_norm": 1.4018686830735987,
      "learning_rate": 7.993926065733265e-05,
      "loss": 0.6998,
      "step": 66
    },
    {
      "epoch": 0.5845147219193021,
      "grad_norm": 1.0081781512305683,
      "learning_rate": 7.992501761001027e-05,
      "loss": 0.6878,
      "step": 67
    },
    {
      "epoch": 0.5932388222464559,
      "grad_norm": 1.2813118135682324,
      "learning_rate": 7.99092772628614e-05,
      "loss": 0.6948,
      "step": 68
    },
    {
      "epoch": 0.6019629225736096,
      "grad_norm": 0.9281112795468938,
      "learning_rate": 7.98920402061931e-05,
      "loss": 0.6969,
      "step": 69
    },
    {
      "epoch": 0.6106870229007634,
      "grad_norm": 1.2375443626494387,
      "learning_rate": 7.987330708644319e-05,
      "loss": 0.6793,
      "step": 70
    },
    {
      "epoch": 0.6194111232279171,
      "grad_norm": 1.0863449033166035,
      "learning_rate": 7.985307860615607e-05,
      "loss": 0.6848,
      "step": 71
    },
    {
      "epoch": 0.6281352235550709,
      "grad_norm": 0.942362107324505,
      "learning_rate": 7.98313555239563e-05,
      "loss": 0.6862,
      "step": 72
    },
    {
      "epoch": 0.6368593238822247,
      "grad_norm": 1.3027161650276327,
      "learning_rate": 7.980813865452026e-05,
      "loss": 0.6735,
      "step": 73
    },
    {
      "epoch": 0.6455834242093784,
      "grad_norm": 0.7977961043410647,
      "learning_rate": 7.978342886854546e-05,
      "loss": 0.6787,
      "step": 74
    },
    {
      "epoch": 0.6543075245365322,
      "grad_norm": 1.3693856212692685,
      "learning_rate": 7.975722709271799e-05,
      "loss": 0.6751,
      "step": 75
    },
    {
      "epoch": 0.6630316248636859,
      "grad_norm": 0.7738139399748106,
      "learning_rate": 7.972953430967773e-05,
      "loss": 0.6726,
      "step": 76
    },
    {
      "epoch": 0.6717557251908397,
      "grad_norm": 0.6453937973753266,
      "learning_rate": 7.97003515579815e-05,
      "loss": 0.6756,
      "step": 77
    },
    {
      "epoch": 0.6804798255179935,
      "grad_norm": 0.7085908664971486,
      "learning_rate": 7.96696799320641e-05,
      "loss": 0.6774,
      "step": 78
    },
    {
      "epoch": 0.6892039258451472,
      "grad_norm": 0.7420105586344222,
      "learning_rate": 7.96375205821973e-05,
      "loss": 0.6647,
      "step": 79
    },
    {
      "epoch": 0.697928026172301,
      "grad_norm": 1.182903610639294,
      "learning_rate": 7.960387471444666e-05,
      "loss": 0.6758,
      "step": 80
    },
    {
      "epoch": 0.7066521264994547,
      "grad_norm": 1.305515943296027,
      "learning_rate": 7.956874359062632e-05,
      "loss": 0.6808,
      "step": 81
    },
    {
      "epoch": 0.7153762268266085,
      "grad_norm": 0.9451990102208602,
      "learning_rate": 7.95321285282517e-05,
      "loss": 0.6697,
      "step": 82
    },
    {
      "epoch": 0.7241003271537623,
      "grad_norm": 0.9432391308082473,
      "learning_rate": 7.949403090049002e-05,
      "loss": 0.6723,
      "step": 83
    },
    {
      "epoch": 0.732824427480916,
      "grad_norm": 1.1371193647756934,
      "learning_rate": 7.94544521361089e-05,
      "loss": 0.6773,
      "step": 84
    },
    {
      "epoch": 0.7415485278080698,
      "grad_norm": 1.1858932832021734,
      "learning_rate": 7.941339371942269e-05,
      "loss": 0.6657,
      "step": 85
    },
    {
      "epoch": 0.7502726281352236,
      "grad_norm": 1.165847606968587,
      "learning_rate": 7.937085719023685e-05,
      "loss": 0.6743,
      "step": 86
    },
    {
      "epoch": 0.7589967284623773,
      "grad_norm": 0.5770889497138026,
      "learning_rate": 7.932684414379021e-05,
      "loss": 0.668,
      "step": 87
    },
    {
      "epoch": 0.7677208287895311,
      "grad_norm": 0.925349798073699,
      "learning_rate": 7.928135623069509e-05,
      "loss": 0.6693,
      "step": 88
    },
    {
      "epoch": 0.7764449291166848,
      "grad_norm": 1.2265564072341149,
      "learning_rate": 7.923439515687546e-05,
      "loss": 0.6721,
      "step": 89
    },
    {
      "epoch": 0.7851690294438386,
      "grad_norm": 0.76774338811776,
      "learning_rate": 7.918596268350296e-05,
      "loss": 0.664,
      "step": 90
    },
    {
      "epoch": 0.7938931297709924,
      "grad_norm": 0.811830002272808,
      "learning_rate": 7.913606062693077e-05,
      "loss": 0.6674,
      "step": 91
    },
    {
      "epoch": 0.8026172300981461,
      "grad_norm": 0.7896885933612465,
      "learning_rate": 7.90846908586256e-05,
      "loss": 0.6603,
      "step": 92
    },
    {
      "epoch": 0.8113413304252999,
      "grad_norm": 0.6860009351157326,
      "learning_rate": 7.903185530509743e-05,
      "loss": 0.6541,
      "step": 93
    },
    {
      "epoch": 0.8200654307524536,
      "grad_norm": 0.7887179980130798,
      "learning_rate": 7.89775559478273e-05,
      "loss": 0.6719,
      "step": 94
    },
    {
      "epoch": 0.8287895310796074,
      "grad_norm": 0.6863156036434349,
      "learning_rate": 7.892179482319297e-05,
      "loss": 0.6712,
      "step": 95
    },
    {
      "epoch": 0.8375136314067612,
      "grad_norm": 0.612318445743932,
      "learning_rate": 7.886457402239256e-05,
      "loss": 0.6588,
      "step": 96
    },
    {
      "epoch": 0.8462377317339149,
      "grad_norm": 0.5266659568320967,
      "learning_rate": 7.880589569136616e-05,
      "loss": 0.6477,
      "step": 97
    },
    {
      "epoch": 0.8549618320610687,
      "grad_norm": 0.6815563256417184,
      "learning_rate": 7.874576203071531e-05,
      "loss": 0.6647,
      "step": 98
    },
    {
      "epoch": 0.8636859323882224,
      "grad_norm": 0.7021771362356389,
      "learning_rate": 7.868417529562043e-05,
      "loss": 0.6519,
      "step": 99
    },
    {
      "epoch": 0.8724100327153762,
      "grad_norm": 0.9537663707386643,
      "learning_rate": 7.862113779575638e-05,
      "loss": 0.6655,
      "step": 100
    },
    {
      "epoch": 0.88113413304253,
      "grad_norm": 1.7262458672727332,
      "learning_rate": 7.85566518952057e-05,
      "loss": 0.6617,
      "step": 101
    },
    {
      "epoch": 0.8898582333696837,
      "grad_norm": 0.5924197078630852,
      "learning_rate": 7.849072001237001e-05,
      "loss": 0.6586,
      "step": 102
    },
    {
      "epoch": 0.8985823336968375,
      "grad_norm": 1.784429399577841,
      "learning_rate": 7.842334461987936e-05,
      "loss": 0.6718,
      "step": 103
    },
    {
      "epoch": 0.9073064340239912,
      "grad_norm": 0.8548005705828143,
      "learning_rate": 7.835452824449935e-05,
      "loss": 0.6589,
      "step": 104
    },
    {
      "epoch": 0.916030534351145,
      "grad_norm": 1.5914313223828267,
      "learning_rate": 7.828427346703657e-05,
      "loss": 0.6639,
      "step": 105
    },
    {
      "epoch": 0.9247546346782988,
      "grad_norm": 1.1912921777513241,
      "learning_rate": 7.821258292224166e-05,
      "loss": 0.669,
      "step": 106
    },
    {
      "epoch": 0.9334787350054525,
      "grad_norm": 1.1951669530416693,
      "learning_rate": 7.813945929871056e-05,
      "loss": 0.6497,
      "step": 107
    },
    {
      "epoch": 0.9422028353326063,
      "grad_norm": 1.037062568022807,
      "learning_rate": 7.806490533878368e-05,
      "loss": 0.6632,
      "step": 108
    },
    {
      "epoch": 0.95092693565976,
      "grad_norm": 1.2483557427639183,
      "learning_rate": 7.798892383844303e-05,
      "loss": 0.658,
      "step": 109
    },
    {
      "epoch": 0.9596510359869138,
      "grad_norm": 0.8133626652288988,
      "learning_rate": 7.791151764720737e-05,
      "loss": 0.6571,
      "step": 110
    },
    {
      "epoch": 0.9683751363140676,
      "grad_norm": 1.0059615445006975,
      "learning_rate": 7.783268966802539e-05,
      "loss": 0.6595,
      "step": 111
    },
    {
      "epoch": 0.9770992366412213,
      "grad_norm": 0.7304172503042858,
      "learning_rate": 7.775244285716679e-05,
      "loss": 0.6608,
      "step": 112
    },
    {
      "epoch": 0.9858233369683751,
      "grad_norm": 0.7284490288054312,
      "learning_rate": 7.767078022411139e-05,
      "loss": 0.6492,
      "step": 113
    },
    {
      "epoch": 0.9945474372955289,
      "grad_norm": 0.5901533647047473,
      "learning_rate": 7.758770483143634e-05,
      "loss": 0.6642,
      "step": 114
    },
    {
      "epoch": 1.0038167938931297,
      "grad_norm": 0.8414397249541458,
      "learning_rate": 7.750321979470123e-05,
      "loss": 0.9152,
      "step": 115
    },
    {
      "epoch": 1.0125408942202836,
      "grad_norm": 1.0739193248054102,
      "learning_rate": 7.741732828233124e-05,
      "loss": 0.626,
      "step": 116
    },
    {
      "epoch": 1.0212649945474372,
      "grad_norm": 1.1988463490729344,
      "learning_rate": 7.733003351549829e-05,
      "loss": 0.6393,
      "step": 117
    },
    {
      "epoch": 1.0299890948745911,
      "grad_norm": 0.9606655160302263,
      "learning_rate": 7.724133876800031e-05,
      "loss": 0.625,
      "step": 118
    },
    {
      "epoch": 1.0387131952017448,
      "grad_norm": 1.2372012961246242,
      "learning_rate": 7.715124736613839e-05,
      "loss": 0.6278,
      "step": 119
    },
    {
      "epoch": 1.0474372955288986,
      "grad_norm": 0.86001815183547,
      "learning_rate": 7.705976268859207e-05,
      "loss": 0.6187,
      "step": 120
    },
    {
      "epoch": 1.0561613958560523,
      "grad_norm": 1.1413833226380308,
      "learning_rate": 7.696688816629266e-05,
      "loss": 0.63,
      "step": 121
    },
    {
      "epoch": 1.0648854961832062,
      "grad_norm": 1.2327702775009692,
      "learning_rate": 7.687262728229447e-05,
      "loss": 0.6205,
      "step": 122
    },
    {
      "epoch": 1.0736095965103598,
      "grad_norm": 0.7767212251506833,
      "learning_rate": 7.677698357164431e-05,
      "loss": 0.6177,
      "step": 123
    },
    {
      "epoch": 1.0823336968375137,
      "grad_norm": 1.7079661807939999,
      "learning_rate": 7.667996062124884e-05,
      "loss": 0.6242,
      "step": 124
    },
    {
      "epoch": 1.0910577971646673,
      "grad_norm": 1.0068734700150146,
      "learning_rate": 7.658156206974005e-05,
      "loss": 0.6183,
      "step": 125
    },
    {
      "epoch": 1.0997818974918212,
      "grad_norm": 1.8753922872596245,
      "learning_rate": 7.648179160733883e-05,
      "loss": 0.6337,
      "step": 126
    },
    {
      "epoch": 1.1085059978189749,
      "grad_norm": 1.8578444866986128,
      "learning_rate": 7.638065297571657e-05,
      "loss": 0.6171,
      "step": 127
    },
    {
      "epoch": 1.1172300981461287,
      "grad_norm": 0.6478551153484415,
      "learning_rate": 7.627814996785484e-05,
      "loss": 0.6158,
      "step": 128
    },
    {
      "epoch": 1.1259541984732824,
      "grad_norm": 1.4820468569461809,
      "learning_rate": 7.61742864279031e-05,
      "loss": 0.6197,
      "step": 129
    },
    {
      "epoch": 1.1346782988004362,
      "grad_norm": 0.7243487925020168,
      "learning_rate": 7.606906625103464e-05,
      "loss": 0.625,
      "step": 130
    },
    {
      "epoch": 1.14340239912759,
      "grad_norm": 0.9539104928282168,
      "learning_rate": 7.596249338330034e-05,
      "loss": 0.6194,
      "step": 131
    },
    {
      "epoch": 1.1521264994547438,
      "grad_norm": 0.8136844990028549,
      "learning_rate": 7.585457182148081e-05,
      "loss": 0.6172,
      "step": 132
    },
    {
      "epoch": 1.1608505997818974,
      "grad_norm": 0.7125736106844306,
      "learning_rate": 7.57453056129365e-05,
      "loss": 0.6151,
      "step": 133
    },
    {
      "epoch": 1.1695747001090513,
      "grad_norm": 0.5804990189493681,
      "learning_rate": 7.56346988554558e-05,
      "loss": 0.6256,
      "step": 134
    },
    {
      "epoch": 1.178298800436205,
      "grad_norm": 0.6362204084540434,
      "learning_rate": 7.552275569710152e-05,
      "loss": 0.6184,
      "step": 135
    },
    {
      "epoch": 1.1870229007633588,
      "grad_norm": 0.43420077191532125,
      "learning_rate": 7.540948033605513e-05,
      "loss": 0.6105,
      "step": 136
    },
    {
      "epoch": 1.1957470010905125,
      "grad_norm": 0.46232316550703767,
      "learning_rate": 7.529487702045953e-05,
      "loss": 0.6056,
      "step": 137
    },
    {
      "epoch": 1.2044711014176663,
      "grad_norm": 0.4316498267915253,
      "learning_rate": 7.517895004825956e-05,
      "loss": 0.6011,
      "step": 138
    },
    {
      "epoch": 1.21319520174482,
      "grad_norm": 0.38696383075278273,
      "learning_rate": 7.506170376704095e-05,
      "loss": 0.6141,
      "step": 139
    },
    {
      "epoch": 1.2219193020719739,
      "grad_norm": 0.3858939834475368,
      "learning_rate": 7.494314257386715e-05,
      "loss": 0.6065,
      "step": 140
    },
    {
      "epoch": 1.2306434023991275,
      "grad_norm": 0.3738279653817542,
      "learning_rate": 7.48232709151145e-05,
      "loss": 0.6104,
      "step": 141
    },
    {
      "epoch": 1.2393675027262814,
      "grad_norm": 0.34434724315715803,
      "learning_rate": 7.470209328630548e-05,
      "loss": 0.605,
      "step": 142
    },
    {
      "epoch": 1.248091603053435,
      "grad_norm": 0.3291602472493041,
      "learning_rate": 7.457961423194011e-05,
      "loss": 0.6018,
      "step": 143
    },
    {
      "epoch": 1.256815703380589,
      "grad_norm": 0.27837095142325796,
      "learning_rate": 7.445583834532546e-05,
      "loss": 0.5993,
      "step": 144
    },
    {
      "epoch": 1.2655398037077425,
      "grad_norm": 0.31225187743500077,
      "learning_rate": 7.433077026840346e-05,
      "loss": 0.5984,
      "step": 145
    },
    {
      "epoch": 1.2742639040348964,
      "grad_norm": 0.27063932853508044,
      "learning_rate": 7.420441469157684e-05,
      "loss": 0.6048,
      "step": 146
    },
    {
      "epoch": 1.2829880043620503,
      "grad_norm": 0.28328810560800655,
      "learning_rate": 7.407677635353308e-05,
      "loss": 0.6061,
      "step": 147
    },
    {
      "epoch": 1.291712104689204,
      "grad_norm": 0.3335236260280515,
      "learning_rate": 7.39478600410669e-05,
      "loss": 0.6055,
      "step": 148
    },
    {
      "epoch": 1.3004362050163576,
      "grad_norm": 0.34277641090811606,
      "learning_rate": 7.381767058890056e-05,
      "loss": 0.5956,
      "step": 149
    },
    {
      "epoch": 1.3091603053435115,
      "grad_norm": 0.3944860323036086,
      "learning_rate": 7.368621287950264e-05,
      "loss": 0.5971,
      "step": 150
    },
    {
      "epoch": 1.3178844056706653,
      "grad_norm": 0.390412169412763,
      "learning_rate": 7.355349184290491e-05,
      "loss": 0.5991,
      "step": 151
    },
    {
      "epoch": 1.326608505997819,
      "grad_norm": 0.44167362977698205,
      "learning_rate": 7.341951245651747e-05,
      "loss": 0.5905,
      "step": 152
    },
    {
      "epoch": 1.3353326063249726,
      "grad_norm": 0.4873733423530836,
      "learning_rate": 7.328427974494201e-05,
      "loss": 0.6061,
      "step": 153
    },
    {
      "epoch": 1.3440567066521265,
      "grad_norm": 0.529005701508295,
      "learning_rate": 7.314779877978346e-05,
      "loss": 0.6038,
      "step": 154
    },
    {
      "epoch": 1.3527808069792804,
      "grad_norm": 0.5776151078666283,
      "learning_rate": 7.301007467945974e-05,
      "loss": 0.6063,
      "step": 155
    },
    {
      "epoch": 1.361504907306434,
      "grad_norm": 0.6712943884189805,
      "learning_rate": 7.28711126090098e-05,
      "loss": 0.609,
      "step": 156
    },
    {
      "epoch": 1.3702290076335877,
      "grad_norm": 0.6974077084236574,
      "learning_rate": 7.273091777989997e-05,
      "loss": 0.617,
      "step": 157
    },
    {
      "epoch": 1.3789531079607416,
      "grad_norm": 0.622800075821039,
      "learning_rate": 7.258949544982843e-05,
      "loss": 0.6012,
      "step": 158
    },
    {
      "epoch": 1.3876772082878954,
      "grad_norm": 0.6652165160394861,
      "learning_rate": 7.24468509225281e-05,
      "loss": 0.6213,
      "step": 159
    },
    {
      "epoch": 1.396401308615049,
      "grad_norm": 0.7796997637986155,
      "learning_rate": 7.230298954756772e-05,
      "loss": 0.6116,
      "step": 160
    },
    {
      "epoch": 1.4051254089422027,
      "grad_norm": 0.8050974173338423,
      "learning_rate": 7.215791672015121e-05,
      "loss": 0.6102,
      "step": 161
    },
    {
      "epoch": 1.4138495092693566,
      "grad_norm": 0.6245666904233175,
      "learning_rate": 7.201163788091536e-05,
      "loss": 0.6051,
      "step": 162
    },
    {
      "epoch": 1.4225736095965105,
      "grad_norm": 0.2791815826660419,
      "learning_rate": 7.186415851572579e-05,
      "loss": 0.5978,
      "step": 163
    },
    {
      "epoch": 1.4312977099236641,
      "grad_norm": 0.5658363527146323,
      "learning_rate": 7.171548415547114e-05,
      "loss": 0.6064,
      "step": 164
    },
    {
      "epoch": 1.4400218102508178,
      "grad_norm": 0.6010202995307374,
      "learning_rate": 7.156562037585576e-05,
      "loss": 0.6117,
      "step": 165
    },
    {
      "epoch": 1.4487459105779716,
      "grad_norm": 0.45545843396243296,
      "learning_rate": 7.141457279719053e-05,
      "loss": 0.6079,
      "step": 166
    },
    {
      "epoch": 1.4574700109051255,
      "grad_norm": 0.37879238626475686,
      "learning_rate": 7.126234708418214e-05,
      "loss": 0.5965,
      "step": 167
    },
    {
      "epoch": 1.4661941112322792,
      "grad_norm": 0.4580319723211172,
      "learning_rate": 7.110894894572056e-05,
      "loss": 0.6106,
      "step": 168
    },
    {
      "epoch": 1.4749182115594328,
      "grad_norm": 0.43310826738894753,
      "learning_rate": 7.095438413466503e-05,
      "loss": 0.6152,
      "step": 169
    },
    {
      "epoch": 1.4836423118865867,
      "grad_norm": 0.42673749198290256,
      "learning_rate": 7.079865844762829e-05,
      "loss": 0.6058,
      "step": 170
    },
    {
      "epoch": 1.4923664122137406,
      "grad_norm": 0.5356882769390818,
      "learning_rate": 7.064177772475912e-05,
      "loss": 0.6011,
      "step": 171
    },
    {
      "epoch": 1.5010905125408942,
      "grad_norm": 0.48453744479417066,
      "learning_rate": 7.048374784952343e-05,
      "loss": 0.6014,
      "step": 172
    },
    {
      "epoch": 1.5098146128680479,
      "grad_norm": 0.40573397034428954,
      "learning_rate": 7.03245747484835e-05,
      "loss": 0.6021,
      "step": 173
    },
    {
      "epoch": 1.5185387131952017,
      "grad_norm": 0.5052592973873418,
      "learning_rate": 7.016426439107586e-05,
      "loss": 0.5976,
      "step": 174
    },
    {
      "epoch": 1.5272628135223556,
      "grad_norm": 0.5640845358828827,
      "learning_rate": 7.000282278938724e-05,
      "loss": 0.6032,
      "step": 175
    },
    {
      "epoch": 1.5359869138495092,
      "grad_norm": 0.5716127749488464,
      "learning_rate": 6.984025599792926e-05,
      "loss": 0.6069,
      "step": 176
    },
    {
      "epoch": 1.544711014176663,
      "grad_norm": 0.6147245538888289,
      "learning_rate": 6.967657011341126e-05,
      "loss": 0.6017,
      "step": 177
    },
    {
      "epoch": 1.5534351145038168,
      "grad_norm": 0.5972264181543786,
      "learning_rate": 6.951177127451177e-05,
      "loss": 0.6039,
      "step": 178
    },
    {
      "epoch": 1.5621592148309706,
      "grad_norm": 0.4511378630106592,
      "learning_rate": 6.934586566164811e-05,
      "loss": 0.609,
      "step": 179
    },
    {
      "epoch": 1.5708833151581243,
      "grad_norm": 0.3324255798317227,
      "learning_rate": 6.917885949674483e-05,
      "loss": 0.6046,
      "step": 180
    },
    {
      "epoch": 1.579607415485278,
      "grad_norm": 0.3318537793653575,
      "learning_rate": 6.901075904300021e-05,
      "loss": 0.5984,
      "step": 181
    },
    {
      "epoch": 1.5883315158124318,
      "grad_norm": 0.4025671963472955,
      "learning_rate": 6.88415706046514e-05,
      "loss": 0.5991,
      "step": 182
    },
    {
      "epoch": 1.5970556161395857,
      "grad_norm": 0.4039375325984613,
      "learning_rate": 6.867130052673806e-05,
      "loss": 0.6132,
      "step": 183
    },
    {
      "epoch": 1.6057797164667393,
      "grad_norm": 0.42633324449837107,
      "learning_rate": 6.849995519486434e-05,
      "loss": 0.6112,
      "step": 184
    },
    {
      "epoch": 1.614503816793893,
      "grad_norm": 0.4031771610559183,
      "learning_rate": 6.832754103495939e-05,
      "loss": 0.5951,
      "step": 185
    },
    {
      "epoch": 1.6232279171210469,
      "grad_norm": 0.3101345676851429,
      "learning_rate": 6.815406451303647e-05,
      "loss": 0.5977,
      "step": 186
    },
    {
      "epoch": 1.6319520174482007,
      "grad_norm": 0.28180350855331504,
      "learning_rate": 6.797953213495033e-05,
      "loss": 0.6081,
      "step": 187
    },
    {
      "epoch": 1.6406761177753544,
      "grad_norm": 0.3450910788047459,
      "learning_rate": 6.780395044615329e-05,
      "loss": 0.606,
      "step": 188
    },
    {
      "epoch": 1.649400218102508,
      "grad_norm": 0.3941732240418325,
      "learning_rate": 6.762732603144978e-05,
      "loss": 0.6079,
      "step": 189
    },
    {
      "epoch": 1.658124318429662,
      "grad_norm": 0.3241938707733741,
      "learning_rate": 6.744966551474936e-05,
      "loss": 0.604,
      "step": 190
    },
    {
      "epoch": 1.6668484187568158,
      "grad_norm": 0.27804934781744717,
      "learning_rate": 6.727097555881826e-05,
      "loss": 0.595,
      "step": 191
    },
    {
      "epoch": 1.6755725190839694,
      "grad_norm": 0.25336159667743025,
      "learning_rate": 6.709126286502965e-05,
      "loss": 0.6025,
      "step": 192
    },
    {
      "epoch": 1.684296619411123,
      "grad_norm": 0.29341065316587794,
      "learning_rate": 6.691053417311216e-05,
      "loss": 0.6023,
      "step": 193
    },
    {
      "epoch": 1.693020719738277,
      "grad_norm": 0.2878985409168238,
      "learning_rate": 6.672879626089723e-05,
      "loss": 0.5949,
      "step": 194
    },
    {
      "epoch": 1.7017448200654308,
      "grad_norm": 0.26927072720134837,
      "learning_rate": 6.654605594406486e-05,
      "loss": 0.5979,
      "step": 195
    },
    {
      "epoch": 1.7104689203925845,
      "grad_norm": 0.3155329601805801,
      "learning_rate": 6.636232007588805e-05,
      "loss": 0.587,
      "step": 196
    },
    {
      "epoch": 1.7191930207197381,
      "grad_norm": 0.34847815991034525,
      "learning_rate": 6.617759554697573e-05,
      "loss": 0.5978,
      "step": 197
    },
    {
      "epoch": 1.727917121046892,
      "grad_norm": 0.28896074500408486,
      "learning_rate": 6.59918892850144e-05,
      "loss": 0.6069,
      "step": 198
    },
    {
      "epoch": 1.7366412213740459,
      "grad_norm": 0.2981626524269195,
      "learning_rate": 6.580520825450827e-05,
      "loss": 0.5956,
      "step": 199
    },
    {
      "epoch": 1.7453653217011995,
      "grad_norm": 0.31680273274222387,
      "learning_rate": 6.561755945651813e-05,
      "loss": 0.5907,
      "step": 200
    },
    {
      "epoch": 1.7540894220283532,
      "grad_norm": 0.2788023167998957,
      "learning_rate": 6.542894992839873e-05,
      "loss": 0.5988,
      "step": 201
    },
    {
      "epoch": 1.762813522355507,
      "grad_norm": 0.26041179212976306,
      "learning_rate": 6.52393867435349e-05,
      "loss": 0.5879,
      "step": 202
    },
    {
      "epoch": 1.771537622682661,
      "grad_norm": 0.34950012098507055,
      "learning_rate": 6.504887701107626e-05,
      "loss": 0.5933,
      "step": 203
    },
    {
      "epoch": 1.7802617230098146,
      "grad_norm": 0.5298511066656313,
      "learning_rate": 6.48574278756706e-05,
      "loss": 0.59,
      "step": 204
    },
    {
      "epoch": 1.7889858233369684,
      "grad_norm": 0.6366920336388178,
      "learning_rate": 6.466504651719598e-05,
      "loss": 0.5967,
      "step": 205
    },
    {
      "epoch": 1.7977099236641223,
      "grad_norm": 0.6299010554780248,
      "learning_rate": 6.447174015049139e-05,
      "loss": 0.5974,
      "step": 206
    },
    {
      "epoch": 1.806434023991276,
      "grad_norm": 0.562858852871968,
      "learning_rate": 6.427751602508628e-05,
      "loss": 0.6057,
      "step": 207
    },
    {
      "epoch": 1.8151581243184296,
      "grad_norm": 0.4004595325182879,
      "learning_rate": 6.408238142492855e-05,
      "loss": 0.59,
      "step": 208
    },
    {
      "epoch": 1.8238822246455835,
      "grad_norm": 0.21584901270572757,
      "learning_rate": 6.388634366811146e-05,
      "loss": 0.5943,
      "step": 209
    },
    {
      "epoch": 1.8326063249727373,
      "grad_norm": 0.37265049074019335,
      "learning_rate": 6.368941010659921e-05,
      "loss": 0.6096,
      "step": 210
    },
    {
      "epoch": 1.841330425299891,
      "grad_norm": 0.5267910816953789,
      "learning_rate": 6.349158812595116e-05,
      "loss": 0.6053,
      "step": 211
    },
    {
      "epoch": 1.8500545256270446,
      "grad_norm": 0.47801721985552514,
      "learning_rate": 6.329288514504487e-05,
      "loss": 0.5936,
      "step": 212
    },
    {
      "epoch": 1.8587786259541985,
      "grad_norm": 0.27531874942679646,
      "learning_rate": 6.309330861579786e-05,
      "loss": 0.5997,
      "step": 213
    },
    {
      "epoch": 1.8675027262813524,
      "grad_norm": 0.3657379875213062,
      "learning_rate": 6.28928660228882e-05,
      "loss": 0.5963,
      "step": 214
    },
    {
      "epoch": 1.876226826608506,
      "grad_norm": 0.3633999933893769,
      "learning_rate": 6.269156488347372e-05,
      "loss": 0.5899,
      "step": 215
    },
    {
      "epoch": 1.8849509269356597,
      "grad_norm": 0.2855859194806056,
      "learning_rate": 6.248941274691017e-05,
      "loss": 0.5963,
      "step": 216
    },
    {
      "epoch": 1.8936750272628136,
      "grad_norm": 0.26871535087996673,
      "learning_rate": 6.228641719446808e-05,
      "loss": 0.5832,
      "step": 217
    },
    {
      "epoch": 1.9023991275899674,
      "grad_norm": 0.26413333351828056,
      "learning_rate": 6.208258583904841e-05,
      "loss": 0.5913,
      "step": 218
    },
    {
      "epoch": 1.911123227917121,
      "grad_norm": 0.27861019506862306,
      "learning_rate": 6.18779263248971e-05,
      "loss": 0.6088,
      "step": 219
    },
    {
      "epoch": 1.9198473282442747,
      "grad_norm": 0.26490994643954596,
      "learning_rate": 6.16724463273183e-05,
      "loss": 0.6072,
      "step": 220
    },
    {
      "epoch": 1.9285714285714286,
      "grad_norm": 0.2464264085957813,
      "learning_rate": 6.146615355238668e-05,
      "loss": 0.5998,
      "step": 221
    },
    {
      "epoch": 1.9372955288985825,
      "grad_norm": 0.2060880785957384,
      "learning_rate": 6.125905573665824e-05,
      "loss": 0.5998,
      "step": 222
    },
    {
      "epoch": 1.9460196292257361,
      "grad_norm": 0.26971063557098873,
      "learning_rate": 6.105116064688033e-05,
      "loss": 0.6048,
      "step": 223
    },
    {
      "epoch": 1.9547437295528898,
      "grad_norm": 0.32136759035233387,
      "learning_rate": 6.0842476079700264e-05,
      "loss": 0.6019,
      "step": 224
    },
    {
      "epoch": 1.9634678298800436,
      "grad_norm": 0.23286070304886092,
      "learning_rate": 6.063300986137297e-05,
      "loss": 0.5978,
      "step": 225
    },
    {
      "epoch": 1.9721919302071975,
      "grad_norm": 0.18824642933626035,
      "learning_rate": 6.04227698474675e-05,
      "loss": 0.5853,
      "step": 226
    },
    {
      "epoch": 1.9809160305343512,
      "grad_norm": 0.21674772391599217,
      "learning_rate": 6.02117639225724e-05,
      "loss": 0.6078,
      "step": 227
    },
    {
      "epoch": 1.9896401308615048,
      "grad_norm": 0.2934361150620414,
      "learning_rate": 6.000000000000001e-05,
      "loss": 0.5984,
      "step": 228
    },
    {
      "epoch": 1.9983642311886587,
      "grad_norm": 0.44422300505199064,
      "learning_rate": 5.9787486021489705e-05,
      "loss": 0.8056,
      "step": 229
    }
  ],
  "logging_steps": 1,
  "max_steps": 570,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 6.092582874781843e+18,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}