|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 3.994547437295529, |
|
"eval_steps": 500, |
|
"global_step": 456, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.008724100327153763, |
|
"grad_norm": 6.035512972360855, |
|
"learning_rate": 1.4035087719298246e-06, |
|
"loss": 1.1029, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.017448200654307525, |
|
"grad_norm": 6.042841036030158, |
|
"learning_rate": 2.8070175438596493e-06, |
|
"loss": 1.1024, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.026172300981461286, |
|
"grad_norm": 5.910306635602092, |
|
"learning_rate": 4.210526315789474e-06, |
|
"loss": 1.0986, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.03489640130861505, |
|
"grad_norm": 4.536951510086868, |
|
"learning_rate": 5.6140350877192985e-06, |
|
"loss": 1.0538, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.04362050163576881, |
|
"grad_norm": 2.3773466328483686, |
|
"learning_rate": 7.017543859649123e-06, |
|
"loss": 0.9941, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.05234460196292257, |
|
"grad_norm": 1.999261332126332, |
|
"learning_rate": 8.421052631578948e-06, |
|
"loss": 0.999, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.061068702290076333, |
|
"grad_norm": 4.369629342726399, |
|
"learning_rate": 9.824561403508772e-06, |
|
"loss": 0.9889, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.0697928026172301, |
|
"grad_norm": 4.899527942424933, |
|
"learning_rate": 1.1228070175438597e-05, |
|
"loss": 0.9631, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.07851690294438386, |
|
"grad_norm": 5.748165018609449, |
|
"learning_rate": 1.263157894736842e-05, |
|
"loss": 0.9524, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.08724100327153762, |
|
"grad_norm": 4.763126150981281, |
|
"learning_rate": 1.4035087719298246e-05, |
|
"loss": 0.952, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.09596510359869138, |
|
"grad_norm": 3.167454561583767, |
|
"learning_rate": 1.543859649122807e-05, |
|
"loss": 0.9095, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.10468920392584515, |
|
"grad_norm": 3.3676041736516433, |
|
"learning_rate": 1.6842105263157896e-05, |
|
"loss": 0.89, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.1134133042529989, |
|
"grad_norm": 2.8110117483164676, |
|
"learning_rate": 1.824561403508772e-05, |
|
"loss": 0.8538, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.12213740458015267, |
|
"grad_norm": 2.0519859086879286, |
|
"learning_rate": 1.9649122807017544e-05, |
|
"loss": 0.85, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.13086150490730644, |
|
"grad_norm": 1.8373157533967603, |
|
"learning_rate": 2.105263157894737e-05, |
|
"loss": 0.8349, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.1395856052344602, |
|
"grad_norm": 1.6156997409278058, |
|
"learning_rate": 2.2456140350877194e-05, |
|
"loss": 0.829, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.14830970556161396, |
|
"grad_norm": 1.2400567097625959, |
|
"learning_rate": 2.385964912280702e-05, |
|
"loss": 0.8108, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.15703380588876772, |
|
"grad_norm": 1.1069155525681142, |
|
"learning_rate": 2.526315789473684e-05, |
|
"loss": 0.8029, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.16575790621592149, |
|
"grad_norm": 1.0769626887248518, |
|
"learning_rate": 2.6666666666666667e-05, |
|
"loss": 0.7858, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.17448200654307525, |
|
"grad_norm": 0.7016586049675165, |
|
"learning_rate": 2.8070175438596492e-05, |
|
"loss": 0.7816, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.183206106870229, |
|
"grad_norm": 0.8460028260574683, |
|
"learning_rate": 2.9473684210526317e-05, |
|
"loss": 0.7811, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.19193020719738277, |
|
"grad_norm": 0.6598083455923888, |
|
"learning_rate": 3.087719298245614e-05, |
|
"loss": 0.7643, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.20065430752453653, |
|
"grad_norm": 0.7215762522174539, |
|
"learning_rate": 3.228070175438597e-05, |
|
"loss": 0.7617, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.2093784078516903, |
|
"grad_norm": 0.5554406103693584, |
|
"learning_rate": 3.368421052631579e-05, |
|
"loss": 0.7576, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.21810250817884405, |
|
"grad_norm": 0.6258543895787715, |
|
"learning_rate": 3.508771929824562e-05, |
|
"loss": 0.7629, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.2268266085059978, |
|
"grad_norm": 0.7676778822568288, |
|
"learning_rate": 3.649122807017544e-05, |
|
"loss": 0.7521, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.23555070883315157, |
|
"grad_norm": 1.0032530689824866, |
|
"learning_rate": 3.789473684210526e-05, |
|
"loss": 0.7384, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.24427480916030533, |
|
"grad_norm": 1.2535316239757839, |
|
"learning_rate": 3.929824561403509e-05, |
|
"loss": 0.7474, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.2529989094874591, |
|
"grad_norm": 0.9953077887554728, |
|
"learning_rate": 4.070175438596492e-05, |
|
"loss": 0.7412, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.2617230098146129, |
|
"grad_norm": 1.3600760898966002, |
|
"learning_rate": 4.210526315789474e-05, |
|
"loss": 0.7257, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.27044711014176664, |
|
"grad_norm": 0.6326945767090465, |
|
"learning_rate": 4.350877192982457e-05, |
|
"loss": 0.7247, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.2791712104689204, |
|
"grad_norm": 1.0488194881380388, |
|
"learning_rate": 4.491228070175439e-05, |
|
"loss": 0.7247, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.28789531079607417, |
|
"grad_norm": 1.1854260719924898, |
|
"learning_rate": 4.6315789473684214e-05, |
|
"loss": 0.7265, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.2966194111232279, |
|
"grad_norm": 1.231800504978088, |
|
"learning_rate": 4.771929824561404e-05, |
|
"loss": 0.724, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.3053435114503817, |
|
"grad_norm": 1.1471910683800164, |
|
"learning_rate": 4.9122807017543864e-05, |
|
"loss": 0.7245, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.31406761177753545, |
|
"grad_norm": 1.2650375204215663, |
|
"learning_rate": 5.052631578947368e-05, |
|
"loss": 0.7173, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.3227917121046892, |
|
"grad_norm": 1.581708517443491, |
|
"learning_rate": 5.1929824561403515e-05, |
|
"loss": 0.7146, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.33151581243184297, |
|
"grad_norm": 0.6899231907687099, |
|
"learning_rate": 5.333333333333333e-05, |
|
"loss": 0.7039, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.34023991275899673, |
|
"grad_norm": 1.9659650918585876, |
|
"learning_rate": 5.4736842105263165e-05, |
|
"loss": 0.7129, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.3489640130861505, |
|
"grad_norm": 0.9602545263207618, |
|
"learning_rate": 5.6140350877192984e-05, |
|
"loss": 0.7229, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.35768811341330425, |
|
"grad_norm": 2.019396688493347, |
|
"learning_rate": 5.7543859649122816e-05, |
|
"loss": 0.7173, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.366412213740458, |
|
"grad_norm": 1.3667216071947406, |
|
"learning_rate": 5.8947368421052634e-05, |
|
"loss": 0.7088, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.3751363140676118, |
|
"grad_norm": 1.6739220492126983, |
|
"learning_rate": 6.035087719298246e-05, |
|
"loss": 0.7126, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.38386041439476554, |
|
"grad_norm": 1.406631076781101, |
|
"learning_rate": 6.175438596491228e-05, |
|
"loss": 0.7211, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.3925845147219193, |
|
"grad_norm": 1.232815581243198, |
|
"learning_rate": 6.315789473684212e-05, |
|
"loss": 0.7114, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.40130861504907306, |
|
"grad_norm": 1.3716611381630202, |
|
"learning_rate": 6.456140350877194e-05, |
|
"loss": 0.7174, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.4100327153762268, |
|
"grad_norm": 1.1415702503019296, |
|
"learning_rate": 6.596491228070175e-05, |
|
"loss": 0.7005, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.4187568157033806, |
|
"grad_norm": 1.2074860027919303, |
|
"learning_rate": 6.736842105263159e-05, |
|
"loss": 0.7098, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.42748091603053434, |
|
"grad_norm": 1.6609425790213797, |
|
"learning_rate": 6.87719298245614e-05, |
|
"loss": 0.7098, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.4362050163576881, |
|
"grad_norm": 0.9769451874458346, |
|
"learning_rate": 7.017543859649124e-05, |
|
"loss": 0.7079, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.44492911668484186, |
|
"grad_norm": 1.7666355593904801, |
|
"learning_rate": 7.157894736842105e-05, |
|
"loss": 0.6967, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.4536532170119956, |
|
"grad_norm": 1.014343915032852, |
|
"learning_rate": 7.298245614035087e-05, |
|
"loss": 0.6961, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.4623773173391494, |
|
"grad_norm": 1.292912081295997, |
|
"learning_rate": 7.43859649122807e-05, |
|
"loss": 0.6888, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.47110141766630315, |
|
"grad_norm": 1.33549845587438, |
|
"learning_rate": 7.578947368421052e-05, |
|
"loss": 0.6989, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.4798255179934569, |
|
"grad_norm": 1.6285853466115705, |
|
"learning_rate": 7.719298245614036e-05, |
|
"loss": 0.7016, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.48854961832061067, |
|
"grad_norm": 0.9433375871219883, |
|
"learning_rate": 7.859649122807017e-05, |
|
"loss": 0.6824, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.49727371864776443, |
|
"grad_norm": 1.2489894514867836, |
|
"learning_rate": 8e-05, |
|
"loss": 0.6898, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.5059978189749182, |
|
"grad_norm": 1.60283780700127, |
|
"learning_rate": 7.99992499440621e-05, |
|
"loss": 0.7019, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.514721919302072, |
|
"grad_norm": 1.349411337094158, |
|
"learning_rate": 7.999699980437755e-05, |
|
"loss": 0.692, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.5234460196292258, |
|
"grad_norm": 1.4027794489135887, |
|
"learning_rate": 7.999324966533291e-05, |
|
"loss": 0.6739, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.5321701199563795, |
|
"grad_norm": 0.8999831714962346, |
|
"learning_rate": 7.998799966756889e-05, |
|
"loss": 0.6925, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.5408942202835333, |
|
"grad_norm": 1.2349556182219792, |
|
"learning_rate": 7.998125000797506e-05, |
|
"loss": 0.6811, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.549618320610687, |
|
"grad_norm": 0.9953482513645127, |
|
"learning_rate": 7.997300093968255e-05, |
|
"loss": 0.6919, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.5583424209378408, |
|
"grad_norm": 1.7631278928848257, |
|
"learning_rate": 7.99632527720545e-05, |
|
"loss": 0.7076, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.5670665212649946, |
|
"grad_norm": 0.9802682486455654, |
|
"learning_rate": 7.995200587067445e-05, |
|
"loss": 0.6836, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.5757906215921483, |
|
"grad_norm": 1.4018686830735987, |
|
"learning_rate": 7.993926065733265e-05, |
|
"loss": 0.6998, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.5845147219193021, |
|
"grad_norm": 1.0081781512305683, |
|
"learning_rate": 7.992501761001027e-05, |
|
"loss": 0.6878, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.5932388222464559, |
|
"grad_norm": 1.2813118135682324, |
|
"learning_rate": 7.99092772628614e-05, |
|
"loss": 0.6948, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.6019629225736096, |
|
"grad_norm": 0.9281112795468938, |
|
"learning_rate": 7.98920402061931e-05, |
|
"loss": 0.6969, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.6106870229007634, |
|
"grad_norm": 1.2375443626494387, |
|
"learning_rate": 7.987330708644319e-05, |
|
"loss": 0.6793, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.6194111232279171, |
|
"grad_norm": 1.0863449033166035, |
|
"learning_rate": 7.985307860615607e-05, |
|
"loss": 0.6848, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.6281352235550709, |
|
"grad_norm": 0.942362107324505, |
|
"learning_rate": 7.98313555239563e-05, |
|
"loss": 0.6862, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.6368593238822247, |
|
"grad_norm": 1.3027161650276327, |
|
"learning_rate": 7.980813865452026e-05, |
|
"loss": 0.6735, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.6455834242093784, |
|
"grad_norm": 0.7977961043410647, |
|
"learning_rate": 7.978342886854546e-05, |
|
"loss": 0.6787, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.6543075245365322, |
|
"grad_norm": 1.3693856212692685, |
|
"learning_rate": 7.975722709271799e-05, |
|
"loss": 0.6751, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.6630316248636859, |
|
"grad_norm": 0.7738139399748106, |
|
"learning_rate": 7.972953430967773e-05, |
|
"loss": 0.6726, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.6717557251908397, |
|
"grad_norm": 0.6453937973753266, |
|
"learning_rate": 7.97003515579815e-05, |
|
"loss": 0.6756, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.6804798255179935, |
|
"grad_norm": 0.7085908664971486, |
|
"learning_rate": 7.96696799320641e-05, |
|
"loss": 0.6774, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.6892039258451472, |
|
"grad_norm": 0.7420105586344222, |
|
"learning_rate": 7.96375205821973e-05, |
|
"loss": 0.6647, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.697928026172301, |
|
"grad_norm": 1.182903610639294, |
|
"learning_rate": 7.960387471444666e-05, |
|
"loss": 0.6758, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.7066521264994547, |
|
"grad_norm": 1.305515943296027, |
|
"learning_rate": 7.956874359062632e-05, |
|
"loss": 0.6808, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.7153762268266085, |
|
"grad_norm": 0.9451990102208602, |
|
"learning_rate": 7.95321285282517e-05, |
|
"loss": 0.6697, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.7241003271537623, |
|
"grad_norm": 0.9432391308082473, |
|
"learning_rate": 7.949403090049002e-05, |
|
"loss": 0.6723, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.732824427480916, |
|
"grad_norm": 1.1371193647756934, |
|
"learning_rate": 7.94544521361089e-05, |
|
"loss": 0.6773, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.7415485278080698, |
|
"grad_norm": 1.1858932832021734, |
|
"learning_rate": 7.941339371942269e-05, |
|
"loss": 0.6657, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.7502726281352236, |
|
"grad_norm": 1.165847606968587, |
|
"learning_rate": 7.937085719023685e-05, |
|
"loss": 0.6743, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.7589967284623773, |
|
"grad_norm": 0.5770889497138026, |
|
"learning_rate": 7.932684414379021e-05, |
|
"loss": 0.668, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.7677208287895311, |
|
"grad_norm": 0.925349798073699, |
|
"learning_rate": 7.928135623069509e-05, |
|
"loss": 0.6693, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.7764449291166848, |
|
"grad_norm": 1.2265564072341149, |
|
"learning_rate": 7.923439515687546e-05, |
|
"loss": 0.6721, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.7851690294438386, |
|
"grad_norm": 0.76774338811776, |
|
"learning_rate": 7.918596268350296e-05, |
|
"loss": 0.664, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.7938931297709924, |
|
"grad_norm": 0.811830002272808, |
|
"learning_rate": 7.913606062693077e-05, |
|
"loss": 0.6674, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.8026172300981461, |
|
"grad_norm": 0.7896885933612465, |
|
"learning_rate": 7.90846908586256e-05, |
|
"loss": 0.6603, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.8113413304252999, |
|
"grad_norm": 0.6860009351157326, |
|
"learning_rate": 7.903185530509743e-05, |
|
"loss": 0.6541, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.8200654307524536, |
|
"grad_norm": 0.7887179980130798, |
|
"learning_rate": 7.89775559478273e-05, |
|
"loss": 0.6719, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.8287895310796074, |
|
"grad_norm": 0.6863156036434349, |
|
"learning_rate": 7.892179482319297e-05, |
|
"loss": 0.6712, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.8375136314067612, |
|
"grad_norm": 0.612318445743932, |
|
"learning_rate": 7.886457402239256e-05, |
|
"loss": 0.6588, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.8462377317339149, |
|
"grad_norm": 0.5266659568320967, |
|
"learning_rate": 7.880589569136616e-05, |
|
"loss": 0.6477, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.8549618320610687, |
|
"grad_norm": 0.6815563256417184, |
|
"learning_rate": 7.874576203071531e-05, |
|
"loss": 0.6647, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.8636859323882224, |
|
"grad_norm": 0.7021771362356389, |
|
"learning_rate": 7.868417529562043e-05, |
|
"loss": 0.6519, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.8724100327153762, |
|
"grad_norm": 0.9537663707386643, |
|
"learning_rate": 7.862113779575638e-05, |
|
"loss": 0.6655, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.88113413304253, |
|
"grad_norm": 1.7262458672727332, |
|
"learning_rate": 7.85566518952057e-05, |
|
"loss": 0.6617, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.8898582333696837, |
|
"grad_norm": 0.5924197078630852, |
|
"learning_rate": 7.849072001237001e-05, |
|
"loss": 0.6586, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.8985823336968375, |
|
"grad_norm": 1.784429399577841, |
|
"learning_rate": 7.842334461987936e-05, |
|
"loss": 0.6718, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.9073064340239912, |
|
"grad_norm": 0.8548005705828143, |
|
"learning_rate": 7.835452824449935e-05, |
|
"loss": 0.6589, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.916030534351145, |
|
"grad_norm": 1.5914313223828267, |
|
"learning_rate": 7.828427346703657e-05, |
|
"loss": 0.6639, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.9247546346782988, |
|
"grad_norm": 1.1912921777513241, |
|
"learning_rate": 7.821258292224166e-05, |
|
"loss": 0.669, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.9334787350054525, |
|
"grad_norm": 1.1951669530416693, |
|
"learning_rate": 7.813945929871056e-05, |
|
"loss": 0.6497, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.9422028353326063, |
|
"grad_norm": 1.037062568022807, |
|
"learning_rate": 7.806490533878368e-05, |
|
"loss": 0.6632, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.95092693565976, |
|
"grad_norm": 1.2483557427639183, |
|
"learning_rate": 7.798892383844303e-05, |
|
"loss": 0.658, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.9596510359869138, |
|
"grad_norm": 0.8133626652288988, |
|
"learning_rate": 7.791151764720737e-05, |
|
"loss": 0.6571, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.9683751363140676, |
|
"grad_norm": 1.0059615445006975, |
|
"learning_rate": 7.783268966802539e-05, |
|
"loss": 0.6595, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.9770992366412213, |
|
"grad_norm": 0.7304172503042858, |
|
"learning_rate": 7.775244285716679e-05, |
|
"loss": 0.6608, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.9858233369683751, |
|
"grad_norm": 0.7284490288054312, |
|
"learning_rate": 7.767078022411139e-05, |
|
"loss": 0.6492, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.9945474372955289, |
|
"grad_norm": 0.5901533647047473, |
|
"learning_rate": 7.758770483143634e-05, |
|
"loss": 0.6642, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.0038167938931297, |
|
"grad_norm": 0.8414397249541458, |
|
"learning_rate": 7.750321979470123e-05, |
|
"loss": 0.9152, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.0125408942202836, |
|
"grad_norm": 1.0739193248054102, |
|
"learning_rate": 7.741732828233124e-05, |
|
"loss": 0.626, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.0212649945474372, |
|
"grad_norm": 1.1988463490729344, |
|
"learning_rate": 7.733003351549829e-05, |
|
"loss": 0.6393, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 1.0299890948745911, |
|
"grad_norm": 0.9606655160302263, |
|
"learning_rate": 7.724133876800031e-05, |
|
"loss": 0.625, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.0387131952017448, |
|
"grad_norm": 1.2372012961246242, |
|
"learning_rate": 7.715124736613839e-05, |
|
"loss": 0.6278, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 1.0474372955288986, |
|
"grad_norm": 0.86001815183547, |
|
"learning_rate": 7.705976268859207e-05, |
|
"loss": 0.6187, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.0561613958560523, |
|
"grad_norm": 1.1413833226380308, |
|
"learning_rate": 7.696688816629266e-05, |
|
"loss": 0.63, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 1.0648854961832062, |
|
"grad_norm": 1.2327702775009692, |
|
"learning_rate": 7.687262728229447e-05, |
|
"loss": 0.6205, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.0736095965103598, |
|
"grad_norm": 0.7767212251506833, |
|
"learning_rate": 7.677698357164431e-05, |
|
"loss": 0.6177, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 1.0823336968375137, |
|
"grad_norm": 1.7079661807939999, |
|
"learning_rate": 7.667996062124884e-05, |
|
"loss": 0.6242, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.0910577971646673, |
|
"grad_norm": 1.0068734700150146, |
|
"learning_rate": 7.658156206974005e-05, |
|
"loss": 0.6183, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.0997818974918212, |
|
"grad_norm": 1.8753922872596245, |
|
"learning_rate": 7.648179160733883e-05, |
|
"loss": 0.6337, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.1085059978189749, |
|
"grad_norm": 1.8578444866986128, |
|
"learning_rate": 7.638065297571657e-05, |
|
"loss": 0.6171, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 1.1172300981461287, |
|
"grad_norm": 0.6478551153484415, |
|
"learning_rate": 7.627814996785484e-05, |
|
"loss": 0.6158, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.1259541984732824, |
|
"grad_norm": 1.4820468569461809, |
|
"learning_rate": 7.61742864279031e-05, |
|
"loss": 0.6197, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.1346782988004362, |
|
"grad_norm": 0.7243487925020168, |
|
"learning_rate": 7.606906625103464e-05, |
|
"loss": 0.625, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.14340239912759, |
|
"grad_norm": 0.9539104928282168, |
|
"learning_rate": 7.596249338330034e-05, |
|
"loss": 0.6194, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.1521264994547438, |
|
"grad_norm": 0.8136844990028549, |
|
"learning_rate": 7.585457182148081e-05, |
|
"loss": 0.6172, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.1608505997818974, |
|
"grad_norm": 0.7125736106844306, |
|
"learning_rate": 7.57453056129365e-05, |
|
"loss": 0.6151, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.1695747001090513, |
|
"grad_norm": 0.5804990189493681, |
|
"learning_rate": 7.56346988554558e-05, |
|
"loss": 0.6256, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.178298800436205, |
|
"grad_norm": 0.6362204084540434, |
|
"learning_rate": 7.552275569710152e-05, |
|
"loss": 0.6184, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.1870229007633588, |
|
"grad_norm": 0.43420077191532125, |
|
"learning_rate": 7.540948033605513e-05, |
|
"loss": 0.6105, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.1957470010905125, |
|
"grad_norm": 0.46232316550703767, |
|
"learning_rate": 7.529487702045953e-05, |
|
"loss": 0.6056, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.2044711014176663, |
|
"grad_norm": 0.4316498267915253, |
|
"learning_rate": 7.517895004825956e-05, |
|
"loss": 0.6011, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.21319520174482, |
|
"grad_norm": 0.38696383075278273, |
|
"learning_rate": 7.506170376704095e-05, |
|
"loss": 0.6141, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.2219193020719739, |
|
"grad_norm": 0.3858939834475368, |
|
"learning_rate": 7.494314257386715e-05, |
|
"loss": 0.6065, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.2306434023991275, |
|
"grad_norm": 0.3738279653817542, |
|
"learning_rate": 7.48232709151145e-05, |
|
"loss": 0.6104, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.2393675027262814, |
|
"grad_norm": 0.34434724315715803, |
|
"learning_rate": 7.470209328630548e-05, |
|
"loss": 0.605, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.248091603053435, |
|
"grad_norm": 0.3291602472493041, |
|
"learning_rate": 7.457961423194011e-05, |
|
"loss": 0.6018, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.256815703380589, |
|
"grad_norm": 0.27837095142325796, |
|
"learning_rate": 7.445583834532546e-05, |
|
"loss": 0.5993, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.2655398037077425, |
|
"grad_norm": 0.31225187743500077, |
|
"learning_rate": 7.433077026840346e-05, |
|
"loss": 0.5984, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.2742639040348964, |
|
"grad_norm": 0.27063932853508044, |
|
"learning_rate": 7.420441469157684e-05, |
|
"loss": 0.6048, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.2829880043620503, |
|
"grad_norm": 0.28328810560800655, |
|
"learning_rate": 7.407677635353308e-05, |
|
"loss": 0.6061, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.291712104689204, |
|
"grad_norm": 0.3335236260280515, |
|
"learning_rate": 7.39478600410669e-05, |
|
"loss": 0.6055, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.3004362050163576, |
|
"grad_norm": 0.34277641090811606, |
|
"learning_rate": 7.381767058890056e-05, |
|
"loss": 0.5956, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.3091603053435115, |
|
"grad_norm": 0.3944860323036086, |
|
"learning_rate": 7.368621287950264e-05, |
|
"loss": 0.5971, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.3178844056706653, |
|
"grad_norm": 0.390412169412763, |
|
"learning_rate": 7.355349184290491e-05, |
|
"loss": 0.5991, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.326608505997819, |
|
"grad_norm": 0.44167362977698205, |
|
"learning_rate": 7.341951245651747e-05, |
|
"loss": 0.5905, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.3353326063249726, |
|
"grad_norm": 0.4873733423530836, |
|
"learning_rate": 7.328427974494201e-05, |
|
"loss": 0.6061, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.3440567066521265, |
|
"grad_norm": 0.529005701508295, |
|
"learning_rate": 7.314779877978346e-05, |
|
"loss": 0.6038, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.3527808069792804, |
|
"grad_norm": 0.5776151078666283, |
|
"learning_rate": 7.301007467945974e-05, |
|
"loss": 0.6063, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.361504907306434, |
|
"grad_norm": 0.6712943884189805, |
|
"learning_rate": 7.28711126090098e-05, |
|
"loss": 0.609, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.3702290076335877, |
|
"grad_norm": 0.6974077084236574, |
|
"learning_rate": 7.273091777989997e-05, |
|
"loss": 0.617, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 1.3789531079607416, |
|
"grad_norm": 0.622800075821039, |
|
"learning_rate": 7.258949544982843e-05, |
|
"loss": 0.6012, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.3876772082878954, |
|
"grad_norm": 0.6652165160394861, |
|
"learning_rate": 7.24468509225281e-05, |
|
"loss": 0.6213, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 1.396401308615049, |
|
"grad_norm": 0.7796997637986155, |
|
"learning_rate": 7.230298954756772e-05, |
|
"loss": 0.6116, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.4051254089422027, |
|
"grad_norm": 0.8050974173338423, |
|
"learning_rate": 7.215791672015121e-05, |
|
"loss": 0.6102, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 1.4138495092693566, |
|
"grad_norm": 0.6245666904233175, |
|
"learning_rate": 7.201163788091536e-05, |
|
"loss": 0.6051, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.4225736095965105, |
|
"grad_norm": 0.2791815826660419, |
|
"learning_rate": 7.186415851572579e-05, |
|
"loss": 0.5978, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 1.4312977099236641, |
|
"grad_norm": 0.5658363527146323, |
|
"learning_rate": 7.171548415547114e-05, |
|
"loss": 0.6064, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.4400218102508178, |
|
"grad_norm": 0.6010202995307374, |
|
"learning_rate": 7.156562037585576e-05, |
|
"loss": 0.6117, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.4487459105779716, |
|
"grad_norm": 0.45545843396243296, |
|
"learning_rate": 7.141457279719053e-05, |
|
"loss": 0.6079, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 1.4574700109051255, |
|
"grad_norm": 0.37879238626475686, |
|
"learning_rate": 7.126234708418214e-05, |
|
"loss": 0.5965, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 1.4661941112322792, |
|
"grad_norm": 0.4580319723211172, |
|
"learning_rate": 7.110894894572056e-05, |
|
"loss": 0.6106, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.4749182115594328, |
|
"grad_norm": 0.43310826738894753, |
|
"learning_rate": 7.095438413466503e-05, |
|
"loss": 0.6152, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 1.4836423118865867, |
|
"grad_norm": 0.42673749198290256, |
|
"learning_rate": 7.079865844762829e-05, |
|
"loss": 0.6058, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.4923664122137406, |
|
"grad_norm": 0.5356882769390818, |
|
"learning_rate": 7.064177772475912e-05, |
|
"loss": 0.6011, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 1.5010905125408942, |
|
"grad_norm": 0.48453744479417066, |
|
"learning_rate": 7.048374784952343e-05, |
|
"loss": 0.6014, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.5098146128680479, |
|
"grad_norm": 0.40573397034428954, |
|
"learning_rate": 7.03245747484835e-05, |
|
"loss": 0.6021, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 1.5185387131952017, |
|
"grad_norm": 0.5052592973873418, |
|
"learning_rate": 7.016426439107586e-05, |
|
"loss": 0.5976, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.5272628135223556, |
|
"grad_norm": 0.5640845358828827, |
|
"learning_rate": 7.000282278938724e-05, |
|
"loss": 0.6032, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.5359869138495092, |
|
"grad_norm": 0.5716127749488464, |
|
"learning_rate": 6.984025599792926e-05, |
|
"loss": 0.6069, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.544711014176663, |
|
"grad_norm": 0.6147245538888289, |
|
"learning_rate": 6.967657011341126e-05, |
|
"loss": 0.6017, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 1.5534351145038168, |
|
"grad_norm": 0.5972264181543786, |
|
"learning_rate": 6.951177127451177e-05, |
|
"loss": 0.6039, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.5621592148309706, |
|
"grad_norm": 0.4511378630106592, |
|
"learning_rate": 6.934586566164811e-05, |
|
"loss": 0.609, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 1.5708833151581243, |
|
"grad_norm": 0.3324255798317227, |
|
"learning_rate": 6.917885949674483e-05, |
|
"loss": 0.6046, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.579607415485278, |
|
"grad_norm": 0.3318537793653575, |
|
"learning_rate": 6.901075904300021e-05, |
|
"loss": 0.5984, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 1.5883315158124318, |
|
"grad_norm": 0.4025671963472955, |
|
"learning_rate": 6.88415706046514e-05, |
|
"loss": 0.5991, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.5970556161395857, |
|
"grad_norm": 0.4039375325984613, |
|
"learning_rate": 6.867130052673806e-05, |
|
"loss": 0.6132, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 1.6057797164667393, |
|
"grad_norm": 0.42633324449837107, |
|
"learning_rate": 6.849995519486434e-05, |
|
"loss": 0.6112, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.614503816793893, |
|
"grad_norm": 0.4031771610559183, |
|
"learning_rate": 6.832754103495939e-05, |
|
"loss": 0.5951, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.6232279171210469, |
|
"grad_norm": 0.3101345676851429, |
|
"learning_rate": 6.815406451303647e-05, |
|
"loss": 0.5977, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.6319520174482007, |
|
"grad_norm": 0.28180350855331504, |
|
"learning_rate": 6.797953213495033e-05, |
|
"loss": 0.6081, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 1.6406761177753544, |
|
"grad_norm": 0.3450910788047459, |
|
"learning_rate": 6.780395044615329e-05, |
|
"loss": 0.606, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.649400218102508, |
|
"grad_norm": 0.3941732240418325, |
|
"learning_rate": 6.762732603144978e-05, |
|
"loss": 0.6079, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.658124318429662, |
|
"grad_norm": 0.3241938707733741, |
|
"learning_rate": 6.744966551474936e-05, |
|
"loss": 0.604, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.6668484187568158, |
|
"grad_norm": 0.27804934781744717, |
|
"learning_rate": 6.727097555881826e-05, |
|
"loss": 0.595, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 1.6755725190839694, |
|
"grad_norm": 0.25336159667743025, |
|
"learning_rate": 6.709126286502965e-05, |
|
"loss": 0.6025, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.684296619411123, |
|
"grad_norm": 0.29341065316587794, |
|
"learning_rate": 6.691053417311216e-05, |
|
"loss": 0.6023, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 1.693020719738277, |
|
"grad_norm": 0.2878985409168238, |
|
"learning_rate": 6.672879626089723e-05, |
|
"loss": 0.5949, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.7017448200654308, |
|
"grad_norm": 0.26927072720134837, |
|
"learning_rate": 6.654605594406486e-05, |
|
"loss": 0.5979, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.7104689203925845, |
|
"grad_norm": 0.3155329601805801, |
|
"learning_rate": 6.636232007588805e-05, |
|
"loss": 0.587, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.7191930207197381, |
|
"grad_norm": 0.34847815991034525, |
|
"learning_rate": 6.617759554697573e-05, |
|
"loss": 0.5978, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 1.727917121046892, |
|
"grad_norm": 0.28896074500408486, |
|
"learning_rate": 6.59918892850144e-05, |
|
"loss": 0.6069, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.7366412213740459, |
|
"grad_norm": 0.2981626524269195, |
|
"learning_rate": 6.580520825450827e-05, |
|
"loss": 0.5956, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 1.7453653217011995, |
|
"grad_norm": 0.31680273274222387, |
|
"learning_rate": 6.561755945651813e-05, |
|
"loss": 0.5907, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.7540894220283532, |
|
"grad_norm": 0.2788023167998957, |
|
"learning_rate": 6.542894992839873e-05, |
|
"loss": 0.5988, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 1.762813522355507, |
|
"grad_norm": 0.26041179212976306, |
|
"learning_rate": 6.52393867435349e-05, |
|
"loss": 0.5879, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 1.771537622682661, |
|
"grad_norm": 0.34950012098507055, |
|
"learning_rate": 6.504887701107626e-05, |
|
"loss": 0.5933, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 1.7802617230098146, |
|
"grad_norm": 0.5298511066656313, |
|
"learning_rate": 6.48574278756706e-05, |
|
"loss": 0.59, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 1.7889858233369684, |
|
"grad_norm": 0.6366920336388178, |
|
"learning_rate": 6.466504651719598e-05, |
|
"loss": 0.5967, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 1.7977099236641223, |
|
"grad_norm": 0.6299010554780248, |
|
"learning_rate": 6.447174015049139e-05, |
|
"loss": 0.5974, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 1.806434023991276, |
|
"grad_norm": 0.562858852871968, |
|
"learning_rate": 6.427751602508628e-05, |
|
"loss": 0.6057, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 1.8151581243184296, |
|
"grad_norm": 0.4004595325182879, |
|
"learning_rate": 6.408238142492855e-05, |
|
"loss": 0.59, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 1.8238822246455835, |
|
"grad_norm": 0.21584901270572757, |
|
"learning_rate": 6.388634366811146e-05, |
|
"loss": 0.5943, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 1.8326063249727373, |
|
"grad_norm": 0.37265049074019335, |
|
"learning_rate": 6.368941010659921e-05, |
|
"loss": 0.6096, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.841330425299891, |
|
"grad_norm": 0.5267910816953789, |
|
"learning_rate": 6.349158812595116e-05, |
|
"loss": 0.6053, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 1.8500545256270446, |
|
"grad_norm": 0.47801721985552514, |
|
"learning_rate": 6.329288514504487e-05, |
|
"loss": 0.5936, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 1.8587786259541985, |
|
"grad_norm": 0.27531874942679646, |
|
"learning_rate": 6.309330861579786e-05, |
|
"loss": 0.5997, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 1.8675027262813524, |
|
"grad_norm": 0.3657379875213062, |
|
"learning_rate": 6.28928660228882e-05, |
|
"loss": 0.5963, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 1.876226826608506, |
|
"grad_norm": 0.3633999933893769, |
|
"learning_rate": 6.269156488347372e-05, |
|
"loss": 0.5899, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 1.8849509269356597, |
|
"grad_norm": 0.2855859194806056, |
|
"learning_rate": 6.248941274691017e-05, |
|
"loss": 0.5963, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 1.8936750272628136, |
|
"grad_norm": 0.26871535087996673, |
|
"learning_rate": 6.228641719446808e-05, |
|
"loss": 0.5832, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 1.9023991275899674, |
|
"grad_norm": 0.26413333351828056, |
|
"learning_rate": 6.208258583904841e-05, |
|
"loss": 0.5913, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 1.911123227917121, |
|
"grad_norm": 0.27861019506862306, |
|
"learning_rate": 6.18779263248971e-05, |
|
"loss": 0.6088, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 1.9198473282442747, |
|
"grad_norm": 0.26490994643954596, |
|
"learning_rate": 6.16724463273183e-05, |
|
"loss": 0.6072, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.9285714285714286, |
|
"grad_norm": 0.2464264085957813, |
|
"learning_rate": 6.146615355238668e-05, |
|
"loss": 0.5998, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 1.9372955288985825, |
|
"grad_norm": 0.2060880785957384, |
|
"learning_rate": 6.125905573665824e-05, |
|
"loss": 0.5998, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 1.9460196292257361, |
|
"grad_norm": 0.26971063557098873, |
|
"learning_rate": 6.105116064688033e-05, |
|
"loss": 0.6048, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 1.9547437295528898, |
|
"grad_norm": 0.32136759035233387, |
|
"learning_rate": 6.0842476079700264e-05, |
|
"loss": 0.6019, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 1.9634678298800436, |
|
"grad_norm": 0.23286070304886092, |
|
"learning_rate": 6.063300986137297e-05, |
|
"loss": 0.5978, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 1.9721919302071975, |
|
"grad_norm": 0.18824642933626035, |
|
"learning_rate": 6.04227698474675e-05, |
|
"loss": 0.5853, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 1.9809160305343512, |
|
"grad_norm": 0.21674772391599217, |
|
"learning_rate": 6.02117639225724e-05, |
|
"loss": 0.6078, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 1.9896401308615048, |
|
"grad_norm": 0.2934361150620414, |
|
"learning_rate": 6.000000000000001e-05, |
|
"loss": 0.5984, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 1.9983642311886587, |
|
"grad_norm": 0.44422300505199064, |
|
"learning_rate": 5.9787486021489705e-05, |
|
"loss": 0.8056, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 2.0076335877862594, |
|
"grad_norm": 0.5875332032837016, |
|
"learning_rate": 5.957422995691007e-05, |
|
"loss": 0.6104, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 2.016357688113413, |
|
"grad_norm": 0.7504197536874134, |
|
"learning_rate": 5.936023980395997e-05, |
|
"loss": 0.5445, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 2.025081788440567, |
|
"grad_norm": 1.1182174595326815, |
|
"learning_rate": 5.914552358786864e-05, |
|
"loss": 0.5436, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 2.033805888767721, |
|
"grad_norm": 0.9253408273401474, |
|
"learning_rate": 5.893008936109474e-05, |
|
"loss": 0.5517, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 2.0425299890948745, |
|
"grad_norm": 0.6241397091570694, |
|
"learning_rate": 5.871394520302432e-05, |
|
"loss": 0.5436, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 2.051254089422028, |
|
"grad_norm": 0.6714967761660577, |
|
"learning_rate": 5.8497099219667834e-05, |
|
"loss": 0.5513, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 2.0599781897491822, |
|
"grad_norm": 0.6546289177103435, |
|
"learning_rate": 5.827955954335616e-05, |
|
"loss": 0.5365, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 2.068702290076336, |
|
"grad_norm": 0.6213690775351733, |
|
"learning_rate": 5.806133433243558e-05, |
|
"loss": 0.546, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 2.0774263904034895, |
|
"grad_norm": 0.6920575535177348, |
|
"learning_rate": 5.784243177096187e-05, |
|
"loss": 0.5433, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 2.0861504907306436, |
|
"grad_norm": 0.4645752707045055, |
|
"learning_rate": 5.7622860068393334e-05, |
|
"loss": 0.5289, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 2.0948745910577973, |
|
"grad_norm": 0.4731711629332615, |
|
"learning_rate": 5.740262745928293e-05, |
|
"loss": 0.5448, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 2.103598691384951, |
|
"grad_norm": 0.5087479759223862, |
|
"learning_rate": 5.718174220296949e-05, |
|
"loss": 0.5415, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 2.1123227917121046, |
|
"grad_norm": 0.3397004171608698, |
|
"learning_rate": 5.6960212583267873e-05, |
|
"loss": 0.5373, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 2.1210468920392582, |
|
"grad_norm": 0.3028591738754104, |
|
"learning_rate": 5.673804690815845e-05, |
|
"loss": 0.5393, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 2.1297709923664123, |
|
"grad_norm": 0.4035449728515965, |
|
"learning_rate": 5.6515253509475405e-05, |
|
"loss": 0.5351, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 2.138495092693566, |
|
"grad_norm": 0.3353749295009557, |
|
"learning_rate": 5.6291840742594305e-05, |
|
"loss": 0.5365, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 2.1472191930207196, |
|
"grad_norm": 0.2948957119809547, |
|
"learning_rate": 5.606781698611879e-05, |
|
"loss": 0.5367, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 2.1559432933478737, |
|
"grad_norm": 0.3274847500759932, |
|
"learning_rate": 5.584319064156628e-05, |
|
"loss": 0.5343, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 2.1646673936750274, |
|
"grad_norm": 0.27942787362736154, |
|
"learning_rate": 5.561797013305297e-05, |
|
"loss": 0.5355, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 2.173391494002181, |
|
"grad_norm": 0.3320815797761542, |
|
"learning_rate": 5.5392163906977835e-05, |
|
"loss": 0.5324, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 2.1821155943293347, |
|
"grad_norm": 0.2793266605014413, |
|
"learning_rate": 5.516578043170591e-05, |
|
"loss": 0.5333, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 2.1908396946564888, |
|
"grad_norm": 0.24153435404173842, |
|
"learning_rate": 5.49388281972507e-05, |
|
"loss": 0.5403, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 2.1995637949836424, |
|
"grad_norm": 0.29337585767480123, |
|
"learning_rate": 5.471131571495574e-05, |
|
"loss": 0.5479, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 2.208287895310796, |
|
"grad_norm": 0.3065886341708783, |
|
"learning_rate": 5.4483251517175454e-05, |
|
"loss": 0.5274, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 2.2170119956379497, |
|
"grad_norm": 0.2885822613470245, |
|
"learning_rate": 5.425464415695514e-05, |
|
"loss": 0.5431, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 2.225736095965104, |
|
"grad_norm": 0.2588491126601207, |
|
"learning_rate": 5.4025502207710184e-05, |
|
"loss": 0.5336, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 2.2344601962922575, |
|
"grad_norm": 0.26723660749591893, |
|
"learning_rate": 5.379583426290458e-05, |
|
"loss": 0.5338, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 2.243184296619411, |
|
"grad_norm": 0.3305889777035953, |
|
"learning_rate": 5.356564893572859e-05, |
|
"loss": 0.5393, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 2.2519083969465647, |
|
"grad_norm": 0.22453022727431107, |
|
"learning_rate": 5.333495485877583e-05, |
|
"loss": 0.5485, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 2.2606324972737184, |
|
"grad_norm": 0.19233156617492056, |
|
"learning_rate": 5.310376068371938e-05, |
|
"loss": 0.5418, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 2.2693565976008725, |
|
"grad_norm": 0.20527709672078884, |
|
"learning_rate": 5.287207508098743e-05, |
|
"loss": 0.5416, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 2.278080697928026, |
|
"grad_norm": 0.2376527665778272, |
|
"learning_rate": 5.263990673943811e-05, |
|
"loss": 0.54, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 2.28680479825518, |
|
"grad_norm": 0.2613858051995676, |
|
"learning_rate": 5.2407264366033555e-05, |
|
"loss": 0.5498, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 2.295528898582334, |
|
"grad_norm": 0.18916730210716454, |
|
"learning_rate": 5.2174156685513446e-05, |
|
"loss": 0.5364, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 2.3042529989094875, |
|
"grad_norm": 0.19275577825236526, |
|
"learning_rate": 5.194059244006779e-05, |
|
"loss": 0.5434, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 2.312977099236641, |
|
"grad_norm": 0.2384097782515164, |
|
"learning_rate": 5.170658038900904e-05, |
|
"loss": 0.5353, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 2.321701199563795, |
|
"grad_norm": 0.21432203996579718, |
|
"learning_rate": 5.1472129308443616e-05, |
|
"loss": 0.537, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 2.330425299890949, |
|
"grad_norm": 0.17783572065614472, |
|
"learning_rate": 5.123724799094279e-05, |
|
"loss": 0.5444, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 2.3391494002181026, |
|
"grad_norm": 0.24973497434785122, |
|
"learning_rate": 5.1001945245212874e-05, |
|
"loss": 0.5404, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 2.3478735005452562, |
|
"grad_norm": 0.2176404792941727, |
|
"learning_rate": 5.076622989576498e-05, |
|
"loss": 0.5376, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 2.35659760087241, |
|
"grad_norm": 0.2460399652714288, |
|
"learning_rate": 5.053011078258397e-05, |
|
"loss": 0.5412, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 2.365321701199564, |
|
"grad_norm": 0.1827234799837285, |
|
"learning_rate": 5.0293596760797e-05, |
|
"loss": 0.5394, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 2.3740458015267176, |
|
"grad_norm": 0.21898045121125942, |
|
"learning_rate": 5.005669670034138e-05, |
|
"loss": 0.5417, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 2.3827699018538713, |
|
"grad_norm": 0.24065391523216037, |
|
"learning_rate": 4.981941948563197e-05, |
|
"loss": 0.5425, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 2.391494002181025, |
|
"grad_norm": 0.19964448205882962, |
|
"learning_rate": 4.958177401522796e-05, |
|
"loss": 0.5383, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 2.400218102508179, |
|
"grad_norm": 0.2287719342097617, |
|
"learning_rate": 4.934376920149915e-05, |
|
"loss": 0.5424, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 2.4089422028353327, |
|
"grad_norm": 0.15994920899181853, |
|
"learning_rate": 4.9105413970291747e-05, |
|
"loss": 0.5492, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 2.4176663031624863, |
|
"grad_norm": 0.20177344996011212, |
|
"learning_rate": 4.886671726059355e-05, |
|
"loss": 0.5452, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 2.42639040348964, |
|
"grad_norm": 0.2505950233818444, |
|
"learning_rate": 4.862768802419881e-05, |
|
"loss": 0.5355, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 2.435114503816794, |
|
"grad_norm": 0.20877969786025213, |
|
"learning_rate": 4.8388335225372416e-05, |
|
"loss": 0.5312, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 2.4438386041439477, |
|
"grad_norm": 0.16740200168268377, |
|
"learning_rate": 4.8148667840513773e-05, |
|
"loss": 0.536, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 2.4525627044711014, |
|
"grad_norm": 0.1624204063771831, |
|
"learning_rate": 4.790869485782014e-05, |
|
"loss": 0.5411, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 2.461286804798255, |
|
"grad_norm": 0.17548255884293712, |
|
"learning_rate": 4.7668425276949546e-05, |
|
"loss": 0.5377, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 2.470010905125409, |
|
"grad_norm": 0.24425295770194558, |
|
"learning_rate": 4.742786810868327e-05, |
|
"loss": 0.5424, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 2.4787350054525628, |
|
"grad_norm": 0.2279410111439652, |
|
"learning_rate": 4.7187032374587956e-05, |
|
"loss": 0.5408, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 2.4874591057797164, |
|
"grad_norm": 0.18333960694349968, |
|
"learning_rate": 4.694592710667723e-05, |
|
"loss": 0.5387, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 2.49618320610687, |
|
"grad_norm": 0.17387901999937752, |
|
"learning_rate": 4.670456134707294e-05, |
|
"loss": 0.5412, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 2.504907306434024, |
|
"grad_norm": 0.1802799576438455, |
|
"learning_rate": 4.64629441476662e-05, |
|
"loss": 0.5358, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 2.513631406761178, |
|
"grad_norm": 0.174702677057685, |
|
"learning_rate": 4.622108456977773e-05, |
|
"loss": 0.536, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 2.5223555070883314, |
|
"grad_norm": 0.14211263717330275, |
|
"learning_rate": 4.597899168381818e-05, |
|
"loss": 0.5441, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 2.531079607415485, |
|
"grad_norm": 0.1774683759788631, |
|
"learning_rate": 4.573667456894786e-05, |
|
"loss": 0.5349, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 2.539803707742639, |
|
"grad_norm": 0.1951727957490502, |
|
"learning_rate": 4.549414231273633e-05, |
|
"loss": 0.5428, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 2.548527808069793, |
|
"grad_norm": 0.16687560601341936, |
|
"learning_rate": 4.525140401082153e-05, |
|
"loss": 0.535, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 2.5572519083969465, |
|
"grad_norm": 0.15616561471578494, |
|
"learning_rate": 4.50084687665687e-05, |
|
"loss": 0.5475, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 2.5659760087241006, |
|
"grad_norm": 0.15824726116167803, |
|
"learning_rate": 4.476534569072895e-05, |
|
"loss": 0.5473, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 2.5747001090512542, |
|
"grad_norm": 0.22356058463377537, |
|
"learning_rate": 4.452204390109763e-05, |
|
"loss": 0.5345, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 2.583424209378408, |
|
"grad_norm": 0.21936785429021016, |
|
"learning_rate": 4.4278572522172336e-05, |
|
"loss": 0.5379, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 2.5921483097055615, |
|
"grad_norm": 0.15224856301200368, |
|
"learning_rate": 4.403494068481074e-05, |
|
"loss": 0.5409, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 2.600872410032715, |
|
"grad_norm": 0.19196426118514048, |
|
"learning_rate": 4.379115752588814e-05, |
|
"loss": 0.5531, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 2.6095965103598693, |
|
"grad_norm": 0.21896927123986717, |
|
"learning_rate": 4.3547232187954866e-05, |
|
"loss": 0.5437, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 2.618320610687023, |
|
"grad_norm": 0.187029673883834, |
|
"learning_rate": 4.33031738188933e-05, |
|
"loss": 0.5381, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 2.6270447110141766, |
|
"grad_norm": 0.1542884550374639, |
|
"learning_rate": 4.3058991571574896e-05, |
|
"loss": 0.5421, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 2.6357688113413307, |
|
"grad_norm": 0.2086016114061771, |
|
"learning_rate": 4.2814694603516876e-05, |
|
"loss": 0.5492, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 2.6444929116684843, |
|
"grad_norm": 0.19799737279991564, |
|
"learning_rate": 4.257029207653881e-05, |
|
"loss": 0.5372, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 2.653217011995638, |
|
"grad_norm": 0.16486499555492864, |
|
"learning_rate": 4.2325793156419035e-05, |
|
"loss": 0.5468, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 2.6619411123227916, |
|
"grad_norm": 0.21172626026429878, |
|
"learning_rate": 4.20812070125509e-05, |
|
"loss": 0.5396, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 2.6706652126499453, |
|
"grad_norm": 0.2470196212926822, |
|
"learning_rate": 4.183654281759888e-05, |
|
"loss": 0.5454, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 2.6793893129770994, |
|
"grad_norm": 0.22097827298910216, |
|
"learning_rate": 4.159180974715457e-05, |
|
"loss": 0.5401, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 2.688113413304253, |
|
"grad_norm": 0.15719194953902701, |
|
"learning_rate": 4.1347016979392626e-05, |
|
"loss": 0.5351, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 2.6968375136314067, |
|
"grad_norm": 0.15729742427040297, |
|
"learning_rate": 4.110217369472649e-05, |
|
"loss": 0.5295, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 2.7055616139585608, |
|
"grad_norm": 0.14739912075610104, |
|
"learning_rate": 4.085728907546413e-05, |
|
"loss": 0.5408, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 2.7142857142857144, |
|
"grad_norm": 0.14793685021802994, |
|
"learning_rate": 4.061237230546369e-05, |
|
"loss": 0.5428, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 2.723009814612868, |
|
"grad_norm": 0.16234675353976102, |
|
"learning_rate": 4.0367432569789065e-05, |
|
"loss": 0.5362, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 2.7317339149400217, |
|
"grad_norm": 0.14302498512172665, |
|
"learning_rate": 4.012247905436539e-05, |
|
"loss": 0.5412, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 2.7404580152671754, |
|
"grad_norm": 0.14016241879849234, |
|
"learning_rate": 3.987752094563462e-05, |
|
"loss": 0.5396, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 2.7491821155943295, |
|
"grad_norm": 0.15182946842699183, |
|
"learning_rate": 3.963256743021095e-05, |
|
"loss": 0.54, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 2.757906215921483, |
|
"grad_norm": 0.14224077108756453, |
|
"learning_rate": 3.9387627694536316e-05, |
|
"loss": 0.541, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 2.7666303162486368, |
|
"grad_norm": 0.15804186928200673, |
|
"learning_rate": 3.914271092453589e-05, |
|
"loss": 0.5436, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 2.775354416575791, |
|
"grad_norm": 0.18867349297613747, |
|
"learning_rate": 3.889782630527353e-05, |
|
"loss": 0.5303, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 2.7840785169029445, |
|
"grad_norm": 0.14326765308764183, |
|
"learning_rate": 3.865298302060739e-05, |
|
"loss": 0.5306, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 2.792802617230098, |
|
"grad_norm": 0.2074238009547974, |
|
"learning_rate": 3.8408190252845435e-05, |
|
"loss": 0.5372, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 2.801526717557252, |
|
"grad_norm": 0.14034785022531543, |
|
"learning_rate": 3.816345718240113e-05, |
|
"loss": 0.5403, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 2.8102508178844054, |
|
"grad_norm": 0.15821673087787588, |
|
"learning_rate": 3.791879298744911e-05, |
|
"loss": 0.5321, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 2.8189749182115595, |
|
"grad_norm": 0.14024264925437085, |
|
"learning_rate": 3.767420684358097e-05, |
|
"loss": 0.5509, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 2.827699018538713, |
|
"grad_norm": 0.14817524358908055, |
|
"learning_rate": 3.74297079234612e-05, |
|
"loss": 0.5414, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 2.836423118865867, |
|
"grad_norm": 0.1618059561441321, |
|
"learning_rate": 3.7185305396483144e-05, |
|
"loss": 0.5435, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 2.845147219193021, |
|
"grad_norm": 0.1463097964083179, |
|
"learning_rate": 3.694100842842512e-05, |
|
"loss": 0.5415, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 2.8538713195201746, |
|
"grad_norm": 0.19816090369688377, |
|
"learning_rate": 3.669682618110671e-05, |
|
"loss": 0.5289, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 2.8625954198473282, |
|
"grad_norm": 0.13008063298224137, |
|
"learning_rate": 3.645276781204515e-05, |
|
"loss": 0.5409, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 2.871319520174482, |
|
"grad_norm": 0.19611647889519343, |
|
"learning_rate": 3.6208842474111865e-05, |
|
"loss": 0.5318, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 2.8800436205016355, |
|
"grad_norm": 0.16795674711289083, |
|
"learning_rate": 3.5965059315189274e-05, |
|
"loss": 0.5402, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 2.8887677208287896, |
|
"grad_norm": 0.15014005665113414, |
|
"learning_rate": 3.572142747782768e-05, |
|
"loss": 0.5358, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 2.8974918211559433, |
|
"grad_norm": 0.17800420417158852, |
|
"learning_rate": 3.547795609890238e-05, |
|
"loss": 0.5434, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 2.906215921483097, |
|
"grad_norm": 0.15795311662461162, |
|
"learning_rate": 3.523465430927106e-05, |
|
"loss": 0.5391, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 2.914940021810251, |
|
"grad_norm": 0.12743060334259004, |
|
"learning_rate": 3.499153123343131e-05, |
|
"loss": 0.5325, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 2.9236641221374047, |
|
"grad_norm": 0.16855028063385719, |
|
"learning_rate": 3.474859598917849e-05, |
|
"loss": 0.5413, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 2.9323882224645583, |
|
"grad_norm": 0.13048706187884607, |
|
"learning_rate": 3.4505857687263675e-05, |
|
"loss": 0.5424, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 2.941112322791712, |
|
"grad_norm": 0.19367317061005948, |
|
"learning_rate": 3.4263325431052156e-05, |
|
"loss": 0.5432, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 2.9498364231188656, |
|
"grad_norm": 0.12027268336619701, |
|
"learning_rate": 3.4021008316181834e-05, |
|
"loss": 0.5454, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 2.9585605234460197, |
|
"grad_norm": 0.15147251810786375, |
|
"learning_rate": 3.377891543022229e-05, |
|
"loss": 0.5319, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 2.9672846237731734, |
|
"grad_norm": 0.12811996405452336, |
|
"learning_rate": 3.353705585233381e-05, |
|
"loss": 0.544, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 2.976008724100327, |
|
"grad_norm": 0.13491661704715102, |
|
"learning_rate": 3.329543865292707e-05, |
|
"loss": 0.5435, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 2.984732824427481, |
|
"grad_norm": 0.15940079876011493, |
|
"learning_rate": 3.305407289332279e-05, |
|
"loss": 0.5321, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 2.9934569247546348, |
|
"grad_norm": 0.13416535357448142, |
|
"learning_rate": 3.281296762541206e-05, |
|
"loss": 0.5555, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 3.0174482006543077, |
|
"grad_norm": 0.2721337362560336, |
|
"learning_rate": 3.2572131891316735e-05, |
|
"loss": 0.4954, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 3.0261723009814614, |
|
"grad_norm": 0.19428239960148935, |
|
"learning_rate": 3.2331574723050474e-05, |
|
"loss": 0.4841, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 3.034896401308615, |
|
"grad_norm": 0.21517719571854402, |
|
"learning_rate": 3.209130514217987e-05, |
|
"loss": 0.4868, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 3.0436205016357687, |
|
"grad_norm": 0.21889099316423938, |
|
"learning_rate": 3.185133215948625e-05, |
|
"loss": 0.4986, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 3.052344601962923, |
|
"grad_norm": 0.21703320950794194, |
|
"learning_rate": 3.161166477462759e-05, |
|
"loss": 0.4879, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 3.0610687022900764, |
|
"grad_norm": 0.20716597340235016, |
|
"learning_rate": 3.137231197580121e-05, |
|
"loss": 0.4926, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 3.06979280261723, |
|
"grad_norm": 0.23215045785693364, |
|
"learning_rate": 3.1133282739406454e-05, |
|
"loss": 0.4823, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 3.0785169029443837, |
|
"grad_norm": 0.18540346216841436, |
|
"learning_rate": 3.089458602970828e-05, |
|
"loss": 0.4932, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 3.087241003271538, |
|
"grad_norm": 0.2117646196498009, |
|
"learning_rate": 3.065623079850086e-05, |
|
"loss": 0.4829, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 3.0959651035986915, |
|
"grad_norm": 0.19784374074464264, |
|
"learning_rate": 3.041822598477206e-05, |
|
"loss": 0.4894, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 3.104689203925845, |
|
"grad_norm": 0.20389277832289257, |
|
"learning_rate": 3.0180580514368037e-05, |
|
"loss": 0.4819, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 3.113413304252999, |
|
"grad_norm": 0.20689059795484877, |
|
"learning_rate": 2.9943303299658634e-05, |
|
"loss": 0.4854, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 3.122137404580153, |
|
"grad_norm": 0.19252775326832058, |
|
"learning_rate": 2.970640323920301e-05, |
|
"loss": 0.4853, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 3.1308615049073065, |
|
"grad_norm": 0.20405962502922226, |
|
"learning_rate": 2.9469889217416045e-05, |
|
"loss": 0.4931, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 3.13958560523446, |
|
"grad_norm": 0.18417287655063955, |
|
"learning_rate": 2.9233770104235027e-05, |
|
"loss": 0.4861, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 3.148309705561614, |
|
"grad_norm": 0.17779606526419078, |
|
"learning_rate": 2.8998054754787143e-05, |
|
"loss": 0.4892, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 3.157033805888768, |
|
"grad_norm": 0.17500936627057292, |
|
"learning_rate": 2.8762752009057232e-05, |
|
"loss": 0.482, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 3.1657579062159216, |
|
"grad_norm": 0.14921104868796375, |
|
"learning_rate": 2.8527870691556404e-05, |
|
"loss": 0.4884, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 3.174482006543075, |
|
"grad_norm": 0.1738429047868548, |
|
"learning_rate": 2.8293419610990968e-05, |
|
"loss": 0.4856, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 3.183206106870229, |
|
"grad_norm": 0.14815153041649903, |
|
"learning_rate": 2.805940755993223e-05, |
|
"loss": 0.4856, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 3.191930207197383, |
|
"grad_norm": 0.16213628899803448, |
|
"learning_rate": 2.7825843314486564e-05, |
|
"loss": 0.4833, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 3.2006543075245366, |
|
"grad_norm": 0.12776465267608372, |
|
"learning_rate": 2.7592735633966468e-05, |
|
"loss": 0.4825, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 3.2093784078516903, |
|
"grad_norm": 0.1532685309345078, |
|
"learning_rate": 2.7360093260561904e-05, |
|
"loss": 0.4865, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 3.218102508178844, |
|
"grad_norm": 0.13916327010405047, |
|
"learning_rate": 2.712792491901258e-05, |
|
"loss": 0.4784, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 3.226826608505998, |
|
"grad_norm": 0.13219062695260514, |
|
"learning_rate": 2.6896239316280634e-05, |
|
"loss": 0.4883, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 3.2355507088331517, |
|
"grad_norm": 0.1425947319673384, |
|
"learning_rate": 2.6665045141224193e-05, |
|
"loss": 0.4842, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 3.2442748091603053, |
|
"grad_norm": 0.13401395028938867, |
|
"learning_rate": 2.643435106427141e-05, |
|
"loss": 0.4911, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 3.252998909487459, |
|
"grad_norm": 0.13526534167394852, |
|
"learning_rate": 2.6204165737095447e-05, |
|
"loss": 0.486, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 3.261723009814613, |
|
"grad_norm": 0.13067389909619231, |
|
"learning_rate": 2.597449779228983e-05, |
|
"loss": 0.4915, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 3.2704471101417667, |
|
"grad_norm": 0.12011326204522878, |
|
"learning_rate": 2.5745355843044882e-05, |
|
"loss": 0.4863, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 3.2791712104689203, |
|
"grad_norm": 0.12879501353541944, |
|
"learning_rate": 2.5516748482824552e-05, |
|
"loss": 0.482, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 3.287895310796074, |
|
"grad_norm": 0.13720739097629261, |
|
"learning_rate": 2.5288684285044283e-05, |
|
"loss": 0.4949, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 3.296619411123228, |
|
"grad_norm": 0.1302222305392029, |
|
"learning_rate": 2.5061171802749317e-05, |
|
"loss": 0.494, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 3.3053435114503817, |
|
"grad_norm": 0.13160648955568055, |
|
"learning_rate": 2.4834219568294112e-05, |
|
"loss": 0.4813, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 3.3140676117775354, |
|
"grad_norm": 0.12345223103133267, |
|
"learning_rate": 2.460783609302218e-05, |
|
"loss": 0.4872, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 3.322791712104689, |
|
"grad_norm": 0.12449345441651119, |
|
"learning_rate": 2.438202986694705e-05, |
|
"loss": 0.4779, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 3.331515812431843, |
|
"grad_norm": 0.13788217467466876, |
|
"learning_rate": 2.4156809358433728e-05, |
|
"loss": 0.4827, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 3.340239912758997, |
|
"grad_norm": 0.13887142392886914, |
|
"learning_rate": 2.393218301388123e-05, |
|
"loss": 0.4805, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 3.3489640130861504, |
|
"grad_norm": 0.1279136541009379, |
|
"learning_rate": 2.3708159257405705e-05, |
|
"loss": 0.4833, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 3.357688113413304, |
|
"grad_norm": 0.12043380955651341, |
|
"learning_rate": 2.348474649052462e-05, |
|
"loss": 0.4864, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 3.366412213740458, |
|
"grad_norm": 0.12056545888603093, |
|
"learning_rate": 2.3261953091841553e-05, |
|
"loss": 0.4946, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 3.375136314067612, |
|
"grad_norm": 0.12834970766272358, |
|
"learning_rate": 2.3039787416732143e-05, |
|
"loss": 0.4883, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 3.3838604143947655, |
|
"grad_norm": 0.12525269244211057, |
|
"learning_rate": 2.281825779703054e-05, |
|
"loss": 0.4848, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 3.392584514721919, |
|
"grad_norm": 0.11069140829422457, |
|
"learning_rate": 2.2597372540717083e-05, |
|
"loss": 0.49, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 3.4013086150490732, |
|
"grad_norm": 0.1186817635248957, |
|
"learning_rate": 2.237713993160668e-05, |
|
"loss": 0.4904, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 3.410032715376227, |
|
"grad_norm": 0.11808550288276015, |
|
"learning_rate": 2.2157568229038145e-05, |
|
"loss": 0.4902, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 3.4187568157033805, |
|
"grad_norm": 0.11993464935882125, |
|
"learning_rate": 2.1938665667564435e-05, |
|
"loss": 0.4845, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 3.427480916030534, |
|
"grad_norm": 0.11969074606441606, |
|
"learning_rate": 2.172044045664386e-05, |
|
"loss": 0.5019, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 3.4362050163576883, |
|
"grad_norm": 0.12038618305452632, |
|
"learning_rate": 2.1502900780332183e-05, |
|
"loss": 0.4901, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 3.444929116684842, |
|
"grad_norm": 0.11405277423193325, |
|
"learning_rate": 2.1286054796975696e-05, |
|
"loss": 0.4869, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 3.4536532170119956, |
|
"grad_norm": 0.13087116805331267, |
|
"learning_rate": 2.1069910638905277e-05, |
|
"loss": 0.4894, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 3.462377317339149, |
|
"grad_norm": 0.10658227722079491, |
|
"learning_rate": 2.085447641213138e-05, |
|
"loss": 0.4873, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 3.4711014176663033, |
|
"grad_norm": 0.11425468095535157, |
|
"learning_rate": 2.063976019604006e-05, |
|
"loss": 0.4831, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 3.479825517993457, |
|
"grad_norm": 0.11858217904877387, |
|
"learning_rate": 2.0425770043089957e-05, |
|
"loss": 0.4825, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 3.4885496183206106, |
|
"grad_norm": 0.10999883286826867, |
|
"learning_rate": 2.021251397851031e-05, |
|
"loss": 0.485, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 3.4972737186477643, |
|
"grad_norm": 0.11453238535992676, |
|
"learning_rate": 2.0000000000000012e-05, |
|
"loss": 0.4771, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 3.5059978189749184, |
|
"grad_norm": 0.11260955156023703, |
|
"learning_rate": 1.9788236077427617e-05, |
|
"loss": 0.4849, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 3.514721919302072, |
|
"grad_norm": 0.10788933054700972, |
|
"learning_rate": 1.957723015253252e-05, |
|
"loss": 0.4879, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 3.5234460196292257, |
|
"grad_norm": 0.1242817430775905, |
|
"learning_rate": 1.9366990138627054e-05, |
|
"loss": 0.4954, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 3.5321701199563798, |
|
"grad_norm": 0.1250660403349655, |
|
"learning_rate": 1.915752392029976e-05, |
|
"loss": 0.4873, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 3.5408942202835334, |
|
"grad_norm": 0.10866091824566891, |
|
"learning_rate": 1.894883935311969e-05, |
|
"loss": 0.4938, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 3.549618320610687, |
|
"grad_norm": 0.134004404392773, |
|
"learning_rate": 1.8740944263341773e-05, |
|
"loss": 0.4842, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 3.5583424209378407, |
|
"grad_norm": 0.10798242739035056, |
|
"learning_rate": 1.8533846447613342e-05, |
|
"loss": 0.489, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 3.5670665212649943, |
|
"grad_norm": 0.12508201458500512, |
|
"learning_rate": 1.8327553672681717e-05, |
|
"loss": 0.4887, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 3.5757906215921484, |
|
"grad_norm": 0.10902393852897485, |
|
"learning_rate": 1.8122073675102935e-05, |
|
"loss": 0.4794, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 3.584514721919302, |
|
"grad_norm": 0.1147667540376711, |
|
"learning_rate": 1.7917414160951612e-05, |
|
"loss": 0.4926, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 3.5932388222464557, |
|
"grad_norm": 0.10301491583219155, |
|
"learning_rate": 1.7713582805531938e-05, |
|
"loss": 0.4855, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 3.60196292257361, |
|
"grad_norm": 0.11784283632033268, |
|
"learning_rate": 1.7510587253089842e-05, |
|
"loss": 0.4923, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 3.6106870229007635, |
|
"grad_norm": 0.11268723502106516, |
|
"learning_rate": 1.7308435116526293e-05, |
|
"loss": 0.4798, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 3.619411123227917, |
|
"grad_norm": 0.10107252924229544, |
|
"learning_rate": 1.7107133977111815e-05, |
|
"loss": 0.4903, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 3.628135223555071, |
|
"grad_norm": 0.12328942904698265, |
|
"learning_rate": 1.690669138420215e-05, |
|
"loss": 0.4958, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 3.6368593238822244, |
|
"grad_norm": 0.10387729435616382, |
|
"learning_rate": 1.6707114854955148e-05, |
|
"loss": 0.4833, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 3.6455834242093785, |
|
"grad_norm": 0.10945876226281129, |
|
"learning_rate": 1.6508411874048856e-05, |
|
"loss": 0.4883, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 3.654307524536532, |
|
"grad_norm": 0.10647402251039108, |
|
"learning_rate": 1.6310589893400804e-05, |
|
"loss": 0.4888, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 3.663031624863686, |
|
"grad_norm": 0.10316209865500071, |
|
"learning_rate": 1.6113656331888563e-05, |
|
"loss": 0.4867, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 3.67175572519084, |
|
"grad_norm": 0.10970228886677176, |
|
"learning_rate": 1.5917618575071482e-05, |
|
"loss": 0.4805, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 3.6804798255179936, |
|
"grad_norm": 0.10554815817345704, |
|
"learning_rate": 1.5722483974913737e-05, |
|
"loss": 0.4895, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 3.6892039258451472, |
|
"grad_norm": 0.09884727405243657, |
|
"learning_rate": 1.5528259849508617e-05, |
|
"loss": 0.4869, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 3.697928026172301, |
|
"grad_norm": 0.11478692305275993, |
|
"learning_rate": 1.533495348280404e-05, |
|
"loss": 0.492, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 3.7066521264994545, |
|
"grad_norm": 0.10056955523850906, |
|
"learning_rate": 1.5142572124329418e-05, |
|
"loss": 0.4952, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 3.7153762268266086, |
|
"grad_norm": 0.10282422328881895, |
|
"learning_rate": 1.4951122988923765e-05, |
|
"loss": 0.4928, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 3.7241003271537623, |
|
"grad_norm": 0.11131425137941661, |
|
"learning_rate": 1.4760613256465121e-05, |
|
"loss": 0.4895, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 3.732824427480916, |
|
"grad_norm": 0.10147036335235084, |
|
"learning_rate": 1.457105007160129e-05, |
|
"loss": 0.4891, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 3.74154852780807, |
|
"grad_norm": 0.0979128656061041, |
|
"learning_rate": 1.4382440543481888e-05, |
|
"loss": 0.4962, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 3.7502726281352237, |
|
"grad_norm": 0.09836700247355973, |
|
"learning_rate": 1.4194791745491748e-05, |
|
"loss": 0.4887, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 3.7589967284623773, |
|
"grad_norm": 0.1025226008534582, |
|
"learning_rate": 1.4008110714985623e-05, |
|
"loss": 0.4921, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 3.767720828789531, |
|
"grad_norm": 0.09532630365560431, |
|
"learning_rate": 1.3822404453024292e-05, |
|
"loss": 0.4817, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 3.7764449291166846, |
|
"grad_norm": 0.10123230213257639, |
|
"learning_rate": 1.363767992411197e-05, |
|
"loss": 0.4902, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 3.7851690294438387, |
|
"grad_norm": 0.0991247900008933, |
|
"learning_rate": 1.3453944055935151e-05, |
|
"loss": 0.4848, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 3.7938931297709924, |
|
"grad_norm": 0.10006268884120084, |
|
"learning_rate": 1.3271203739102783e-05, |
|
"loss": 0.4964, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 3.802617230098146, |
|
"grad_norm": 0.10054874848417918, |
|
"learning_rate": 1.308946582688785e-05, |
|
"loss": 0.4922, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 3.8113413304253, |
|
"grad_norm": 0.0987785901332479, |
|
"learning_rate": 1.2908737134970367e-05, |
|
"loss": 0.4939, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 3.8200654307524538, |
|
"grad_norm": 0.09483259622573526, |
|
"learning_rate": 1.272902444118175e-05, |
|
"loss": 0.4844, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 3.8287895310796074, |
|
"grad_norm": 0.10027042176264714, |
|
"learning_rate": 1.2550334485250661e-05, |
|
"loss": 0.4961, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 3.837513631406761, |
|
"grad_norm": 0.10249666075304369, |
|
"learning_rate": 1.2372673968550229e-05, |
|
"loss": 0.485, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 3.8462377317339147, |
|
"grad_norm": 0.09908348278775064, |
|
"learning_rate": 1.2196049553846718e-05, |
|
"loss": 0.4929, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 3.854961832061069, |
|
"grad_norm": 0.10609152583672458, |
|
"learning_rate": 1.202046786504969e-05, |
|
"loss": 0.4899, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 3.8636859323882224, |
|
"grad_norm": 0.10563226786853933, |
|
"learning_rate": 1.1845935486963546e-05, |
|
"loss": 0.4897, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 3.872410032715376, |
|
"grad_norm": 0.09809013057751448, |
|
"learning_rate": 1.1672458965040624e-05, |
|
"loss": 0.484, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 3.88113413304253, |
|
"grad_norm": 0.10049787341345903, |
|
"learning_rate": 1.1500044805135686e-05, |
|
"loss": 0.4891, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 3.889858233369684, |
|
"grad_norm": 0.10177360915060568, |
|
"learning_rate": 1.1328699473261957e-05, |
|
"loss": 0.4894, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 3.8985823336968375, |
|
"grad_norm": 0.09437906218967344, |
|
"learning_rate": 1.1158429395348614e-05, |
|
"loss": 0.4883, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 3.907306434023991, |
|
"grad_norm": 0.10239261134633734, |
|
"learning_rate": 1.0989240956999807e-05, |
|
"loss": 0.4849, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 3.916030534351145, |
|
"grad_norm": 0.11040622306921051, |
|
"learning_rate": 1.0821140503255174e-05, |
|
"loss": 0.4856, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 3.924754634678299, |
|
"grad_norm": 0.10030020907072919, |
|
"learning_rate": 1.0654134338351896e-05, |
|
"loss": 0.4963, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 3.9334787350054525, |
|
"grad_norm": 0.10185833012036703, |
|
"learning_rate": 1.0488228725488252e-05, |
|
"loss": 0.4781, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 3.942202835332606, |
|
"grad_norm": 0.11302408247244193, |
|
"learning_rate": 1.0323429886588743e-05, |
|
"loss": 0.4952, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 3.9509269356597603, |
|
"grad_norm": 0.10091602155398324, |
|
"learning_rate": 1.0159744002070756e-05, |
|
"loss": 0.4843, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 3.959651035986914, |
|
"grad_norm": 0.0981028735736355, |
|
"learning_rate": 9.997177210612774e-06, |
|
"loss": 0.4705, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 3.9683751363140676, |
|
"grad_norm": 0.09982190269637335, |
|
"learning_rate": 9.835735608924155e-06, |
|
"loss": 0.4848, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 3.9770992366412212, |
|
"grad_norm": 0.10286988193952572, |
|
"learning_rate": 9.675425251516502e-06, |
|
"loss": 0.4862, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 3.985823336968375, |
|
"grad_norm": 0.09915491694077962, |
|
"learning_rate": 9.516252150476583e-06, |
|
"loss": 0.4822, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 3.994547437295529, |
|
"grad_norm": 0.1015074890493012, |
|
"learning_rate": 9.358222275240884e-06, |
|
"loss": 0.486, |
|
"step": 456 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 570, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.2161148234462396e+19, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|