{ "best_metric": 0.5626052618026733, "best_model_checkpoint": "/content/drive/MyDrive/tw-roberta-base-sentiment-FT-v2/checkpoint-2603", "epoch": 2.0, "eval_steps": 500, "global_step": 5206, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.00384172109104879, "grad_norm": 19.648235321044922, "learning_rate": 1.2195121951219514e-07, "loss": 0.7091, "step": 10 }, { "epoch": 0.00768344218209758, "grad_norm": 15.138011932373047, "learning_rate": 2.439024390243903e-07, "loss": 0.5428, "step": 20 }, { "epoch": 0.01152516327314637, "grad_norm": 15.91871452331543, "learning_rate": 3.6585365853658536e-07, "loss": 0.709, "step": 30 }, { "epoch": 0.01536688436419516, "grad_norm": 11.673059463500977, "learning_rate": 4.878048780487805e-07, "loss": 0.6269, "step": 40 }, { "epoch": 0.01920860545524395, "grad_norm": 10.827598571777344, "learning_rate": 6.097560975609757e-07, "loss": 0.6899, "step": 50 }, { "epoch": 0.02305032654629274, "grad_norm": 29.76123809814453, "learning_rate": 7.317073170731707e-07, "loss": 0.7235, "step": 60 }, { "epoch": 0.02689204763734153, "grad_norm": 16.770448684692383, "learning_rate": 8.53658536585366e-07, "loss": 0.731, "step": 70 }, { "epoch": 0.03073376872839032, "grad_norm": 17.501832962036133, "learning_rate": 9.75609756097561e-07, "loss": 0.7907, "step": 80 }, { "epoch": 0.03457548981943911, "grad_norm": 5.903749465942383, "learning_rate": 1.0975609756097562e-06, "loss": 0.6549, "step": 90 }, { "epoch": 0.0384172109104879, "grad_norm": 12.823253631591797, "learning_rate": 1.2195121951219514e-06, "loss": 0.6833, "step": 100 }, { "epoch": 0.042258932001536686, "grad_norm": 14.516304969787598, "learning_rate": 1.3414634146341465e-06, "loss": 0.7764, "step": 110 }, { "epoch": 0.04610065309258548, "grad_norm": 17.775850296020508, "learning_rate": 1.4634146341463414e-06, "loss": 0.6677, "step": 120 }, { "epoch": 0.049942374183634265, "grad_norm": 16.348901748657227, "learning_rate": 1.5853658536585368e-06, "loss": 0.6708, "step": 130 }, { "epoch": 0.05378409527468306, "grad_norm": 20.729867935180664, "learning_rate": 1.707317073170732e-06, "loss": 0.685, "step": 140 }, { "epoch": 0.05762581636573185, "grad_norm": 23.60687255859375, "learning_rate": 1.8292682926829268e-06, "loss": 0.7048, "step": 150 }, { "epoch": 0.06146753745678064, "grad_norm": 24.990461349487305, "learning_rate": 1.951219512195122e-06, "loss": 0.6849, "step": 160 }, { "epoch": 0.06530925854782943, "grad_norm": 21.754741668701172, "learning_rate": 2.073170731707317e-06, "loss": 0.6801, "step": 170 }, { "epoch": 0.06915097963887822, "grad_norm": 19.427799224853516, "learning_rate": 2.1951219512195125e-06, "loss": 0.6006, "step": 180 }, { "epoch": 0.072992700729927, "grad_norm": 11.91465950012207, "learning_rate": 2.317073170731708e-06, "loss": 0.6498, "step": 190 }, { "epoch": 0.0768344218209758, "grad_norm": 24.343521118164062, "learning_rate": 2.4390243902439027e-06, "loss": 0.8272, "step": 200 }, { "epoch": 0.08067614291202459, "grad_norm": 11.623435020446777, "learning_rate": 2.5609756097560977e-06, "loss": 0.6703, "step": 210 }, { "epoch": 0.08451786400307337, "grad_norm": 8.984451293945312, "learning_rate": 2.682926829268293e-06, "loss": 0.6965, "step": 220 }, { "epoch": 0.08835958509412217, "grad_norm": 26.78940773010254, "learning_rate": 2.8048780487804884e-06, "loss": 0.7135, "step": 230 }, { "epoch": 0.09220130618517096, "grad_norm": 17.50589370727539, "learning_rate": 2.926829268292683e-06, "loss": 0.6637, "step": 240 }, { 
"epoch": 0.09604302727621974, "grad_norm": 13.149524688720703, "learning_rate": 3.0487804878048782e-06, "loss": 0.5756, "step": 250 }, { "epoch": 0.09988474836726853, "grad_norm": 10.727895736694336, "learning_rate": 3.1707317073170736e-06, "loss": 0.594, "step": 260 }, { "epoch": 0.10372646945831733, "grad_norm": 15.031158447265625, "learning_rate": 3.292682926829269e-06, "loss": 0.7186, "step": 270 }, { "epoch": 0.10756819054936612, "grad_norm": 10.520498275756836, "learning_rate": 3.414634146341464e-06, "loss": 0.5746, "step": 280 }, { "epoch": 0.1114099116404149, "grad_norm": 19.504133224487305, "learning_rate": 3.5365853658536588e-06, "loss": 0.7709, "step": 290 }, { "epoch": 0.1152516327314637, "grad_norm": 12.495412826538086, "learning_rate": 3.6585365853658537e-06, "loss": 0.6508, "step": 300 }, { "epoch": 0.11909335382251249, "grad_norm": 9.115503311157227, "learning_rate": 3.780487804878049e-06, "loss": 0.5847, "step": 310 }, { "epoch": 0.12293507491356127, "grad_norm": 12.583561897277832, "learning_rate": 3.902439024390244e-06, "loss": 0.6114, "step": 320 }, { "epoch": 0.12677679600461006, "grad_norm": 16.474716186523438, "learning_rate": 4.024390243902439e-06, "loss": 0.5819, "step": 330 }, { "epoch": 0.13061851709565886, "grad_norm": 12.282912254333496, "learning_rate": 4.146341463414634e-06, "loss": 0.5974, "step": 340 }, { "epoch": 0.13446023818670763, "grad_norm": 14.967294692993164, "learning_rate": 4.268292682926829e-06, "loss": 0.515, "step": 350 }, { "epoch": 0.13830195927775643, "grad_norm": 22.999889373779297, "learning_rate": 4.390243902439025e-06, "loss": 0.8314, "step": 360 }, { "epoch": 0.14214368036880523, "grad_norm": 13.173623085021973, "learning_rate": 4.51219512195122e-06, "loss": 0.7274, "step": 370 }, { "epoch": 0.145985401459854, "grad_norm": 10.259631156921387, "learning_rate": 4.634146341463416e-06, "loss": 0.5758, "step": 380 }, { "epoch": 0.1498271225509028, "grad_norm": 20.109394073486328, "learning_rate": 4.75609756097561e-06, "loss": 0.6767, "step": 390 }, { "epoch": 0.1536688436419516, "grad_norm": 12.53744125366211, "learning_rate": 4.8780487804878055e-06, "loss": 0.6554, "step": 400 }, { "epoch": 0.15751056473300037, "grad_norm": 32.90926742553711, "learning_rate": 5e-06, "loss": 0.751, "step": 410 }, { "epoch": 0.16135228582404917, "grad_norm": 13.864742279052734, "learning_rate": 5.121951219512195e-06, "loss": 0.5644, "step": 420 }, { "epoch": 0.16519400691509797, "grad_norm": 16.864086151123047, "learning_rate": 5.243902439024391e-06, "loss": 0.7212, "step": 430 }, { "epoch": 0.16903572800614675, "grad_norm": 8.035212516784668, "learning_rate": 5.365853658536586e-06, "loss": 0.6411, "step": 440 }, { "epoch": 0.17287744909719555, "grad_norm": 15.027800559997559, "learning_rate": 5.487804878048781e-06, "loss": 0.7331, "step": 450 }, { "epoch": 0.17671917018824435, "grad_norm": 9.771449089050293, "learning_rate": 5.609756097560977e-06, "loss": 0.6245, "step": 460 }, { "epoch": 0.18056089127929312, "grad_norm": 7.735960960388184, "learning_rate": 5.731707317073171e-06, "loss": 0.5251, "step": 470 }, { "epoch": 0.18440261237034192, "grad_norm": 7.348488807678223, "learning_rate": 5.853658536585366e-06, "loss": 0.5108, "step": 480 }, { "epoch": 0.1882443334613907, "grad_norm": 30.567018508911133, "learning_rate": 5.9756097560975615e-06, "loss": 0.5508, "step": 490 }, { "epoch": 0.1920860545524395, "grad_norm": 19.212230682373047, "learning_rate": 6.0975609756097564e-06, "loss": 0.5997, "step": 500 }, { "epoch": 0.1959277756434883, "grad_norm": 
17.70198631286621, "learning_rate": 6.219512195121951e-06, "loss": 0.8108, "step": 510 }, { "epoch": 0.19976949673453706, "grad_norm": 13.471924781799316, "learning_rate": 6.341463414634147e-06, "loss": 0.6738, "step": 520 }, { "epoch": 0.20361121782558586, "grad_norm": 15.494725227355957, "learning_rate": 6.463414634146342e-06, "loss": 0.8114, "step": 530 }, { "epoch": 0.20745293891663466, "grad_norm": 9.314640045166016, "learning_rate": 6.585365853658538e-06, "loss": 0.5791, "step": 540 }, { "epoch": 0.21129466000768343, "grad_norm": 11.314807891845703, "learning_rate": 6.707317073170733e-06, "loss": 0.6536, "step": 550 }, { "epoch": 0.21513638109873223, "grad_norm": 16.462522506713867, "learning_rate": 6.829268292682928e-06, "loss": 0.5598, "step": 560 }, { "epoch": 0.21897810218978103, "grad_norm": 36.564476013183594, "learning_rate": 6.951219512195122e-06, "loss": 0.5573, "step": 570 }, { "epoch": 0.2228198232808298, "grad_norm": 16.07019805908203, "learning_rate": 7.0731707317073175e-06, "loss": 0.5328, "step": 580 }, { "epoch": 0.2266615443718786, "grad_norm": 6.132660388946533, "learning_rate": 7.1951219512195125e-06, "loss": 0.4499, "step": 590 }, { "epoch": 0.2305032654629274, "grad_norm": 21.4416561126709, "learning_rate": 7.317073170731707e-06, "loss": 0.6584, "step": 600 }, { "epoch": 0.23434498655397618, "grad_norm": 18.0037841796875, "learning_rate": 7.439024390243903e-06, "loss": 0.4844, "step": 610 }, { "epoch": 0.23818670764502498, "grad_norm": 17.625553131103516, "learning_rate": 7.560975609756098e-06, "loss": 0.5122, "step": 620 }, { "epoch": 0.24202842873607375, "grad_norm": 10.607305526733398, "learning_rate": 7.682926829268293e-06, "loss": 0.5188, "step": 630 }, { "epoch": 0.24587014982712255, "grad_norm": 26.88294792175293, "learning_rate": 7.804878048780489e-06, "loss": 0.7002, "step": 640 }, { "epoch": 0.24971187091817135, "grad_norm": 38.178287506103516, "learning_rate": 7.926829268292685e-06, "loss": 0.9407, "step": 650 }, { "epoch": 0.2535535920092201, "grad_norm": 8.800226211547852, "learning_rate": 8.048780487804879e-06, "loss": 0.4968, "step": 660 }, { "epoch": 0.2573953131002689, "grad_norm": 30.46478843688965, "learning_rate": 8.170731707317073e-06, "loss": 0.5293, "step": 670 }, { "epoch": 0.2612370341913177, "grad_norm": 20.630985260009766, "learning_rate": 8.292682926829268e-06, "loss": 0.5382, "step": 680 }, { "epoch": 0.2650787552823665, "grad_norm": 19.19484519958496, "learning_rate": 8.414634146341464e-06, "loss": 0.6311, "step": 690 }, { "epoch": 0.26892047637341526, "grad_norm": 26.084064483642578, "learning_rate": 8.536585365853658e-06, "loss": 0.6893, "step": 700 }, { "epoch": 0.27276219746446406, "grad_norm": 11.942285537719727, "learning_rate": 8.658536585365854e-06, "loss": 0.5938, "step": 710 }, { "epoch": 0.27660391855551286, "grad_norm": 25.995960235595703, "learning_rate": 8.78048780487805e-06, "loss": 0.5693, "step": 720 }, { "epoch": 0.28044563964656166, "grad_norm": 10.440145492553711, "learning_rate": 8.902439024390244e-06, "loss": 0.5937, "step": 730 }, { "epoch": 0.28428736073761046, "grad_norm": 21.39019012451172, "learning_rate": 9.02439024390244e-06, "loss": 0.664, "step": 740 }, { "epoch": 0.28812908182865926, "grad_norm": 11.281585693359375, "learning_rate": 9.146341463414635e-06, "loss": 0.5178, "step": 750 }, { "epoch": 0.291970802919708, "grad_norm": 13.577981948852539, "learning_rate": 9.268292682926831e-06, "loss": 0.5405, "step": 760 }, { "epoch": 0.2958125240107568, "grad_norm": 55.213340759277344, "learning_rate": 
9.390243902439025e-06, "loss": 0.5633, "step": 770 }, { "epoch": 0.2996542451018056, "grad_norm": 19.74529266357422, "learning_rate": 9.51219512195122e-06, "loss": 0.7541, "step": 780 }, { "epoch": 0.3034959661928544, "grad_norm": 45.70171356201172, "learning_rate": 9.634146341463415e-06, "loss": 0.6196, "step": 790 }, { "epoch": 0.3073376872839032, "grad_norm": 23.01153564453125, "learning_rate": 9.756097560975611e-06, "loss": 0.7121, "step": 800 }, { "epoch": 0.311179408374952, "grad_norm": 17.099529266357422, "learning_rate": 9.878048780487805e-06, "loss": 0.6021, "step": 810 }, { "epoch": 0.31502112946600075, "grad_norm": 11.891583442687988, "learning_rate": 1e-05, "loss": 0.6071, "step": 820 }, { "epoch": 0.31886285055704955, "grad_norm": 16.423572540283203, "learning_rate": 9.98957464553795e-06, "loss": 0.539, "step": 830 }, { "epoch": 0.32270457164809835, "grad_norm": 11.026520729064941, "learning_rate": 9.979149291075898e-06, "loss": 0.615, "step": 840 }, { "epoch": 0.32654629273914715, "grad_norm": 25.40671730041504, "learning_rate": 9.968723936613845e-06, "loss": 0.4819, "step": 850 }, { "epoch": 0.33038801383019595, "grad_norm": 30.425880432128906, "learning_rate": 9.958298582151794e-06, "loss": 0.7362, "step": 860 }, { "epoch": 0.3342297349212447, "grad_norm": 21.710176467895508, "learning_rate": 9.947873227689742e-06, "loss": 0.5052, "step": 870 }, { "epoch": 0.3380714560122935, "grad_norm": 13.537766456604004, "learning_rate": 9.93744787322769e-06, "loss": 0.6032, "step": 880 }, { "epoch": 0.3419131771033423, "grad_norm": 19.592073440551758, "learning_rate": 9.927022518765639e-06, "loss": 0.5222, "step": 890 }, { "epoch": 0.3457548981943911, "grad_norm": 10.528463363647461, "learning_rate": 9.916597164303588e-06, "loss": 0.596, "step": 900 }, { "epoch": 0.3495966192854399, "grad_norm": 15.643308639526367, "learning_rate": 9.906171809841536e-06, "loss": 0.7414, "step": 910 }, { "epoch": 0.3534383403764887, "grad_norm": 22.77689552307129, "learning_rate": 9.895746455379483e-06, "loss": 0.5151, "step": 920 }, { "epoch": 0.35728006146753744, "grad_norm": 23.217538833618164, "learning_rate": 9.885321100917432e-06, "loss": 0.7501, "step": 930 }, { "epoch": 0.36112178255858624, "grad_norm": 21.08719825744629, "learning_rate": 9.874895746455382e-06, "loss": 0.4944, "step": 940 }, { "epoch": 0.36496350364963503, "grad_norm": 19.93057632446289, "learning_rate": 9.864470391993327e-06, "loss": 0.7289, "step": 950 }, { "epoch": 0.36880522474068383, "grad_norm": 14.810643196105957, "learning_rate": 9.854045037531277e-06, "loss": 0.4956, "step": 960 }, { "epoch": 0.37264694583173263, "grad_norm": 19.596710205078125, "learning_rate": 9.843619683069226e-06, "loss": 0.5528, "step": 970 }, { "epoch": 0.3764886669227814, "grad_norm": 23.963172912597656, "learning_rate": 9.833194328607173e-06, "loss": 0.5904, "step": 980 }, { "epoch": 0.3803303880138302, "grad_norm": 14.250468254089355, "learning_rate": 9.822768974145121e-06, "loss": 0.6762, "step": 990 }, { "epoch": 0.384172109104879, "grad_norm": 30.411888122558594, "learning_rate": 9.81234361968307e-06, "loss": 0.6031, "step": 1000 }, { "epoch": 0.3880138301959278, "grad_norm": 4.892022132873535, "learning_rate": 9.80191826522102e-06, "loss": 0.5832, "step": 1010 }, { "epoch": 0.3918555512869766, "grad_norm": 14.026780128479004, "learning_rate": 9.791492910758967e-06, "loss": 0.4432, "step": 1020 }, { "epoch": 0.3956972723780254, "grad_norm": 15.22079086303711, "learning_rate": 9.781067556296915e-06, "loss": 0.5003, "step": 1030 }, { "epoch": 
0.3995389934690741, "grad_norm": 38.46358871459961, "learning_rate": 9.770642201834864e-06, "loss": 0.6863, "step": 1040 }, { "epoch": 0.4033807145601229, "grad_norm": 28.509458541870117, "learning_rate": 9.760216847372811e-06, "loss": 0.6557, "step": 1050 }, { "epoch": 0.4072224356511717, "grad_norm": 10.18283748626709, "learning_rate": 9.749791492910759e-06, "loss": 0.475, "step": 1060 }, { "epoch": 0.4110641567422205, "grad_norm": 16.280475616455078, "learning_rate": 9.739366138448708e-06, "loss": 0.5423, "step": 1070 }, { "epoch": 0.4149058778332693, "grad_norm": 27.256633758544922, "learning_rate": 9.728940783986657e-06, "loss": 0.4638, "step": 1080 }, { "epoch": 0.4187475989243181, "grad_norm": 26.906049728393555, "learning_rate": 9.718515429524605e-06, "loss": 0.6528, "step": 1090 }, { "epoch": 0.42258932001536686, "grad_norm": 10.475133895874023, "learning_rate": 9.708090075062552e-06, "loss": 0.8387, "step": 1100 }, { "epoch": 0.42643104110641566, "grad_norm": 14.576977729797363, "learning_rate": 9.697664720600502e-06, "loss": 0.6325, "step": 1110 }, { "epoch": 0.43027276219746446, "grad_norm": 17.823413848876953, "learning_rate": 9.68723936613845e-06, "loss": 0.613, "step": 1120 }, { "epoch": 0.43411448328851326, "grad_norm": 12.499800682067871, "learning_rate": 9.676814011676397e-06, "loss": 0.5685, "step": 1130 }, { "epoch": 0.43795620437956206, "grad_norm": 19.12653923034668, "learning_rate": 9.666388657214346e-06, "loss": 0.8678, "step": 1140 }, { "epoch": 0.4417979254706108, "grad_norm": 5.942495822906494, "learning_rate": 9.655963302752295e-06, "loss": 0.4594, "step": 1150 }, { "epoch": 0.4456396465616596, "grad_norm": 19.233552932739258, "learning_rate": 9.645537948290243e-06, "loss": 0.5747, "step": 1160 }, { "epoch": 0.4494813676527084, "grad_norm": 17.434133529663086, "learning_rate": 9.63511259382819e-06, "loss": 0.5121, "step": 1170 }, { "epoch": 0.4533230887437572, "grad_norm": 14.78231143951416, "learning_rate": 9.62468723936614e-06, "loss": 0.7225, "step": 1180 }, { "epoch": 0.457164809834806, "grad_norm": 23.81663703918457, "learning_rate": 9.614261884904089e-06, "loss": 0.8527, "step": 1190 }, { "epoch": 0.4610065309258548, "grad_norm": 18.266740798950195, "learning_rate": 9.603836530442035e-06, "loss": 0.4101, "step": 1200 }, { "epoch": 0.46484825201690355, "grad_norm": 23.31222152709961, "learning_rate": 9.593411175979984e-06, "loss": 0.5847, "step": 1210 }, { "epoch": 0.46868997310795235, "grad_norm": 11.039971351623535, "learning_rate": 9.582985821517933e-06, "loss": 0.6515, "step": 1220 }, { "epoch": 0.47253169419900115, "grad_norm": 27.05122184753418, "learning_rate": 9.57256046705588e-06, "loss": 0.5968, "step": 1230 }, { "epoch": 0.47637341529004995, "grad_norm": 18.416839599609375, "learning_rate": 9.562135112593828e-06, "loss": 0.6339, "step": 1240 }, { "epoch": 0.48021513638109875, "grad_norm": 18.275182723999023, "learning_rate": 9.551709758131778e-06, "loss": 0.3595, "step": 1250 }, { "epoch": 0.4840568574721475, "grad_norm": 32.038143157958984, "learning_rate": 9.541284403669727e-06, "loss": 0.8281, "step": 1260 }, { "epoch": 0.4878985785631963, "grad_norm": 19.010108947753906, "learning_rate": 9.530859049207674e-06, "loss": 0.7158, "step": 1270 }, { "epoch": 0.4917402996542451, "grad_norm": 9.5922269821167, "learning_rate": 9.520433694745622e-06, "loss": 0.5315, "step": 1280 }, { "epoch": 0.4955820207452939, "grad_norm": 11.005895614624023, "learning_rate": 9.510008340283571e-06, "loss": 0.3599, "step": 1290 }, { "epoch": 0.4994237418363427, 
"grad_norm": 51.6233024597168, "learning_rate": 9.499582985821519e-06, "loss": 0.537, "step": 1300 }, { "epoch": 0.5032654629273915, "grad_norm": 19.033329010009766, "learning_rate": 9.489157631359466e-06, "loss": 0.6083, "step": 1310 }, { "epoch": 0.5071071840184402, "grad_norm": 16.91973114013672, "learning_rate": 9.478732276897415e-06, "loss": 0.7693, "step": 1320 }, { "epoch": 0.5109489051094891, "grad_norm": 23.38747215270996, "learning_rate": 9.468306922435365e-06, "loss": 0.6646, "step": 1330 }, { "epoch": 0.5147906262005378, "grad_norm": 13.772806167602539, "learning_rate": 9.457881567973312e-06, "loss": 0.4763, "step": 1340 }, { "epoch": 0.5186323472915866, "grad_norm": 8.950833320617676, "learning_rate": 9.44745621351126e-06, "loss": 0.5793, "step": 1350 }, { "epoch": 0.5224740683826354, "grad_norm": 5.6142964363098145, "learning_rate": 9.437030859049209e-06, "loss": 0.5797, "step": 1360 }, { "epoch": 0.5263157894736842, "grad_norm": 18.615188598632812, "learning_rate": 9.426605504587157e-06, "loss": 0.6041, "step": 1370 }, { "epoch": 0.530157510564733, "grad_norm": 10.953849792480469, "learning_rate": 9.416180150125104e-06, "loss": 0.5933, "step": 1380 }, { "epoch": 0.5339992316557818, "grad_norm": 11.613428115844727, "learning_rate": 9.405754795663053e-06, "loss": 0.5275, "step": 1390 }, { "epoch": 0.5378409527468305, "grad_norm": 12.725924491882324, "learning_rate": 9.395329441201003e-06, "loss": 0.5673, "step": 1400 }, { "epoch": 0.5416826738378794, "grad_norm": 34.54634094238281, "learning_rate": 9.38490408673895e-06, "loss": 0.6717, "step": 1410 }, { "epoch": 0.5455243949289281, "grad_norm": 21.028316497802734, "learning_rate": 9.374478732276898e-06, "loss": 0.5483, "step": 1420 }, { "epoch": 0.549366116019977, "grad_norm": 30.281667709350586, "learning_rate": 9.364053377814847e-06, "loss": 0.7806, "step": 1430 }, { "epoch": 0.5532078371110257, "grad_norm": 11.983960151672363, "learning_rate": 9.353628023352795e-06, "loss": 0.5061, "step": 1440 }, { "epoch": 0.5570495582020746, "grad_norm": 6.99747896194458, "learning_rate": 9.343202668890742e-06, "loss": 0.5623, "step": 1450 }, { "epoch": 0.5608912792931233, "grad_norm": 11.219843864440918, "learning_rate": 9.332777314428691e-06, "loss": 0.6227, "step": 1460 }, { "epoch": 0.5647330003841721, "grad_norm": 8.85550308227539, "learning_rate": 9.32235195996664e-06, "loss": 0.5908, "step": 1470 }, { "epoch": 0.5685747214752209, "grad_norm": 15.55632209777832, "learning_rate": 9.311926605504588e-06, "loss": 0.5888, "step": 1480 }, { "epoch": 0.5724164425662697, "grad_norm": 5.281271457672119, "learning_rate": 9.301501251042536e-06, "loss": 0.4795, "step": 1490 }, { "epoch": 0.5762581636573185, "grad_norm": 10.58825397491455, "learning_rate": 9.291075896580485e-06, "loss": 0.4825, "step": 1500 }, { "epoch": 0.5800998847483673, "grad_norm": 13.970091819763184, "learning_rate": 9.280650542118432e-06, "loss": 0.6107, "step": 1510 }, { "epoch": 0.583941605839416, "grad_norm": 15.610709190368652, "learning_rate": 9.270225187656382e-06, "loss": 0.454, "step": 1520 }, { "epoch": 0.5877833269304649, "grad_norm": 9.203128814697266, "learning_rate": 9.25979983319433e-06, "loss": 0.596, "step": 1530 }, { "epoch": 0.5916250480215136, "grad_norm": 12.340123176574707, "learning_rate": 9.249374478732278e-06, "loss": 0.6622, "step": 1540 }, { "epoch": 0.5954667691125625, "grad_norm": 6.894665718078613, "learning_rate": 9.238949124270226e-06, "loss": 0.4944, "step": 1550 }, { "epoch": 0.5993084902036112, "grad_norm": 22.704559326171875, 
"learning_rate": 9.228523769808174e-06, "loss": 0.6151, "step": 1560 }, { "epoch": 0.60315021129466, "grad_norm": 6.272796630859375, "learning_rate": 9.218098415346123e-06, "loss": 0.4866, "step": 1570 }, { "epoch": 0.6069919323857088, "grad_norm": 40.39881134033203, "learning_rate": 9.20767306088407e-06, "loss": 0.5471, "step": 1580 }, { "epoch": 0.6108336534767576, "grad_norm": 9.417654037475586, "learning_rate": 9.19724770642202e-06, "loss": 0.5702, "step": 1590 }, { "epoch": 0.6146753745678064, "grad_norm": 8.880293846130371, "learning_rate": 9.186822351959967e-06, "loss": 0.7593, "step": 1600 }, { "epoch": 0.6185170956588552, "grad_norm": 16.337783813476562, "learning_rate": 9.176396997497916e-06, "loss": 0.3708, "step": 1610 }, { "epoch": 0.622358816749904, "grad_norm": 15.34815502166748, "learning_rate": 9.165971643035864e-06, "loss": 0.6829, "step": 1620 }, { "epoch": 0.6262005378409528, "grad_norm": 12.125506401062012, "learning_rate": 9.155546288573811e-06, "loss": 0.5839, "step": 1630 }, { "epoch": 0.6300422589320015, "grad_norm": 12.340716361999512, "learning_rate": 9.14512093411176e-06, "loss": 0.5855, "step": 1640 }, { "epoch": 0.6338839800230504, "grad_norm": 17.276071548461914, "learning_rate": 9.134695579649708e-06, "loss": 0.4579, "step": 1650 }, { "epoch": 0.6377257011140991, "grad_norm": 4.054512977600098, "learning_rate": 9.124270225187658e-06, "loss": 0.3717, "step": 1660 }, { "epoch": 0.641567422205148, "grad_norm": 26.277875900268555, "learning_rate": 9.113844870725605e-06, "loss": 0.6934, "step": 1670 }, { "epoch": 0.6454091432961967, "grad_norm": 23.17993927001953, "learning_rate": 9.103419516263554e-06, "loss": 0.8507, "step": 1680 }, { "epoch": 0.6492508643872454, "grad_norm": 30.25948715209961, "learning_rate": 9.092994161801502e-06, "loss": 0.5851, "step": 1690 }, { "epoch": 0.6530925854782943, "grad_norm": 12.083464622497559, "learning_rate": 9.08256880733945e-06, "loss": 0.5214, "step": 1700 }, { "epoch": 0.656934306569343, "grad_norm": 26.132946014404297, "learning_rate": 9.072143452877399e-06, "loss": 0.5715, "step": 1710 }, { "epoch": 0.6607760276603919, "grad_norm": 13.83061408996582, "learning_rate": 9.061718098415346e-06, "loss": 0.6076, "step": 1720 }, { "epoch": 0.6646177487514406, "grad_norm": 36.992679595947266, "learning_rate": 9.051292743953295e-06, "loss": 0.5795, "step": 1730 }, { "epoch": 0.6684594698424894, "grad_norm": 24.426977157592773, "learning_rate": 9.040867389491243e-06, "loss": 0.6913, "step": 1740 }, { "epoch": 0.6723011909335382, "grad_norm": 15.399202346801758, "learning_rate": 9.030442035029192e-06, "loss": 0.647, "step": 1750 }, { "epoch": 0.676142912024587, "grad_norm": 36.72813034057617, "learning_rate": 9.02001668056714e-06, "loss": 0.7641, "step": 1760 }, { "epoch": 0.6799846331156358, "grad_norm": 19.219661712646484, "learning_rate": 9.009591326105089e-06, "loss": 0.7111, "step": 1770 }, { "epoch": 0.6838263542066846, "grad_norm": 10.353839874267578, "learning_rate": 8.999165971643037e-06, "loss": 0.437, "step": 1780 }, { "epoch": 0.6876680752977334, "grad_norm": 12.179790496826172, "learning_rate": 8.988740617180984e-06, "loss": 0.6514, "step": 1790 }, { "epoch": 0.6915097963887822, "grad_norm": 15.036273956298828, "learning_rate": 8.978315262718933e-06, "loss": 0.4611, "step": 1800 }, { "epoch": 0.6953515174798309, "grad_norm": 12.146955490112305, "learning_rate": 8.967889908256881e-06, "loss": 0.5176, "step": 1810 }, { "epoch": 0.6991932385708798, "grad_norm": 16.004959106445312, "learning_rate": 8.95746455379483e-06, 
"loss": 0.3749, "step": 1820 }, { "epoch": 0.7030349596619285, "grad_norm": 23.500526428222656, "learning_rate": 8.947039199332778e-06, "loss": 0.6124, "step": 1830 }, { "epoch": 0.7068766807529774, "grad_norm": 11.367331504821777, "learning_rate": 8.936613844870727e-06, "loss": 0.3982, "step": 1840 }, { "epoch": 0.7107184018440261, "grad_norm": 13.60319709777832, "learning_rate": 8.926188490408674e-06, "loss": 0.4618, "step": 1850 }, { "epoch": 0.7145601229350749, "grad_norm": 9.807296752929688, "learning_rate": 8.915763135946624e-06, "loss": 0.552, "step": 1860 }, { "epoch": 0.7184018440261237, "grad_norm": 41.238895416259766, "learning_rate": 8.905337781484571e-06, "loss": 0.738, "step": 1870 }, { "epoch": 0.7222435651171725, "grad_norm": 8.117176055908203, "learning_rate": 8.894912427022519e-06, "loss": 0.546, "step": 1880 }, { "epoch": 0.7260852862082213, "grad_norm": 8.292084693908691, "learning_rate": 8.884487072560468e-06, "loss": 0.5098, "step": 1890 }, { "epoch": 0.7299270072992701, "grad_norm": 16.20579719543457, "learning_rate": 8.874061718098416e-06, "loss": 0.5693, "step": 1900 }, { "epoch": 0.7337687283903188, "grad_norm": 10.686980247497559, "learning_rate": 8.863636363636365e-06, "loss": 0.6848, "step": 1910 }, { "epoch": 0.7376104494813677, "grad_norm": 12.386652946472168, "learning_rate": 8.853211009174312e-06, "loss": 0.5282, "step": 1920 }, { "epoch": 0.7414521705724164, "grad_norm": 11.129962921142578, "learning_rate": 8.842785654712262e-06, "loss": 0.5789, "step": 1930 }, { "epoch": 0.7452938916634653, "grad_norm": 8.727615356445312, "learning_rate": 8.83236030025021e-06, "loss": 0.5936, "step": 1940 }, { "epoch": 0.749135612754514, "grad_norm": 11.261787414550781, "learning_rate": 8.821934945788157e-06, "loss": 0.5308, "step": 1950 }, { "epoch": 0.7529773338455628, "grad_norm": 23.387935638427734, "learning_rate": 8.811509591326106e-06, "loss": 0.5074, "step": 1960 }, { "epoch": 0.7568190549366116, "grad_norm": 20.772794723510742, "learning_rate": 8.801084236864054e-06, "loss": 0.6157, "step": 1970 }, { "epoch": 0.7606607760276604, "grad_norm": 23.0604305267334, "learning_rate": 8.790658882402003e-06, "loss": 0.5272, "step": 1980 }, { "epoch": 0.7645024971187092, "grad_norm": 44.302425384521484, "learning_rate": 8.78023352793995e-06, "loss": 0.6709, "step": 1990 }, { "epoch": 0.768344218209758, "grad_norm": 16.66979217529297, "learning_rate": 8.7698081734779e-06, "loss": 0.4651, "step": 2000 }, { "epoch": 0.7721859393008068, "grad_norm": 18.14614486694336, "learning_rate": 8.759382819015847e-06, "loss": 0.5747, "step": 2010 }, { "epoch": 0.7760276603918556, "grad_norm": 10.635650634765625, "learning_rate": 8.748957464553796e-06, "loss": 0.5169, "step": 2020 }, { "epoch": 0.7798693814829043, "grad_norm": 13.54704475402832, "learning_rate": 8.738532110091744e-06, "loss": 0.4784, "step": 2030 }, { "epoch": 0.7837111025739532, "grad_norm": 12.35689926147461, "learning_rate": 8.728106755629691e-06, "loss": 0.5529, "step": 2040 }, { "epoch": 0.7875528236650019, "grad_norm": 7.250340461730957, "learning_rate": 8.71768140116764e-06, "loss": 0.6229, "step": 2050 }, { "epoch": 0.7913945447560508, "grad_norm": 16.60529327392578, "learning_rate": 8.707256046705588e-06, "loss": 0.5726, "step": 2060 }, { "epoch": 0.7952362658470995, "grad_norm": 18.4666805267334, "learning_rate": 8.696830692243537e-06, "loss": 0.5643, "step": 2070 }, { "epoch": 0.7990779869381482, "grad_norm": 31.986207962036133, "learning_rate": 8.686405337781485e-06, "loss": 0.4759, "step": 2080 }, { "epoch": 
0.8029197080291971, "grad_norm": 30.724218368530273, "learning_rate": 8.675979983319434e-06, "loss": 0.6527, "step": 2090 }, { "epoch": 0.8067614291202458, "grad_norm": 22.7759952545166, "learning_rate": 8.665554628857382e-06, "loss": 0.6438, "step": 2100 }, { "epoch": 0.8106031502112947, "grad_norm": 14.61020565032959, "learning_rate": 8.65512927439533e-06, "loss": 0.3962, "step": 2110 }, { "epoch": 0.8144448713023434, "grad_norm": 20.27998161315918, "learning_rate": 8.644703919933279e-06, "loss": 0.6989, "step": 2120 }, { "epoch": 0.8182865923933922, "grad_norm": 10.2035493850708, "learning_rate": 8.634278565471226e-06, "loss": 0.5543, "step": 2130 }, { "epoch": 0.822128313484441, "grad_norm": 16.954448699951172, "learning_rate": 8.623853211009175e-06, "loss": 0.4598, "step": 2140 }, { "epoch": 0.8259700345754898, "grad_norm": 24.188817977905273, "learning_rate": 8.613427856547123e-06, "loss": 0.674, "step": 2150 }, { "epoch": 0.8298117556665386, "grad_norm": 8.472796440124512, "learning_rate": 8.603002502085072e-06, "loss": 0.4246, "step": 2160 }, { "epoch": 0.8336534767575874, "grad_norm": 21.893178939819336, "learning_rate": 8.59257714762302e-06, "loss": 0.5788, "step": 2170 }, { "epoch": 0.8374951978486362, "grad_norm": 8.200776100158691, "learning_rate": 8.582151793160967e-06, "loss": 0.4215, "step": 2180 }, { "epoch": 0.841336918939685, "grad_norm": 21.523435592651367, "learning_rate": 8.571726438698917e-06, "loss": 0.4367, "step": 2190 }, { "epoch": 0.8451786400307337, "grad_norm": 18.608898162841797, "learning_rate": 8.561301084236864e-06, "loss": 0.6324, "step": 2200 }, { "epoch": 0.8490203611217826, "grad_norm": 19.39713478088379, "learning_rate": 8.550875729774813e-06, "loss": 0.382, "step": 2210 }, { "epoch": 0.8528620822128313, "grad_norm": 15.368677139282227, "learning_rate": 8.540450375312761e-06, "loss": 0.492, "step": 2220 }, { "epoch": 0.8567038033038802, "grad_norm": 6.85573673248291, "learning_rate": 8.53002502085071e-06, "loss": 0.6801, "step": 2230 }, { "epoch": 0.8605455243949289, "grad_norm": 11.223825454711914, "learning_rate": 8.519599666388658e-06, "loss": 0.7763, "step": 2240 }, { "epoch": 0.8643872454859777, "grad_norm": 11.18885326385498, "learning_rate": 8.509174311926605e-06, "loss": 0.585, "step": 2250 }, { "epoch": 0.8682289665770265, "grad_norm": 21.877548217773438, "learning_rate": 8.498748957464554e-06, "loss": 0.5873, "step": 2260 }, { "epoch": 0.8720706876680753, "grad_norm": 25.72136116027832, "learning_rate": 8.488323603002504e-06, "loss": 0.5796, "step": 2270 }, { "epoch": 0.8759124087591241, "grad_norm": 16.472366333007812, "learning_rate": 8.477898248540451e-06, "loss": 0.4431, "step": 2280 }, { "epoch": 0.8797541298501729, "grad_norm": 5.752821445465088, "learning_rate": 8.467472894078399e-06, "loss": 0.7004, "step": 2290 }, { "epoch": 0.8835958509412216, "grad_norm": 13.56191349029541, "learning_rate": 8.457047539616348e-06, "loss": 0.4899, "step": 2300 }, { "epoch": 0.8874375720322705, "grad_norm": 5.017563343048096, "learning_rate": 8.446622185154296e-06, "loss": 0.7014, "step": 2310 }, { "epoch": 0.8912792931233192, "grad_norm": 15.450356483459473, "learning_rate": 8.436196830692243e-06, "loss": 0.5414, "step": 2320 }, { "epoch": 0.8951210142143681, "grad_norm": 16.416250228881836, "learning_rate": 8.425771476230192e-06, "loss": 0.698, "step": 2330 }, { "epoch": 0.8989627353054168, "grad_norm": 11.185935020446777, "learning_rate": 8.415346121768142e-06, "loss": 0.6264, "step": 2340 }, { "epoch": 0.9028044563964657, "grad_norm": 
22.787181854248047, "learning_rate": 8.40492076730609e-06, "loss": 0.6908, "step": 2350 }, { "epoch": 0.9066461774875144, "grad_norm": 11.522934913635254, "learning_rate": 8.394495412844037e-06, "loss": 0.5546, "step": 2360 }, { "epoch": 0.9104878985785632, "grad_norm": 18.260616302490234, "learning_rate": 8.384070058381986e-06, "loss": 0.5925, "step": 2370 }, { "epoch": 0.914329619669612, "grad_norm": 7.180076599121094, "learning_rate": 8.373644703919933e-06, "loss": 0.6639, "step": 2380 }, { "epoch": 0.9181713407606608, "grad_norm": 11.107264518737793, "learning_rate": 8.363219349457881e-06, "loss": 0.6762, "step": 2390 }, { "epoch": 0.9220130618517096, "grad_norm": 12.528190612792969, "learning_rate": 8.35279399499583e-06, "loss": 0.5435, "step": 2400 }, { "epoch": 0.9258547829427584, "grad_norm": 29.454421997070312, "learning_rate": 8.34236864053378e-06, "loss": 0.5074, "step": 2410 }, { "epoch": 0.9296965040338071, "grad_norm": 14.677248001098633, "learning_rate": 8.331943286071727e-06, "loss": 0.6161, "step": 2420 }, { "epoch": 0.933538225124856, "grad_norm": 8.907113075256348, "learning_rate": 8.321517931609675e-06, "loss": 0.353, "step": 2430 }, { "epoch": 0.9373799462159047, "grad_norm": 11.691315650939941, "learning_rate": 8.311092577147624e-06, "loss": 0.4516, "step": 2440 }, { "epoch": 0.9412216673069536, "grad_norm": 2.8710756301879883, "learning_rate": 8.300667222685571e-06, "loss": 0.6726, "step": 2450 }, { "epoch": 0.9450633883980023, "grad_norm": 11.67735481262207, "learning_rate": 8.290241868223519e-06, "loss": 0.6797, "step": 2460 }, { "epoch": 0.948905109489051, "grad_norm": 17.547286987304688, "learning_rate": 8.279816513761468e-06, "loss": 0.7029, "step": 2470 }, { "epoch": 0.9527468305800999, "grad_norm": 11.663725852966309, "learning_rate": 8.269391159299417e-06, "loss": 0.5016, "step": 2480 }, { "epoch": 0.9565885516711486, "grad_norm": 9.743104934692383, "learning_rate": 8.258965804837365e-06, "loss": 0.5489, "step": 2490 }, { "epoch": 0.9604302727621975, "grad_norm": 9.579474449157715, "learning_rate": 8.248540450375313e-06, "loss": 0.5867, "step": 2500 }, { "epoch": 0.9642719938532462, "grad_norm": 13.63699722290039, "learning_rate": 8.238115095913262e-06, "loss": 0.5191, "step": 2510 }, { "epoch": 0.968113714944295, "grad_norm": 10.331293106079102, "learning_rate": 8.227689741451211e-06, "loss": 0.6654, "step": 2520 }, { "epoch": 0.9719554360353438, "grad_norm": 10.614498138427734, "learning_rate": 8.217264386989159e-06, "loss": 0.5947, "step": 2530 }, { "epoch": 0.9757971571263926, "grad_norm": 10.182368278503418, "learning_rate": 8.206839032527106e-06, "loss": 0.5482, "step": 2540 }, { "epoch": 0.9796388782174414, "grad_norm": 15.42397403717041, "learning_rate": 8.196413678065055e-06, "loss": 0.5144, "step": 2550 }, { "epoch": 0.9834805993084902, "grad_norm": 8.317682266235352, "learning_rate": 8.185988323603003e-06, "loss": 0.4518, "step": 2560 }, { "epoch": 0.987322320399539, "grad_norm": 43.10714340209961, "learning_rate": 8.17556296914095e-06, "loss": 0.5974, "step": 2570 }, { "epoch": 0.9911640414905878, "grad_norm": 7.906277656555176, "learning_rate": 8.1651376146789e-06, "loss": 0.5745, "step": 2580 }, { "epoch": 0.9950057625816365, "grad_norm": 10.229177474975586, "learning_rate": 8.154712260216849e-06, "loss": 0.4322, "step": 2590 }, { "epoch": 0.9988474836726854, "grad_norm": 12.773459434509277, "learning_rate": 8.144286905754796e-06, "loss": 0.5947, "step": 2600 }, { "epoch": 1.0, "eval_accuracy": 0.7737969455383729, "eval_f1_per_label": [ 
0.778160354156419, 0.7230364524614806, 0.8299703264094955 ], "eval_f1_weighted": 0.774424228670159, "eval_loss": 0.5626052618026733, "eval_precision_per_label": [ 0.7752368507023848, 0.7131208302446257, 0.8468059339993945 ], "eval_precision_weighted": 0.7753803799282116, "eval_recall_per_label": [ 0.7811059907834101, 0.7332317073170732, 0.8137910968868199 ], "eval_recall_weighted": 0.7737969455383729, "eval_runtime": 38.6689, "eval_samples_per_second": 269.235, "eval_steps_per_second": 33.67, "step": 2603 }, { "epoch": 1.0026892047637341, "grad_norm": 10.15665340423584, "learning_rate": 8.133861551292744e-06, "loss": 0.4549, "step": 2610 }, { "epoch": 1.006530925854783, "grad_norm": 15.496761322021484, "learning_rate": 8.123436196830693e-06, "loss": 0.3519, "step": 2620 }, { "epoch": 1.0103726469458318, "grad_norm": 15.590469360351562, "learning_rate": 8.11301084236864e-06, "loss": 0.5326, "step": 2630 }, { "epoch": 1.0142143680368805, "grad_norm": 23.553977966308594, "learning_rate": 8.102585487906588e-06, "loss": 0.448, "step": 2640 }, { "epoch": 1.0180560891279293, "grad_norm": 12.566988945007324, "learning_rate": 8.092160133444538e-06, "loss": 0.4067, "step": 2650 }, { "epoch": 1.0218978102189782, "grad_norm": 18.021800994873047, "learning_rate": 8.081734778982487e-06, "loss": 0.6817, "step": 2660 }, { "epoch": 1.0257395313100268, "grad_norm": 2.4938793182373047, "learning_rate": 8.071309424520434e-06, "loss": 0.4567, "step": 2670 }, { "epoch": 1.0295812524010757, "grad_norm": 19.75528335571289, "learning_rate": 8.060884070058382e-06, "loss": 0.5433, "step": 2680 }, { "epoch": 1.0334229734921245, "grad_norm": 20.85611343383789, "learning_rate": 8.050458715596331e-06, "loss": 0.4946, "step": 2690 }, { "epoch": 1.0372646945831732, "grad_norm": 11.405384063720703, "learning_rate": 8.040033361134279e-06, "loss": 0.4871, "step": 2700 }, { "epoch": 1.041106415674222, "grad_norm": 20.596933364868164, "learning_rate": 8.029608006672226e-06, "loss": 0.5361, "step": 2710 }, { "epoch": 1.0449481367652709, "grad_norm": 19.30833625793457, "learning_rate": 8.019182652210176e-06, "loss": 0.5362, "step": 2720 }, { "epoch": 1.0487898578563197, "grad_norm": 29.342666625976562, "learning_rate": 8.008757297748125e-06, "loss": 0.4175, "step": 2730 }, { "epoch": 1.0526315789473684, "grad_norm": 31.298583984375, "learning_rate": 7.998331943286072e-06, "loss": 0.4556, "step": 2740 }, { "epoch": 1.0564733000384172, "grad_norm": 22.037307739257812, "learning_rate": 7.98790658882402e-06, "loss": 0.4966, "step": 2750 }, { "epoch": 1.060315021129466, "grad_norm": 29.734373092651367, "learning_rate": 7.977481234361969e-06, "loss": 0.4146, "step": 2760 }, { "epoch": 1.0641567422205147, "grad_norm": 26.209257125854492, "learning_rate": 7.967055879899918e-06, "loss": 0.5298, "step": 2770 }, { "epoch": 1.0679984633115636, "grad_norm": 28.883424758911133, "learning_rate": 7.956630525437866e-06, "loss": 0.4872, "step": 2780 }, { "epoch": 1.0718401844026124, "grad_norm": 20.605607986450195, "learning_rate": 7.946205170975813e-06, "loss": 0.3929, "step": 2790 }, { "epoch": 1.0756819054936613, "grad_norm": 4.712803363800049, "learning_rate": 7.935779816513763e-06, "loss": 0.429, "step": 2800 }, { "epoch": 1.07952362658471, "grad_norm": 24.59100341796875, "learning_rate": 7.92535446205171e-06, "loss": 0.4387, "step": 2810 }, { "epoch": 1.0833653476757588, "grad_norm": 41.113101959228516, "learning_rate": 7.914929107589658e-06, "loss": 0.3661, "step": 2820 }, { "epoch": 1.0872070687668076, "grad_norm": 19.407039642333984, 
"learning_rate": 7.904503753127607e-06, "loss": 0.3685, "step": 2830 }, { "epoch": 1.0910487898578562, "grad_norm": 17.455015182495117, "learning_rate": 7.894078398665556e-06, "loss": 0.3471, "step": 2840 }, { "epoch": 1.094890510948905, "grad_norm": 31.661861419677734, "learning_rate": 7.883653044203504e-06, "loss": 0.4217, "step": 2850 }, { "epoch": 1.098732232039954, "grad_norm": 25.433256149291992, "learning_rate": 7.873227689741451e-06, "loss": 0.563, "step": 2860 }, { "epoch": 1.1025739531310026, "grad_norm": 17.527708053588867, "learning_rate": 7.8628023352794e-06, "loss": 0.6654, "step": 2870 }, { "epoch": 1.1064156742220514, "grad_norm": 16.530296325683594, "learning_rate": 7.852376980817348e-06, "loss": 0.397, "step": 2880 }, { "epoch": 1.1102573953131003, "grad_norm": 20.510169982910156, "learning_rate": 7.841951626355296e-06, "loss": 0.5602, "step": 2890 }, { "epoch": 1.1140991164041492, "grad_norm": 17.688804626464844, "learning_rate": 7.831526271893245e-06, "loss": 0.4872, "step": 2900 }, { "epoch": 1.1179408374951978, "grad_norm": 14.219194412231445, "learning_rate": 7.821100917431194e-06, "loss": 0.4509, "step": 2910 }, { "epoch": 1.1217825585862466, "grad_norm": 19.561573028564453, "learning_rate": 7.810675562969142e-06, "loss": 0.516, "step": 2920 }, { "epoch": 1.1256242796772955, "grad_norm": 4.555116176605225, "learning_rate": 7.80025020850709e-06, "loss": 0.3659, "step": 2930 }, { "epoch": 1.1294660007683441, "grad_norm": 21.258682250976562, "learning_rate": 7.789824854045039e-06, "loss": 0.7407, "step": 2940 }, { "epoch": 1.133307721859393, "grad_norm": 23.416915893554688, "learning_rate": 7.779399499582986e-06, "loss": 0.5668, "step": 2950 }, { "epoch": 1.1371494429504418, "grad_norm": 15.231550216674805, "learning_rate": 7.768974145120934e-06, "loss": 0.7209, "step": 2960 }, { "epoch": 1.1409911640414907, "grad_norm": 13.552851676940918, "learning_rate": 7.758548790658883e-06, "loss": 0.3603, "step": 2970 }, { "epoch": 1.1448328851325393, "grad_norm": 13.771663665771484, "learning_rate": 7.748123436196832e-06, "loss": 0.4201, "step": 2980 }, { "epoch": 1.1486746062235882, "grad_norm": 10.52929973602295, "learning_rate": 7.73769808173478e-06, "loss": 0.4927, "step": 2990 }, { "epoch": 1.152516327314637, "grad_norm": 10.99349594116211, "learning_rate": 7.727272727272727e-06, "loss": 0.4471, "step": 3000 }, { "epoch": 1.1563580484056857, "grad_norm": 28.14784812927246, "learning_rate": 7.716847372810676e-06, "loss": 0.4885, "step": 3010 }, { "epoch": 1.1601997694967345, "grad_norm": 14.829123497009277, "learning_rate": 7.706422018348626e-06, "loss": 0.4161, "step": 3020 }, { "epoch": 1.1640414905877834, "grad_norm": 16.16433334350586, "learning_rate": 7.695996663886573e-06, "loss": 0.5096, "step": 3030 }, { "epoch": 1.167883211678832, "grad_norm": 22.093318939208984, "learning_rate": 7.68557130942452e-06, "loss": 0.4168, "step": 3040 }, { "epoch": 1.1717249327698809, "grad_norm": 13.417600631713867, "learning_rate": 7.67514595496247e-06, "loss": 0.5085, "step": 3050 }, { "epoch": 1.1755666538609297, "grad_norm": 18.72173309326172, "learning_rate": 7.664720600500418e-06, "loss": 0.3777, "step": 3060 }, { "epoch": 1.1794083749519784, "grad_norm": 30.103759765625, "learning_rate": 7.654295246038365e-06, "loss": 0.6035, "step": 3070 }, { "epoch": 1.1832500960430272, "grad_norm": 2.8469719886779785, "learning_rate": 7.643869891576314e-06, "loss": 0.2965, "step": 3080 }, { "epoch": 1.187091817134076, "grad_norm": 20.917043685913086, "learning_rate": 7.633444537114264e-06, 
"loss": 0.3834, "step": 3090 }, { "epoch": 1.190933538225125, "grad_norm": 30.07198715209961, "learning_rate": 7.62301918265221e-06, "loss": 0.4808, "step": 3100 }, { "epoch": 1.1947752593161736, "grad_norm": 17.897123336791992, "learning_rate": 7.612593828190159e-06, "loss": 0.5116, "step": 3110 }, { "epoch": 1.1986169804072224, "grad_norm": 20.788223266601562, "learning_rate": 7.602168473728108e-06, "loss": 0.5371, "step": 3120 }, { "epoch": 1.2024587014982713, "grad_norm": 17.470685958862305, "learning_rate": 7.5917431192660555e-06, "loss": 0.6223, "step": 3130 }, { "epoch": 1.2063004225893201, "grad_norm": 5.824326515197754, "learning_rate": 7.581317764804004e-06, "loss": 0.3919, "step": 3140 }, { "epoch": 1.2101421436803688, "grad_norm": 26.392175674438477, "learning_rate": 7.570892410341952e-06, "loss": 0.499, "step": 3150 }, { "epoch": 1.2139838647714176, "grad_norm": 8.42440414428711, "learning_rate": 7.560467055879901e-06, "loss": 0.3343, "step": 3160 }, { "epoch": 1.2178255858624665, "grad_norm": 10.132403373718262, "learning_rate": 7.550041701417848e-06, "loss": 0.5502, "step": 3170 }, { "epoch": 1.221667306953515, "grad_norm": 8.304975509643555, "learning_rate": 7.539616346955797e-06, "loss": 0.5273, "step": 3180 }, { "epoch": 1.225509028044564, "grad_norm": 8.726995468139648, "learning_rate": 7.529190992493746e-06, "loss": 0.5491, "step": 3190 }, { "epoch": 1.2293507491356128, "grad_norm": 21.480060577392578, "learning_rate": 7.518765638031694e-06, "loss": 0.4478, "step": 3200 }, { "epoch": 1.2331924702266615, "grad_norm": 19.07476806640625, "learning_rate": 7.508340283569642e-06, "loss": 0.6649, "step": 3210 }, { "epoch": 1.2370341913177103, "grad_norm": 22.936288833618164, "learning_rate": 7.49791492910759e-06, "loss": 0.5483, "step": 3220 }, { "epoch": 1.2408759124087592, "grad_norm": 11.86825942993164, "learning_rate": 7.487489574645539e-06, "loss": 0.5724, "step": 3230 }, { "epoch": 1.2447176334998078, "grad_norm": 3.569042921066284, "learning_rate": 7.477064220183486e-06, "loss": 0.2133, "step": 3240 }, { "epoch": 1.2485593545908567, "grad_norm": 21.424320220947266, "learning_rate": 7.466638865721435e-06, "loss": 0.7125, "step": 3250 }, { "epoch": 1.2524010756819055, "grad_norm": 10.503767967224121, "learning_rate": 7.456213511259384e-06, "loss": 0.3605, "step": 3260 }, { "epoch": 1.2562427967729544, "grad_norm": 14.187332153320312, "learning_rate": 7.445788156797332e-06, "loss": 0.337, "step": 3270 }, { "epoch": 1.260084517864003, "grad_norm": 39.91868591308594, "learning_rate": 7.43536280233528e-06, "loss": 0.6577, "step": 3280 }, { "epoch": 1.2639262389550519, "grad_norm": 22.75690269470215, "learning_rate": 7.424937447873228e-06, "loss": 0.514, "step": 3290 }, { "epoch": 1.2677679600461007, "grad_norm": 7.429053783416748, "learning_rate": 7.414512093411177e-06, "loss": 0.4178, "step": 3300 }, { "epoch": 1.2716096811371496, "grad_norm": 33.08567428588867, "learning_rate": 7.404086738949124e-06, "loss": 0.5662, "step": 3310 }, { "epoch": 1.2754514022281982, "grad_norm": 3.4354841709136963, "learning_rate": 7.393661384487073e-06, "loss": 0.3871, "step": 3320 }, { "epoch": 1.279293123319247, "grad_norm": 7.866048336029053, "learning_rate": 7.383236030025022e-06, "loss": 0.3705, "step": 3330 }, { "epoch": 1.283134844410296, "grad_norm": 21.70540428161621, "learning_rate": 7.37281067556297e-06, "loss": 0.4563, "step": 3340 }, { "epoch": 1.2869765655013445, "grad_norm": 8.615718841552734, "learning_rate": 7.362385321100918e-06, "loss": 0.4756, "step": 3350 }, { "epoch": 
1.2908182865923934, "grad_norm": 7.302259922027588, "learning_rate": 7.351959966638866e-06, "loss": 0.4554, "step": 3360 }, { "epoch": 1.2946600076834422, "grad_norm": 22.20801544189453, "learning_rate": 7.341534612176815e-06, "loss": 0.5313, "step": 3370 }, { "epoch": 1.2985017287744909, "grad_norm": 30.15619468688965, "learning_rate": 7.331109257714763e-06, "loss": 0.496, "step": 3380 }, { "epoch": 1.3023434498655397, "grad_norm": 14.92813491821289, "learning_rate": 7.320683903252711e-06, "loss": 0.3749, "step": 3390 }, { "epoch": 1.3061851709565886, "grad_norm": 29.67280387878418, "learning_rate": 7.31025854879066e-06, "loss": 0.6701, "step": 3400 }, { "epoch": 1.3100268920476372, "grad_norm": 15.611763954162598, "learning_rate": 7.299833194328608e-06, "loss": 0.4524, "step": 3410 }, { "epoch": 1.313868613138686, "grad_norm": 26.292102813720703, "learning_rate": 7.2894078398665556e-06, "loss": 0.4924, "step": 3420 }, { "epoch": 1.317710334229735, "grad_norm": 15.659175872802734, "learning_rate": 7.278982485404504e-06, "loss": 0.6496, "step": 3430 }, { "epoch": 1.3215520553207838, "grad_norm": 14.91651725769043, "learning_rate": 7.268557130942453e-06, "loss": 0.4623, "step": 3440 }, { "epoch": 1.3253937764118324, "grad_norm": 31.96514892578125, "learning_rate": 7.258131776480401e-06, "loss": 0.6592, "step": 3450 }, { "epoch": 1.3292354975028813, "grad_norm": 21.0323543548584, "learning_rate": 7.247706422018349e-06, "loss": 0.67, "step": 3460 }, { "epoch": 1.3330772185939301, "grad_norm": 8.115620613098145, "learning_rate": 7.2372810675562975e-06, "loss": 0.3956, "step": 3470 }, { "epoch": 1.336918939684979, "grad_norm": 5.198733329772949, "learning_rate": 7.226855713094246e-06, "loss": 0.3424, "step": 3480 }, { "epoch": 1.3407606607760276, "grad_norm": 3.520685911178589, "learning_rate": 7.2164303586321935e-06, "loss": 0.5398, "step": 3490 }, { "epoch": 1.3446023818670765, "grad_norm": 17.651782989501953, "learning_rate": 7.206005004170143e-06, "loss": 0.5158, "step": 3500 }, { "epoch": 1.3484441029581253, "grad_norm": 6.332894325256348, "learning_rate": 7.195579649708091e-06, "loss": 0.396, "step": 3510 }, { "epoch": 1.352285824049174, "grad_norm": 17.260141372680664, "learning_rate": 7.185154295246039e-06, "loss": 0.6168, "step": 3520 }, { "epoch": 1.3561275451402228, "grad_norm": 13.815728187561035, "learning_rate": 7.174728940783987e-06, "loss": 0.3849, "step": 3530 }, { "epoch": 1.3599692662312717, "grad_norm": 64.78901672363281, "learning_rate": 7.1643035863219355e-06, "loss": 0.6198, "step": 3540 }, { "epoch": 1.3638109873223203, "grad_norm": 18.190109252929688, "learning_rate": 7.153878231859885e-06, "loss": 0.5692, "step": 3550 }, { "epoch": 1.3676527084133692, "grad_norm": 21.627002716064453, "learning_rate": 7.143452877397831e-06, "loss": 0.5806, "step": 3560 }, { "epoch": 1.371494429504418, "grad_norm": 13.441298484802246, "learning_rate": 7.133027522935781e-06, "loss": 0.5592, "step": 3570 }, { "epoch": 1.3753361505954667, "grad_norm": 19.517723083496094, "learning_rate": 7.122602168473729e-06, "loss": 0.4344, "step": 3580 }, { "epoch": 1.3791778716865155, "grad_norm": 16.326400756835938, "learning_rate": 7.112176814011677e-06, "loss": 0.4496, "step": 3590 }, { "epoch": 1.3830195927775644, "grad_norm": 7.95460844039917, "learning_rate": 7.101751459549625e-06, "loss": 0.2619, "step": 3600 }, { "epoch": 1.3868613138686132, "grad_norm": 23.37911033630371, "learning_rate": 7.091326105087573e-06, "loss": 0.467, "step": 3610 }, { "epoch": 1.3907030349596619, "grad_norm": 
13.212058067321777, "learning_rate": 7.080900750625523e-06, "loss": 0.4576, "step": 3620 }, { "epoch": 1.3945447560507107, "grad_norm": 21.34543800354004, "learning_rate": 7.07047539616347e-06, "loss": 0.5783, "step": 3630 }, { "epoch": 1.3983864771417596, "grad_norm": 20.114788055419922, "learning_rate": 7.0600500417014186e-06, "loss": 0.5636, "step": 3640 }, { "epoch": 1.4022281982328084, "grad_norm": 2.998847484588623, "learning_rate": 7.049624687239367e-06, "loss": 0.4757, "step": 3650 }, { "epoch": 1.406069919323857, "grad_norm": 11.517350196838379, "learning_rate": 7.0391993327773145e-06, "loss": 0.3284, "step": 3660 }, { "epoch": 1.409911640414906, "grad_norm": 14.91612434387207, "learning_rate": 7.028773978315263e-06, "loss": 0.4234, "step": 3670 }, { "epoch": 1.4137533615059548, "grad_norm": 20.586612701416016, "learning_rate": 7.018348623853211e-06, "loss": 0.5065, "step": 3680 }, { "epoch": 1.4175950825970034, "grad_norm": 23.526439666748047, "learning_rate": 7.0079232693911605e-06, "loss": 0.6208, "step": 3690 }, { "epoch": 1.4214368036880523, "grad_norm": 27.38988494873047, "learning_rate": 6.997497914929108e-06, "loss": 0.4807, "step": 3700 }, { "epoch": 1.425278524779101, "grad_norm": 4.145619869232178, "learning_rate": 6.9870725604670565e-06, "loss": 0.3003, "step": 3710 }, { "epoch": 1.4291202458701497, "grad_norm": 7.592724323272705, "learning_rate": 6.976647206005005e-06, "loss": 0.7445, "step": 3720 }, { "epoch": 1.4329619669611986, "grad_norm": 7.246058940887451, "learning_rate": 6.9662218515429524e-06, "loss": 0.4186, "step": 3730 }, { "epoch": 1.4368036880522475, "grad_norm": 15.480023384094238, "learning_rate": 6.955796497080901e-06, "loss": 0.6303, "step": 3740 }, { "epoch": 1.440645409143296, "grad_norm": 33.452980041503906, "learning_rate": 6.94537114261885e-06, "loss": 0.3228, "step": 3750 }, { "epoch": 1.444487130234345, "grad_norm": 9.024140357971191, "learning_rate": 6.9349457881567985e-06, "loss": 0.5242, "step": 3760 }, { "epoch": 1.4483288513253938, "grad_norm": 34.077571868896484, "learning_rate": 6.924520433694746e-06, "loss": 0.5266, "step": 3770 }, { "epoch": 1.4521705724164424, "grad_norm": 23.063976287841797, "learning_rate": 6.914095079232694e-06, "loss": 0.4766, "step": 3780 }, { "epoch": 1.4560122935074913, "grad_norm": 29.312820434570312, "learning_rate": 6.903669724770643e-06, "loss": 0.4143, "step": 3790 }, { "epoch": 1.4598540145985401, "grad_norm": 31.113893508911133, "learning_rate": 6.89324437030859e-06, "loss": 0.805, "step": 3800 }, { "epoch": 1.463695735689589, "grad_norm": 8.02818489074707, "learning_rate": 6.882819015846539e-06, "loss": 0.5619, "step": 3810 }, { "epoch": 1.4675374567806379, "grad_norm": 25.848047256469727, "learning_rate": 6.872393661384488e-06, "loss": 0.542, "step": 3820 }, { "epoch": 1.4713791778716865, "grad_norm": 17.603303909301758, "learning_rate": 6.861968306922436e-06, "loss": 0.5158, "step": 3830 }, { "epoch": 1.4752208989627353, "grad_norm": 5.893566608428955, "learning_rate": 6.851542952460384e-06, "loss": 0.4472, "step": 3840 }, { "epoch": 1.4790626200537842, "grad_norm": 25.97250747680664, "learning_rate": 6.841117597998332e-06, "loss": 0.416, "step": 3850 }, { "epoch": 1.4829043411448328, "grad_norm": 13.01366901397705, "learning_rate": 6.830692243536281e-06, "loss": 0.4919, "step": 3860 }, { "epoch": 1.4867460622358817, "grad_norm": 16.056318283081055, "learning_rate": 6.82026688907423e-06, "loss": 0.3523, "step": 3870 }, { "epoch": 1.4905877833269305, "grad_norm": 28.603076934814453, "learning_rate": 
6.8098415346121775e-06, "loss": 0.3091, "step": 3880 }, { "epoch": 1.4944295044179792, "grad_norm": 30.29176902770996, "learning_rate": 6.799416180150126e-06, "loss": 0.4874, "step": 3890 }, { "epoch": 1.498271225509028, "grad_norm": 22.889122009277344, "learning_rate": 6.788990825688074e-06, "loss": 0.5437, "step": 3900 }, { "epoch": 1.5021129466000769, "grad_norm": 7.505212306976318, "learning_rate": 6.778565471226022e-06, "loss": 0.4872, "step": 3910 }, { "epoch": 1.5059546676911255, "grad_norm": 23.37257194519043, "learning_rate": 6.76814011676397e-06, "loss": 0.3906, "step": 3920 }, { "epoch": 1.5097963887821744, "grad_norm": 9.461624145507812, "learning_rate": 6.757714762301919e-06, "loss": 0.4099, "step": 3930 }, { "epoch": 1.5136381098732232, "grad_norm": 21.49395751953125, "learning_rate": 6.747289407839868e-06, "loss": 0.5254, "step": 3940 }, { "epoch": 1.5174798309642719, "grad_norm": 17.44995880126953, "learning_rate": 6.736864053377815e-06, "loss": 0.4447, "step": 3950 }, { "epoch": 1.521321552055321, "grad_norm": 11.34809398651123, "learning_rate": 6.726438698915764e-06, "loss": 0.5709, "step": 3960 }, { "epoch": 1.5251632731463696, "grad_norm": 18.853090286254883, "learning_rate": 6.716013344453712e-06, "loss": 0.34, "step": 3970 }, { "epoch": 1.5290049942374182, "grad_norm": 42.40155792236328, "learning_rate": 6.70558798999166e-06, "loss": 0.4124, "step": 3980 }, { "epoch": 1.5328467153284673, "grad_norm": 16.232521057128906, "learning_rate": 6.695162635529608e-06, "loss": 0.5542, "step": 3990 }, { "epoch": 1.536688436419516, "grad_norm": 3.611929178237915, "learning_rate": 6.684737281067557e-06, "loss": 0.4139, "step": 4000 }, { "epoch": 1.5405301575105648, "grad_norm": 10.575961112976074, "learning_rate": 6.674311926605506e-06, "loss": 0.4181, "step": 4010 }, { "epoch": 1.5443718786016136, "grad_norm": 12.085956573486328, "learning_rate": 6.663886572143453e-06, "loss": 0.3825, "step": 4020 }, { "epoch": 1.5482135996926623, "grad_norm": 20.601011276245117, "learning_rate": 6.653461217681402e-06, "loss": 0.3923, "step": 4030 }, { "epoch": 1.5520553207837111, "grad_norm": 9.62112808227539, "learning_rate": 6.64303586321935e-06, "loss": 0.3697, "step": 4040 }, { "epoch": 1.55589704187476, "grad_norm": 34.249290466308594, "learning_rate": 6.632610508757298e-06, "loss": 0.5826, "step": 4050 }, { "epoch": 1.5597387629658086, "grad_norm": 16.832563400268555, "learning_rate": 6.622185154295246e-06, "loss": 0.52, "step": 4060 }, { "epoch": 1.5635804840568575, "grad_norm": 33.596351623535156, "learning_rate": 6.611759799833195e-06, "loss": 0.4241, "step": 4070 }, { "epoch": 1.5674222051479063, "grad_norm": 7.013982772827148, "learning_rate": 6.601334445371144e-06, "loss": 0.46, "step": 4080 }, { "epoch": 1.571263926238955, "grad_norm": 24.022945404052734, "learning_rate": 6.590909090909091e-06, "loss": 0.4563, "step": 4090 }, { "epoch": 1.5751056473300038, "grad_norm": 7.977485656738281, "learning_rate": 6.58048373644704e-06, "loss": 0.4118, "step": 4100 }, { "epoch": 1.5789473684210527, "grad_norm": 15.87927532196045, "learning_rate": 6.570058381984988e-06, "loss": 0.5734, "step": 4110 }, { "epoch": 1.5827890895121013, "grad_norm": 18.238597869873047, "learning_rate": 6.559633027522936e-06, "loss": 0.5423, "step": 4120 }, { "epoch": 1.5866308106031504, "grad_norm": 12.554328918457031, "learning_rate": 6.549207673060885e-06, "loss": 0.3568, "step": 4130 }, { "epoch": 1.590472531694199, "grad_norm": 12.9606294631958, "learning_rate": 6.538782318598833e-06, "loss": 0.5037, "step": 
4140 }, { "epoch": 1.5943142527852476, "grad_norm": 15.28049373626709, "learning_rate": 6.528356964136782e-06, "loss": 0.5114, "step": 4150 }, { "epoch": 1.5981559738762967, "grad_norm": 22.42987823486328, "learning_rate": 6.517931609674729e-06, "loss": 0.5374, "step": 4160 }, { "epoch": 1.6019976949673453, "grad_norm": 36.76483917236328, "learning_rate": 6.5075062552126776e-06, "loss": 0.5011, "step": 4170 }, { "epoch": 1.6058394160583942, "grad_norm": 22.071678161621094, "learning_rate": 6.497080900750626e-06, "loss": 0.4214, "step": 4180 }, { "epoch": 1.609681137149443, "grad_norm": 2.2837672233581543, "learning_rate": 6.4866555462885735e-06, "loss": 0.4492, "step": 4190 }, { "epoch": 1.6135228582404917, "grad_norm": 16.134366989135742, "learning_rate": 6.476230191826523e-06, "loss": 0.5131, "step": 4200 }, { "epoch": 1.6173645793315405, "grad_norm": 14.408499717712402, "learning_rate": 6.465804837364471e-06, "loss": 0.6302, "step": 4210 }, { "epoch": 1.6212063004225894, "grad_norm": 3.4417994022369385, "learning_rate": 6.4553794829024195e-06, "loss": 0.5384, "step": 4220 }, { "epoch": 1.625048021513638, "grad_norm": 11.830604553222656, "learning_rate": 6.444954128440367e-06, "loss": 0.4217, "step": 4230 }, { "epoch": 1.6288897426046869, "grad_norm": 9.047849655151367, "learning_rate": 6.4345287739783155e-06, "loss": 0.3594, "step": 4240 }, { "epoch": 1.6327314636957357, "grad_norm": 7.678354740142822, "learning_rate": 6.424103419516265e-06, "loss": 0.4508, "step": 4250 }, { "epoch": 1.6365731847867844, "grad_norm": 5.697566032409668, "learning_rate": 6.413678065054212e-06, "loss": 0.4601, "step": 4260 }, { "epoch": 1.6404149058778332, "grad_norm": 15.278615951538086, "learning_rate": 6.403252710592161e-06, "loss": 0.4632, "step": 4270 }, { "epoch": 1.644256626968882, "grad_norm": 6.866236209869385, "learning_rate": 6.392827356130109e-06, "loss": 0.6104, "step": 4280 }, { "epoch": 1.6480983480599307, "grad_norm": 42.67068862915039, "learning_rate": 6.3824020016680575e-06, "loss": 0.7307, "step": 4290 }, { "epoch": 1.6519400691509798, "grad_norm": 42.591697692871094, "learning_rate": 6.371976647206005e-06, "loss": 0.4915, "step": 4300 }, { "epoch": 1.6557817902420284, "grad_norm": 33.19038391113281, "learning_rate": 6.361551292743953e-06, "loss": 0.3862, "step": 4310 }, { "epoch": 1.659623511333077, "grad_norm": 21.831031799316406, "learning_rate": 6.351125938281903e-06, "loss": 0.4684, "step": 4320 }, { "epoch": 1.6634652324241261, "grad_norm": 10.749385833740234, "learning_rate": 6.34070058381985e-06, "loss": 0.5235, "step": 4330 }, { "epoch": 1.6673069535151748, "grad_norm": 8.519107818603516, "learning_rate": 6.330275229357799e-06, "loss": 0.4614, "step": 4340 }, { "epoch": 1.6711486746062236, "grad_norm": 10.042859077453613, "learning_rate": 6.319849874895747e-06, "loss": 0.4232, "step": 4350 }, { "epoch": 1.6749903956972725, "grad_norm": 15.31753921508789, "learning_rate": 6.309424520433695e-06, "loss": 0.6633, "step": 4360 }, { "epoch": 1.6788321167883211, "grad_norm": 15.346444129943848, "learning_rate": 6.298999165971643e-06, "loss": 0.4809, "step": 4370 }, { "epoch": 1.68267383787937, "grad_norm": 14.51533031463623, "learning_rate": 6.288573811509592e-06, "loss": 0.3116, "step": 4380 }, { "epoch": 1.6865155589704188, "grad_norm": 10.794851303100586, "learning_rate": 6.2781484570475406e-06, "loss": 0.4442, "step": 4390 }, { "epoch": 1.6903572800614675, "grad_norm": 26.648527145385742, "learning_rate": 6.267723102585488e-06, "loss": 0.4151, "step": 4400 }, { "epoch": 
1.6941990011525163, "grad_norm": 14.879584312438965, "learning_rate": 6.2572977481234365e-06, "loss": 0.5536, "step": 4410 }, { "epoch": 1.6980407222435652, "grad_norm": 28.443931579589844, "learning_rate": 6.246872393661385e-06, "loss": 0.4466, "step": 4420 }, { "epoch": 1.7018824433346138, "grad_norm": 22.08489227294922, "learning_rate": 6.236447039199333e-06, "loss": 0.5109, "step": 4430 }, { "epoch": 1.7057241644256627, "grad_norm": 21.044944763183594, "learning_rate": 6.226021684737281e-06, "loss": 0.5316, "step": 4440 }, { "epoch": 1.7095658855167115, "grad_norm": 29.13035774230957, "learning_rate": 6.21559633027523e-06, "loss": 0.4993, "step": 4450 }, { "epoch": 1.7134076066077601, "grad_norm": 11.93812084197998, "learning_rate": 6.2051709758131785e-06, "loss": 0.4563, "step": 4460 }, { "epoch": 1.7172493276988092, "grad_norm": 25.17680549621582, "learning_rate": 6.194745621351126e-06, "loss": 0.4524, "step": 4470 }, { "epoch": 1.7210910487898579, "grad_norm": 11.476607322692871, "learning_rate": 6.1843202668890744e-06, "loss": 0.4841, "step": 4480 }, { "epoch": 1.7249327698809065, "grad_norm": 18.251564025878906, "learning_rate": 6.173894912427023e-06, "loss": 0.513, "step": 4490 }, { "epoch": 1.7287744909719556, "grad_norm": 19.187213897705078, "learning_rate": 6.163469557964972e-06, "loss": 0.5729, "step": 4500 }, { "epoch": 1.7326162120630042, "grad_norm": 6.800131320953369, "learning_rate": 6.15304420350292e-06, "loss": 0.4997, "step": 4510 }, { "epoch": 1.736457933154053, "grad_norm": 4.658173561096191, "learning_rate": 6.142618849040868e-06, "loss": 0.6002, "step": 4520 }, { "epoch": 1.740299654245102, "grad_norm": 16.352256774902344, "learning_rate": 6.132193494578816e-06, "loss": 0.4366, "step": 4530 }, { "epoch": 1.7441413753361505, "grad_norm": 15.1599760055542, "learning_rate": 6.121768140116765e-06, "loss": 0.4287, "step": 4540 }, { "epoch": 1.7479830964271994, "grad_norm": 7.974790573120117, "learning_rate": 6.111342785654712e-06, "loss": 0.4333, "step": 4550 }, { "epoch": 1.7518248175182483, "grad_norm": 16.976285934448242, "learning_rate": 6.100917431192661e-06, "loss": 0.3914, "step": 4560 }, { "epoch": 1.755666538609297, "grad_norm": 3.174334764480591, "learning_rate": 6.09049207673061e-06, "loss": 0.3679, "step": 4570 }, { "epoch": 1.7595082597003457, "grad_norm": 32.927494049072266, "learning_rate": 6.0800667222685575e-06, "loss": 0.6631, "step": 4580 }, { "epoch": 1.7633499807913946, "grad_norm": 10.811646461486816, "learning_rate": 6.069641367806506e-06, "loss": 0.5384, "step": 4590 }, { "epoch": 1.7671917018824432, "grad_norm": 18.781339645385742, "learning_rate": 6.059216013344454e-06, "loss": 0.5194, "step": 4600 }, { "epoch": 1.771033422973492, "grad_norm": 21.202974319458008, "learning_rate": 6.048790658882403e-06, "loss": 0.5962, "step": 4610 }, { "epoch": 1.774875144064541, "grad_norm": 15.951532363891602, "learning_rate": 6.03836530442035e-06, "loss": 0.515, "step": 4620 }, { "epoch": 1.7787168651555896, "grad_norm": 21.22430419921875, "learning_rate": 6.0279399499582995e-06, "loss": 0.5223, "step": 4630 }, { "epoch": 1.7825585862466387, "grad_norm": 15.455586433410645, "learning_rate": 6.017514595496248e-06, "loss": 0.526, "step": 4640 }, { "epoch": 1.7864003073376873, "grad_norm": 7.954679012298584, "learning_rate": 6.0070892410341954e-06, "loss": 0.528, "step": 4650 }, { "epoch": 1.790242028428736, "grad_norm": 10.174606323242188, "learning_rate": 5.996663886572144e-06, "loss": 0.4072, "step": 4660 }, { "epoch": 1.794083749519785, "grad_norm": 
30.961978912353516, "learning_rate": 5.986238532110092e-06, "loss": 0.4113, "step": 4670 }, { "epoch": 1.7979254706108336, "grad_norm": 20.904510498046875, "learning_rate": 5.975813177648041e-06, "loss": 0.5494, "step": 4680 }, { "epoch": 1.8017671917018825, "grad_norm": 32.23243713378906, "learning_rate": 5.965387823185988e-06, "loss": 0.65, "step": 4690 }, { "epoch": 1.8056089127929313, "grad_norm": 9.392688751220703, "learning_rate": 5.954962468723937e-06, "loss": 0.4458, "step": 4700 }, { "epoch": 1.80945063388398, "grad_norm": 23.203550338745117, "learning_rate": 5.944537114261886e-06, "loss": 0.3476, "step": 4710 }, { "epoch": 1.8132923549750288, "grad_norm": 21.198944091796875, "learning_rate": 5.934111759799833e-06, "loss": 0.4905, "step": 4720 }, { "epoch": 1.8171340760660777, "grad_norm": 7.847315788269043, "learning_rate": 5.923686405337782e-06, "loss": 0.2561, "step": 4730 }, { "epoch": 1.8209757971571263, "grad_norm": 19.153392791748047, "learning_rate": 5.91326105087573e-06, "loss": 0.5025, "step": 4740 }, { "epoch": 1.8248175182481752, "grad_norm": 13.056893348693848, "learning_rate": 5.902835696413679e-06, "loss": 0.6548, "step": 4750 }, { "epoch": 1.828659239339224, "grad_norm": 6.967535972595215, "learning_rate": 5.892410341951627e-06, "loss": 0.7137, "step": 4760 }, { "epoch": 1.8325009604302727, "grad_norm": 15.452996253967285, "learning_rate": 5.881984987489575e-06, "loss": 0.4757, "step": 4770 }, { "epoch": 1.8363426815213215, "grad_norm": 26.09940528869629, "learning_rate": 5.871559633027524e-06, "loss": 0.3872, "step": 4780 }, { "epoch": 1.8401844026123704, "grad_norm": 13.307591438293457, "learning_rate": 5.861134278565471e-06, "loss": 0.3825, "step": 4790 }, { "epoch": 1.844026123703419, "grad_norm": 11.655804634094238, "learning_rate": 5.85070892410342e-06, "loss": 0.3827, "step": 4800 }, { "epoch": 1.847867844794468, "grad_norm": 30.40113067626953, "learning_rate": 5.840283569641368e-06, "loss": 0.4702, "step": 4810 }, { "epoch": 1.8517095658855167, "grad_norm": 35.177120208740234, "learning_rate": 5.829858215179317e-06, "loss": 0.3508, "step": 4820 }, { "epoch": 1.8555512869765654, "grad_norm": 22.448490142822266, "learning_rate": 5.819432860717265e-06, "loss": 0.5955, "step": 4830 }, { "epoch": 1.8593930080676144, "grad_norm": 12.29417610168457, "learning_rate": 5.809007506255213e-06, "loss": 0.382, "step": 4840 }, { "epoch": 1.863234729158663, "grad_norm": 49.52952194213867, "learning_rate": 5.798582151793162e-06, "loss": 0.519, "step": 4850 }, { "epoch": 1.867076450249712, "grad_norm": 11.148819923400879, "learning_rate": 5.788156797331109e-06, "loss": 0.568, "step": 4860 }, { "epoch": 1.8709181713407608, "grad_norm": 5.599549770355225, "learning_rate": 5.777731442869058e-06, "loss": 0.5568, "step": 4870 }, { "epoch": 1.8747598924318094, "grad_norm": 10.196747779846191, "learning_rate": 5.767306088407007e-06, "loss": 0.4946, "step": 4880 }, { "epoch": 1.8786016135228583, "grad_norm": 32.05455017089844, "learning_rate": 5.756880733944955e-06, "loss": 0.4545, "step": 4890 }, { "epoch": 1.8824433346139071, "grad_norm": 10.12232494354248, "learning_rate": 5.746455379482903e-06, "loss": 0.534, "step": 4900 }, { "epoch": 1.8862850557049557, "grad_norm": 2.4176321029663086, "learning_rate": 5.736030025020851e-06, "loss": 0.4063, "step": 4910 }, { "epoch": 1.8901267767960046, "grad_norm": 10.467967987060547, "learning_rate": 5.7256046705587996e-06, "loss": 0.5102, "step": 4920 }, { "epoch": 1.8939684978870535, "grad_norm": 13.962821006774902, "learning_rate": 
5.715179316096747e-06, "loss": 0.5077, "step": 4930 }, { "epoch": 1.897810218978102, "grad_norm": 5.787616729736328, "learning_rate": 5.7047539616346955e-06, "loss": 0.4232, "step": 4940 }, { "epoch": 1.901651940069151, "grad_norm": 17.215404510498047, "learning_rate": 5.694328607172645e-06, "loss": 0.5902, "step": 4950 }, { "epoch": 1.9054936611601998, "grad_norm": 11.960589408874512, "learning_rate": 5.683903252710593e-06, "loss": 0.4809, "step": 4960 }, { "epoch": 1.9093353822512484, "grad_norm": 25.745502471923828, "learning_rate": 5.673477898248541e-06, "loss": 0.4995, "step": 4970 }, { "epoch": 1.9131771033422975, "grad_norm": 10.54256534576416, "learning_rate": 5.663052543786489e-06, "loss": 0.3592, "step": 4980 }, { "epoch": 1.9170188244333461, "grad_norm": 19.720035552978516, "learning_rate": 5.6526271893244375e-06, "loss": 0.3611, "step": 4990 }, { "epoch": 1.9208605455243948, "grad_norm": 8.2754487991333, "learning_rate": 5.642201834862385e-06, "loss": 0.3646, "step": 5000 }, { "epoch": 1.9247022666154439, "grad_norm": 16.336483001708984, "learning_rate": 5.631776480400334e-06, "loss": 0.6616, "step": 5010 }, { "epoch": 1.9285439877064925, "grad_norm": 24.838665008544922, "learning_rate": 5.621351125938283e-06, "loss": 0.529, "step": 5020 }, { "epoch": 1.9323857087975413, "grad_norm": 16.165803909301758, "learning_rate": 5.610925771476231e-06, "loss": 0.5018, "step": 5030 }, { "epoch": 1.9362274298885902, "grad_norm": 12.195728302001953, "learning_rate": 5.600500417014179e-06, "loss": 0.4034, "step": 5040 }, { "epoch": 1.9400691509796388, "grad_norm": 16.773571014404297, "learning_rate": 5.590075062552127e-06, "loss": 0.5083, "step": 5050 }, { "epoch": 1.9439108720706877, "grad_norm": 22.202640533447266, "learning_rate": 5.579649708090075e-06, "loss": 0.6077, "step": 5060 }, { "epoch": 1.9477525931617365, "grad_norm": 14.229264259338379, "learning_rate": 5.569224353628023e-06, "loss": 0.3105, "step": 5070 }, { "epoch": 1.9515943142527852, "grad_norm": 13.377602577209473, "learning_rate": 5.558798999165972e-06, "loss": 0.4201, "step": 5080 }, { "epoch": 1.955436035343834, "grad_norm": 3.7054190635681152, "learning_rate": 5.548373644703921e-06, "loss": 0.3833, "step": 5090 }, { "epoch": 1.9592777564348829, "grad_norm": 1.4279061555862427, "learning_rate": 5.537948290241869e-06, "loss": 0.2661, "step": 5100 }, { "epoch": 1.9631194775259315, "grad_norm": 13.63090705871582, "learning_rate": 5.5275229357798165e-06, "loss": 0.559, "step": 5110 }, { "epoch": 1.9669611986169804, "grad_norm": 5.49478816986084, "learning_rate": 5.517097581317765e-06, "loss": 0.5809, "step": 5120 }, { "epoch": 1.9708029197080292, "grad_norm": 14.8590726852417, "learning_rate": 5.506672226855714e-06, "loss": 0.3949, "step": 5130 }, { "epoch": 1.9746446407990779, "grad_norm": 24.46096420288086, "learning_rate": 5.496246872393661e-06, "loss": 0.5104, "step": 5140 }, { "epoch": 1.978486361890127, "grad_norm": 18.829071044921875, "learning_rate": 5.48582151793161e-06, "loss": 0.5408, "step": 5150 }, { "epoch": 1.9823280829811756, "grad_norm": 12.263307571411133, "learning_rate": 5.4753961634695585e-06, "loss": 0.4514, "step": 5160 }, { "epoch": 1.9861698040722242, "grad_norm": 3.8610880374908447, "learning_rate": 5.464970809007507e-06, "loss": 0.534, "step": 5170 }, { "epoch": 1.9900115251632733, "grad_norm": 14.170186042785645, "learning_rate": 5.4545454545454545e-06, "loss": 0.4005, "step": 5180 }, { "epoch": 1.993853246254322, "grad_norm": 13.322096824645996, "learning_rate": 5.444120100083403e-06, "loss": 
0.3869, "step": 5190 }, { "epoch": 1.9976949673453708, "grad_norm": 24.19983673095703, "learning_rate": 5.433694745621352e-06, "loss": 0.6315, "step": 5200 }, { "epoch": 2.0, "eval_accuracy": 0.7710114302180386, "eval_f1_per_label": [ 0.7862362971985384, 0.701864199210777, 0.8301230992034757 ], "eval_f1_weighted": 0.768826861103172, "eval_loss": 0.6022440791130066, "eval_precision_per_label": [ 0.7314447592067989, 0.7556401992382069, 0.8264129181084199 ], "eval_precision_weighted": 0.77194411701186, "eval_recall_per_label": [ 0.8499012508229098, 0.6552337398373984, 0.8338667442537097 ], "eval_recall_weighted": 0.7710114302180386, "eval_runtime": 38.8175, "eval_samples_per_second": 268.204, "eval_steps_per_second": 33.542, "step": 5206 } ], "logging_steps": 10, "max_steps": 10412, "num_input_tokens_seen": 0, "num_train_epochs": 4, "save_steps": 500, "stateful_callbacks": { "EarlyStoppingCallback": { "args": { "early_stopping_patience": 3, "early_stopping_threshold": 0.0 }, "attributes": { "early_stopping_patience_counter": 1 } }, "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": false }, "attributes": {} } }, "total_flos": 1317340020849300.0, "train_batch_size": 8, "trial_name": null, "trial_params": null }