{
"best_metric": 0.5626052618026733,
"best_model_checkpoint": "/content/drive/MyDrive/tw-roberta-base-sentiment-FT-v2/checkpoint-2603",
"epoch": 1.0,
"eval_steps": 500,
"global_step": 2603,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
|
{ |
|
"epoch": 0.00384172109104879, |
|
"grad_norm": 19.648235321044922, |
|
"learning_rate": 1.2195121951219514e-07, |
|
"loss": 0.7091, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.00768344218209758, |
|
"grad_norm": 15.138011932373047, |
|
"learning_rate": 2.439024390243903e-07, |
|
"loss": 0.5428, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01152516327314637, |
|
"grad_norm": 15.91871452331543, |
|
"learning_rate": 3.6585365853658536e-07, |
|
"loss": 0.709, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.01536688436419516, |
|
"grad_norm": 11.673059463500977, |
|
"learning_rate": 4.878048780487805e-07, |
|
"loss": 0.6269, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.01920860545524395, |
|
"grad_norm": 10.827598571777344, |
|
"learning_rate": 6.097560975609757e-07, |
|
"loss": 0.6899, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.02305032654629274, |
|
"grad_norm": 29.76123809814453, |
|
"learning_rate": 7.317073170731707e-07, |
|
"loss": 0.7235, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.02689204763734153, |
|
"grad_norm": 16.770448684692383, |
|
"learning_rate": 8.53658536585366e-07, |
|
"loss": 0.731, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.03073376872839032, |
|
"grad_norm": 17.501832962036133, |
|
"learning_rate": 9.75609756097561e-07, |
|
"loss": 0.7907, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.03457548981943911, |
|
"grad_norm": 5.903749465942383, |
|
"learning_rate": 1.0975609756097562e-06, |
|
"loss": 0.6549, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.0384172109104879, |
|
"grad_norm": 12.823253631591797, |
|
"learning_rate": 1.2195121951219514e-06, |
|
"loss": 0.6833, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.042258932001536686, |
|
"grad_norm": 14.516304969787598, |
|
"learning_rate": 1.3414634146341465e-06, |
|
"loss": 0.7764, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.04610065309258548, |
|
"grad_norm": 17.775850296020508, |
|
"learning_rate": 1.4634146341463414e-06, |
|
"loss": 0.6677, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.049942374183634265, |
|
"grad_norm": 16.348901748657227, |
|
"learning_rate": 1.5853658536585368e-06, |
|
"loss": 0.6708, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.05378409527468306, |
|
"grad_norm": 20.729867935180664, |
|
"learning_rate": 1.707317073170732e-06, |
|
"loss": 0.685, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.05762581636573185, |
|
"grad_norm": 23.60687255859375, |
|
"learning_rate": 1.8292682926829268e-06, |
|
"loss": 0.7048, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.06146753745678064, |
|
"grad_norm": 24.990461349487305, |
|
"learning_rate": 1.951219512195122e-06, |
|
"loss": 0.6849, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.06530925854782943, |
|
"grad_norm": 21.754741668701172, |
|
"learning_rate": 2.073170731707317e-06, |
|
"loss": 0.6801, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.06915097963887822, |
|
"grad_norm": 19.427799224853516, |
|
"learning_rate": 2.1951219512195125e-06, |
|
"loss": 0.6006, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.072992700729927, |
|
"grad_norm": 11.91465950012207, |
|
"learning_rate": 2.317073170731708e-06, |
|
"loss": 0.6498, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.0768344218209758, |
|
"grad_norm": 24.343521118164062, |
|
"learning_rate": 2.4390243902439027e-06, |
|
"loss": 0.8272, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.08067614291202459, |
|
"grad_norm": 11.623435020446777, |
|
"learning_rate": 2.5609756097560977e-06, |
|
"loss": 0.6703, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.08451786400307337, |
|
"grad_norm": 8.984451293945312, |
|
"learning_rate": 2.682926829268293e-06, |
|
"loss": 0.6965, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.08835958509412217, |
|
"grad_norm": 26.78940773010254, |
|
"learning_rate": 2.8048780487804884e-06, |
|
"loss": 0.7135, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.09220130618517096, |
|
"grad_norm": 17.50589370727539, |
|
"learning_rate": 2.926829268292683e-06, |
|
"loss": 0.6637, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.09604302727621974, |
|
"grad_norm": 13.149524688720703, |
|
"learning_rate": 3.0487804878048782e-06, |
|
"loss": 0.5756, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.09988474836726853, |
|
"grad_norm": 10.727895736694336, |
|
"learning_rate": 3.1707317073170736e-06, |
|
"loss": 0.594, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.10372646945831733, |
|
"grad_norm": 15.031158447265625, |
|
"learning_rate": 3.292682926829269e-06, |
|
"loss": 0.7186, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.10756819054936612, |
|
"grad_norm": 10.520498275756836, |
|
"learning_rate": 3.414634146341464e-06, |
|
"loss": 0.5746, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.1114099116404149, |
|
"grad_norm": 19.504133224487305, |
|
"learning_rate": 3.5365853658536588e-06, |
|
"loss": 0.7709, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.1152516327314637, |
|
"grad_norm": 12.495412826538086, |
|
"learning_rate": 3.6585365853658537e-06, |
|
"loss": 0.6508, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.11909335382251249, |
|
"grad_norm": 9.115503311157227, |
|
"learning_rate": 3.780487804878049e-06, |
|
"loss": 0.5847, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.12293507491356127, |
|
"grad_norm": 12.583561897277832, |
|
"learning_rate": 3.902439024390244e-06, |
|
"loss": 0.6114, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.12677679600461006, |
|
"grad_norm": 16.474716186523438, |
|
"learning_rate": 4.024390243902439e-06, |
|
"loss": 0.5819, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.13061851709565886, |
|
"grad_norm": 12.282912254333496, |
|
"learning_rate": 4.146341463414634e-06, |
|
"loss": 0.5974, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.13446023818670763, |
|
"grad_norm": 14.967294692993164, |
|
"learning_rate": 4.268292682926829e-06, |
|
"loss": 0.515, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.13830195927775643, |
|
"grad_norm": 22.999889373779297, |
|
"learning_rate": 4.390243902439025e-06, |
|
"loss": 0.8314, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.14214368036880523, |
|
"grad_norm": 13.173623085021973, |
|
"learning_rate": 4.51219512195122e-06, |
|
"loss": 0.7274, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.145985401459854, |
|
"grad_norm": 10.259631156921387, |
|
"learning_rate": 4.634146341463416e-06, |
|
"loss": 0.5758, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.1498271225509028, |
|
"grad_norm": 20.109394073486328, |
|
"learning_rate": 4.75609756097561e-06, |
|
"loss": 0.6767, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.1536688436419516, |
|
"grad_norm": 12.53744125366211, |
|
"learning_rate": 4.8780487804878055e-06, |
|
"loss": 0.6554, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.15751056473300037, |
|
"grad_norm": 32.90926742553711, |
|
"learning_rate": 5e-06, |
|
"loss": 0.751, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.16135228582404917, |
|
"grad_norm": 13.864742279052734, |
|
"learning_rate": 5.121951219512195e-06, |
|
"loss": 0.5644, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.16519400691509797, |
|
"grad_norm": 16.864086151123047, |
|
"learning_rate": 5.243902439024391e-06, |
|
"loss": 0.7212, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.16903572800614675, |
|
"grad_norm": 8.035212516784668, |
|
"learning_rate": 5.365853658536586e-06, |
|
"loss": 0.6411, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.17287744909719555, |
|
"grad_norm": 15.027800559997559, |
|
"learning_rate": 5.487804878048781e-06, |
|
"loss": 0.7331, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.17671917018824435, |
|
"grad_norm": 9.771449089050293, |
|
"learning_rate": 5.609756097560977e-06, |
|
"loss": 0.6245, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.18056089127929312, |
|
"grad_norm": 7.735960960388184, |
|
"learning_rate": 5.731707317073171e-06, |
|
"loss": 0.5251, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.18440261237034192, |
|
"grad_norm": 7.348488807678223, |
|
"learning_rate": 5.853658536585366e-06, |
|
"loss": 0.5108, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.1882443334613907, |
|
"grad_norm": 30.567018508911133, |
|
"learning_rate": 5.9756097560975615e-06, |
|
"loss": 0.5508, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.1920860545524395, |
|
"grad_norm": 19.212230682373047, |
|
"learning_rate": 6.0975609756097564e-06, |
|
"loss": 0.5997, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.1959277756434883, |
|
"grad_norm": 17.70198631286621, |
|
"learning_rate": 6.219512195121951e-06, |
|
"loss": 0.8108, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.19976949673453706, |
|
"grad_norm": 13.471924781799316, |
|
"learning_rate": 6.341463414634147e-06, |
|
"loss": 0.6738, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.20361121782558586, |
|
"grad_norm": 15.494725227355957, |
|
"learning_rate": 6.463414634146342e-06, |
|
"loss": 0.8114, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.20745293891663466, |
|
"grad_norm": 9.314640045166016, |
|
"learning_rate": 6.585365853658538e-06, |
|
"loss": 0.5791, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.21129466000768343, |
|
"grad_norm": 11.314807891845703, |
|
"learning_rate": 6.707317073170733e-06, |
|
"loss": 0.6536, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.21513638109873223, |
|
"grad_norm": 16.462522506713867, |
|
"learning_rate": 6.829268292682928e-06, |
|
"loss": 0.5598, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.21897810218978103, |
|
"grad_norm": 36.564476013183594, |
|
"learning_rate": 6.951219512195122e-06, |
|
"loss": 0.5573, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.2228198232808298, |
|
"grad_norm": 16.07019805908203, |
|
"learning_rate": 7.0731707317073175e-06, |
|
"loss": 0.5328, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.2266615443718786, |
|
"grad_norm": 6.132660388946533, |
|
"learning_rate": 7.1951219512195125e-06, |
|
"loss": 0.4499, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.2305032654629274, |
|
"grad_norm": 21.4416561126709, |
|
"learning_rate": 7.317073170731707e-06, |
|
"loss": 0.6584, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.23434498655397618, |
|
"grad_norm": 18.0037841796875, |
|
"learning_rate": 7.439024390243903e-06, |
|
"loss": 0.4844, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.23818670764502498, |
|
"grad_norm": 17.625553131103516, |
|
"learning_rate": 7.560975609756098e-06, |
|
"loss": 0.5122, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.24202842873607375, |
|
"grad_norm": 10.607305526733398, |
|
"learning_rate": 7.682926829268293e-06, |
|
"loss": 0.5188, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.24587014982712255, |
|
"grad_norm": 26.88294792175293, |
|
"learning_rate": 7.804878048780489e-06, |
|
"loss": 0.7002, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.24971187091817135, |
|
"grad_norm": 38.178287506103516, |
|
"learning_rate": 7.926829268292685e-06, |
|
"loss": 0.9407, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.2535535920092201, |
|
"grad_norm": 8.800226211547852, |
|
"learning_rate": 8.048780487804879e-06, |
|
"loss": 0.4968, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.2573953131002689, |
|
"grad_norm": 30.46478843688965, |
|
"learning_rate": 8.170731707317073e-06, |
|
"loss": 0.5293, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.2612370341913177, |
|
"grad_norm": 20.630985260009766, |
|
"learning_rate": 8.292682926829268e-06, |
|
"loss": 0.5382, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.2650787552823665, |
|
"grad_norm": 19.19484519958496, |
|
"learning_rate": 8.414634146341464e-06, |
|
"loss": 0.6311, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.26892047637341526, |
|
"grad_norm": 26.084064483642578, |
|
"learning_rate": 8.536585365853658e-06, |
|
"loss": 0.6893, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.27276219746446406, |
|
"grad_norm": 11.942285537719727, |
|
"learning_rate": 8.658536585365854e-06, |
|
"loss": 0.5938, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.27660391855551286, |
|
"grad_norm": 25.995960235595703, |
|
"learning_rate": 8.78048780487805e-06, |
|
"loss": 0.5693, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.28044563964656166, |
|
"grad_norm": 10.440145492553711, |
|
"learning_rate": 8.902439024390244e-06, |
|
"loss": 0.5937, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.28428736073761046, |
|
"grad_norm": 21.39019012451172, |
|
"learning_rate": 9.02439024390244e-06, |
|
"loss": 0.664, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.28812908182865926, |
|
"grad_norm": 11.281585693359375, |
|
"learning_rate": 9.146341463414635e-06, |
|
"loss": 0.5178, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.291970802919708, |
|
"grad_norm": 13.577981948852539, |
|
"learning_rate": 9.268292682926831e-06, |
|
"loss": 0.5405, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.2958125240107568, |
|
"grad_norm": 55.213340759277344, |
|
"learning_rate": 9.390243902439025e-06, |
|
"loss": 0.5633, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.2996542451018056, |
|
"grad_norm": 19.74529266357422, |
|
"learning_rate": 9.51219512195122e-06, |
|
"loss": 0.7541, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.3034959661928544, |
|
"grad_norm": 45.70171356201172, |
|
"learning_rate": 9.634146341463415e-06, |
|
"loss": 0.6196, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.3073376872839032, |
|
"grad_norm": 23.01153564453125, |
|
"learning_rate": 9.756097560975611e-06, |
|
"loss": 0.7121, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.311179408374952, |
|
"grad_norm": 17.099529266357422, |
|
"learning_rate": 9.878048780487805e-06, |
|
"loss": 0.6021, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.31502112946600075, |
|
"grad_norm": 11.891583442687988, |
|
"learning_rate": 1e-05, |
|
"loss": 0.6071, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.31886285055704955, |
|
"grad_norm": 16.423572540283203, |
|
"learning_rate": 9.98957464553795e-06, |
|
"loss": 0.539, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.32270457164809835, |
|
"grad_norm": 11.026520729064941, |
|
"learning_rate": 9.979149291075898e-06, |
|
"loss": 0.615, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.32654629273914715, |
|
"grad_norm": 25.40671730041504, |
|
"learning_rate": 9.968723936613845e-06, |
|
"loss": 0.4819, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.33038801383019595, |
|
"grad_norm": 30.425880432128906, |
|
"learning_rate": 9.958298582151794e-06, |
|
"loss": 0.7362, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.3342297349212447, |
|
"grad_norm": 21.710176467895508, |
|
"learning_rate": 9.947873227689742e-06, |
|
"loss": 0.5052, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.3380714560122935, |
|
"grad_norm": 13.537766456604004, |
|
"learning_rate": 9.93744787322769e-06, |
|
"loss": 0.6032, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.3419131771033423, |
|
"grad_norm": 19.592073440551758, |
|
"learning_rate": 9.927022518765639e-06, |
|
"loss": 0.5222, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.3457548981943911, |
|
"grad_norm": 10.528463363647461, |
|
"learning_rate": 9.916597164303588e-06, |
|
"loss": 0.596, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.3495966192854399, |
|
"grad_norm": 15.643308639526367, |
|
"learning_rate": 9.906171809841536e-06, |
|
"loss": 0.7414, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.3534383403764887, |
|
"grad_norm": 22.77689552307129, |
|
"learning_rate": 9.895746455379483e-06, |
|
"loss": 0.5151, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.35728006146753744, |
|
"grad_norm": 23.217538833618164, |
|
"learning_rate": 9.885321100917432e-06, |
|
"loss": 0.7501, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.36112178255858624, |
|
"grad_norm": 21.08719825744629, |
|
"learning_rate": 9.874895746455382e-06, |
|
"loss": 0.4944, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.36496350364963503, |
|
"grad_norm": 19.93057632446289, |
|
"learning_rate": 9.864470391993327e-06, |
|
"loss": 0.7289, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.36880522474068383, |
|
"grad_norm": 14.810643196105957, |
|
"learning_rate": 9.854045037531277e-06, |
|
"loss": 0.4956, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.37264694583173263, |
|
"grad_norm": 19.596710205078125, |
|
"learning_rate": 9.843619683069226e-06, |
|
"loss": 0.5528, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.3764886669227814, |
|
"grad_norm": 23.963172912597656, |
|
"learning_rate": 9.833194328607173e-06, |
|
"loss": 0.5904, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.3803303880138302, |
|
"grad_norm": 14.250468254089355, |
|
"learning_rate": 9.822768974145121e-06, |
|
"loss": 0.6762, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.384172109104879, |
|
"grad_norm": 30.411888122558594, |
|
"learning_rate": 9.81234361968307e-06, |
|
"loss": 0.6031, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.3880138301959278, |
|
"grad_norm": 4.892022132873535, |
|
"learning_rate": 9.80191826522102e-06, |
|
"loss": 0.5832, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.3918555512869766, |
|
"grad_norm": 14.026780128479004, |
|
"learning_rate": 9.791492910758967e-06, |
|
"loss": 0.4432, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.3956972723780254, |
|
"grad_norm": 15.22079086303711, |
|
"learning_rate": 9.781067556296915e-06, |
|
"loss": 0.5003, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.3995389934690741, |
|
"grad_norm": 38.46358871459961, |
|
"learning_rate": 9.770642201834864e-06, |
|
"loss": 0.6863, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.4033807145601229, |
|
"grad_norm": 28.509458541870117, |
|
"learning_rate": 9.760216847372811e-06, |
|
"loss": 0.6557, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.4072224356511717, |
|
"grad_norm": 10.18283748626709, |
|
"learning_rate": 9.749791492910759e-06, |
|
"loss": 0.475, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.4110641567422205, |
|
"grad_norm": 16.280475616455078, |
|
"learning_rate": 9.739366138448708e-06, |
|
"loss": 0.5423, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.4149058778332693, |
|
"grad_norm": 27.256633758544922, |
|
"learning_rate": 9.728940783986657e-06, |
|
"loss": 0.4638, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.4187475989243181, |
|
"grad_norm": 26.906049728393555, |
|
"learning_rate": 9.718515429524605e-06, |
|
"loss": 0.6528, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.42258932001536686, |
|
"grad_norm": 10.475133895874023, |
|
"learning_rate": 9.708090075062552e-06, |
|
"loss": 0.8387, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.42643104110641566, |
|
"grad_norm": 14.576977729797363, |
|
"learning_rate": 9.697664720600502e-06, |
|
"loss": 0.6325, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.43027276219746446, |
|
"grad_norm": 17.823413848876953, |
|
"learning_rate": 9.68723936613845e-06, |
|
"loss": 0.613, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.43411448328851326, |
|
"grad_norm": 12.499800682067871, |
|
"learning_rate": 9.676814011676397e-06, |
|
"loss": 0.5685, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.43795620437956206, |
|
"grad_norm": 19.12653923034668, |
|
"learning_rate": 9.666388657214346e-06, |
|
"loss": 0.8678, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.4417979254706108, |
|
"grad_norm": 5.942495822906494, |
|
"learning_rate": 9.655963302752295e-06, |
|
"loss": 0.4594, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.4456396465616596, |
|
"grad_norm": 19.233552932739258, |
|
"learning_rate": 9.645537948290243e-06, |
|
"loss": 0.5747, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.4494813676527084, |
|
"grad_norm": 17.434133529663086, |
|
"learning_rate": 9.63511259382819e-06, |
|
"loss": 0.5121, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.4533230887437572, |
|
"grad_norm": 14.78231143951416, |
|
"learning_rate": 9.62468723936614e-06, |
|
"loss": 0.7225, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.457164809834806, |
|
"grad_norm": 23.81663703918457, |
|
"learning_rate": 9.614261884904089e-06, |
|
"loss": 0.8527, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.4610065309258548, |
|
"grad_norm": 18.266740798950195, |
|
"learning_rate": 9.603836530442035e-06, |
|
"loss": 0.4101, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.46484825201690355, |
|
"grad_norm": 23.31222152709961, |
|
"learning_rate": 9.593411175979984e-06, |
|
"loss": 0.5847, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.46868997310795235, |
|
"grad_norm": 11.039971351623535, |
|
"learning_rate": 9.582985821517933e-06, |
|
"loss": 0.6515, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.47253169419900115, |
|
"grad_norm": 27.05122184753418, |
|
"learning_rate": 9.57256046705588e-06, |
|
"loss": 0.5968, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.47637341529004995, |
|
"grad_norm": 18.416839599609375, |
|
"learning_rate": 9.562135112593828e-06, |
|
"loss": 0.6339, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.48021513638109875, |
|
"grad_norm": 18.275182723999023, |
|
"learning_rate": 9.551709758131778e-06, |
|
"loss": 0.3595, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.4840568574721475, |
|
"grad_norm": 32.038143157958984, |
|
"learning_rate": 9.541284403669727e-06, |
|
"loss": 0.8281, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.4878985785631963, |
|
"grad_norm": 19.010108947753906, |
|
"learning_rate": 9.530859049207674e-06, |
|
"loss": 0.7158, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.4917402996542451, |
|
"grad_norm": 9.5922269821167, |
|
"learning_rate": 9.520433694745622e-06, |
|
"loss": 0.5315, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.4955820207452939, |
|
"grad_norm": 11.005895614624023, |
|
"learning_rate": 9.510008340283571e-06, |
|
"loss": 0.3599, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.4994237418363427, |
|
"grad_norm": 51.6233024597168, |
|
"learning_rate": 9.499582985821519e-06, |
|
"loss": 0.537, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.5032654629273915, |
|
"grad_norm": 19.033329010009766, |
|
"learning_rate": 9.489157631359466e-06, |
|
"loss": 0.6083, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.5071071840184402, |
|
"grad_norm": 16.91973114013672, |
|
"learning_rate": 9.478732276897415e-06, |
|
"loss": 0.7693, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.5109489051094891, |
|
"grad_norm": 23.38747215270996, |
|
"learning_rate": 9.468306922435365e-06, |
|
"loss": 0.6646, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.5147906262005378, |
|
"grad_norm": 13.772806167602539, |
|
"learning_rate": 9.457881567973312e-06, |
|
"loss": 0.4763, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.5186323472915866, |
|
"grad_norm": 8.950833320617676, |
|
"learning_rate": 9.44745621351126e-06, |
|
"loss": 0.5793, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.5224740683826354, |
|
"grad_norm": 5.6142964363098145, |
|
"learning_rate": 9.437030859049209e-06, |
|
"loss": 0.5797, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.5263157894736842, |
|
"grad_norm": 18.615188598632812, |
|
"learning_rate": 9.426605504587157e-06, |
|
"loss": 0.6041, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.530157510564733, |
|
"grad_norm": 10.953849792480469, |
|
"learning_rate": 9.416180150125104e-06, |
|
"loss": 0.5933, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.5339992316557818, |
|
"grad_norm": 11.613428115844727, |
|
"learning_rate": 9.405754795663053e-06, |
|
"loss": 0.5275, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.5378409527468305, |
|
"grad_norm": 12.725924491882324, |
|
"learning_rate": 9.395329441201003e-06, |
|
"loss": 0.5673, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.5416826738378794, |
|
"grad_norm": 34.54634094238281, |
|
"learning_rate": 9.38490408673895e-06, |
|
"loss": 0.6717, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.5455243949289281, |
|
"grad_norm": 21.028316497802734, |
|
"learning_rate": 9.374478732276898e-06, |
|
"loss": 0.5483, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.549366116019977, |
|
"grad_norm": 30.281667709350586, |
|
"learning_rate": 9.364053377814847e-06, |
|
"loss": 0.7806, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.5532078371110257, |
|
"grad_norm": 11.983960151672363, |
|
"learning_rate": 9.353628023352795e-06, |
|
"loss": 0.5061, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.5570495582020746, |
|
"grad_norm": 6.99747896194458, |
|
"learning_rate": 9.343202668890742e-06, |
|
"loss": 0.5623, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.5608912792931233, |
|
"grad_norm": 11.219843864440918, |
|
"learning_rate": 9.332777314428691e-06, |
|
"loss": 0.6227, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.5647330003841721, |
|
"grad_norm": 8.85550308227539, |
|
"learning_rate": 9.32235195996664e-06, |
|
"loss": 0.5908, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.5685747214752209, |
|
"grad_norm": 15.55632209777832, |
|
"learning_rate": 9.311926605504588e-06, |
|
"loss": 0.5888, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.5724164425662697, |
|
"grad_norm": 5.281271457672119, |
|
"learning_rate": 9.301501251042536e-06, |
|
"loss": 0.4795, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.5762581636573185, |
|
"grad_norm": 10.58825397491455, |
|
"learning_rate": 9.291075896580485e-06, |
|
"loss": 0.4825, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.5800998847483673, |
|
"grad_norm": 13.970091819763184, |
|
"learning_rate": 9.280650542118432e-06, |
|
"loss": 0.6107, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.583941605839416, |
|
"grad_norm": 15.610709190368652, |
|
"learning_rate": 9.270225187656382e-06, |
|
"loss": 0.454, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.5877833269304649, |
|
"grad_norm": 9.203128814697266, |
|
"learning_rate": 9.25979983319433e-06, |
|
"loss": 0.596, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.5916250480215136, |
|
"grad_norm": 12.340123176574707, |
|
"learning_rate": 9.249374478732278e-06, |
|
"loss": 0.6622, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.5954667691125625, |
|
"grad_norm": 6.894665718078613, |
|
"learning_rate": 9.238949124270226e-06, |
|
"loss": 0.4944, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.5993084902036112, |
|
"grad_norm": 22.704559326171875, |
|
"learning_rate": 9.228523769808174e-06, |
|
"loss": 0.6151, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.60315021129466, |
|
"grad_norm": 6.272796630859375, |
|
"learning_rate": 9.218098415346123e-06, |
|
"loss": 0.4866, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.6069919323857088, |
|
"grad_norm": 40.39881134033203, |
|
"learning_rate": 9.20767306088407e-06, |
|
"loss": 0.5471, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.6108336534767576, |
|
"grad_norm": 9.417654037475586, |
|
"learning_rate": 9.19724770642202e-06, |
|
"loss": 0.5702, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.6146753745678064, |
|
"grad_norm": 8.880293846130371, |
|
"learning_rate": 9.186822351959967e-06, |
|
"loss": 0.7593, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.6185170956588552, |
|
"grad_norm": 16.337783813476562, |
|
"learning_rate": 9.176396997497916e-06, |
|
"loss": 0.3708, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.622358816749904, |
|
"grad_norm": 15.34815502166748, |
|
"learning_rate": 9.165971643035864e-06, |
|
"loss": 0.6829, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.6262005378409528, |
|
"grad_norm": 12.125506401062012, |
|
"learning_rate": 9.155546288573811e-06, |
|
"loss": 0.5839, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.6300422589320015, |
|
"grad_norm": 12.340716361999512, |
|
"learning_rate": 9.14512093411176e-06, |
|
"loss": 0.5855, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.6338839800230504, |
|
"grad_norm": 17.276071548461914, |
|
"learning_rate": 9.134695579649708e-06, |
|
"loss": 0.4579, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.6377257011140991, |
|
"grad_norm": 4.054512977600098, |
|
"learning_rate": 9.124270225187658e-06, |
|
"loss": 0.3717, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.641567422205148, |
|
"grad_norm": 26.277875900268555, |
|
"learning_rate": 9.113844870725605e-06, |
|
"loss": 0.6934, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.6454091432961967, |
|
"grad_norm": 23.17993927001953, |
|
"learning_rate": 9.103419516263554e-06, |
|
"loss": 0.8507, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.6492508643872454, |
|
"grad_norm": 30.25948715209961, |
|
"learning_rate": 9.092994161801502e-06, |
|
"loss": 0.5851, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.6530925854782943, |
|
"grad_norm": 12.083464622497559, |
|
"learning_rate": 9.08256880733945e-06, |
|
"loss": 0.5214, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.656934306569343, |
|
"grad_norm": 26.132946014404297, |
|
"learning_rate": 9.072143452877399e-06, |
|
"loss": 0.5715, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.6607760276603919, |
|
"grad_norm": 13.83061408996582, |
|
"learning_rate": 9.061718098415346e-06, |
|
"loss": 0.6076, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.6646177487514406, |
|
"grad_norm": 36.992679595947266, |
|
"learning_rate": 9.051292743953295e-06, |
|
"loss": 0.5795, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.6684594698424894, |
|
"grad_norm": 24.426977157592773, |
|
"learning_rate": 9.040867389491243e-06, |
|
"loss": 0.6913, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.6723011909335382, |
|
"grad_norm": 15.399202346801758, |
|
"learning_rate": 9.030442035029192e-06, |
|
"loss": 0.647, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.676142912024587, |
|
"grad_norm": 36.72813034057617, |
|
"learning_rate": 9.02001668056714e-06, |
|
"loss": 0.7641, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.6799846331156358, |
|
"grad_norm": 19.219661712646484, |
|
"learning_rate": 9.009591326105089e-06, |
|
"loss": 0.7111, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.6838263542066846, |
|
"grad_norm": 10.353839874267578, |
|
"learning_rate": 8.999165971643037e-06, |
|
"loss": 0.437, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.6876680752977334, |
|
"grad_norm": 12.179790496826172, |
|
"learning_rate": 8.988740617180984e-06, |
|
"loss": 0.6514, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.6915097963887822, |
|
"grad_norm": 15.036273956298828, |
|
"learning_rate": 8.978315262718933e-06, |
|
"loss": 0.4611, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.6953515174798309, |
|
"grad_norm": 12.146955490112305, |
|
"learning_rate": 8.967889908256881e-06, |
|
"loss": 0.5176, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.6991932385708798, |
|
"grad_norm": 16.004959106445312, |
|
"learning_rate": 8.95746455379483e-06, |
|
"loss": 0.3749, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.7030349596619285, |
|
"grad_norm": 23.500526428222656, |
|
"learning_rate": 8.947039199332778e-06, |
|
"loss": 0.6124, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.7068766807529774, |
|
"grad_norm": 11.367331504821777, |
|
"learning_rate": 8.936613844870727e-06, |
|
"loss": 0.3982, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.7107184018440261, |
|
"grad_norm": 13.60319709777832, |
|
"learning_rate": 8.926188490408674e-06, |
|
"loss": 0.4618, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.7145601229350749, |
|
"grad_norm": 9.807296752929688, |
|
"learning_rate": 8.915763135946624e-06, |
|
"loss": 0.552, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.7184018440261237, |
|
"grad_norm": 41.238895416259766, |
|
"learning_rate": 8.905337781484571e-06, |
|
"loss": 0.738, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.7222435651171725, |
|
"grad_norm": 8.117176055908203, |
|
"learning_rate": 8.894912427022519e-06, |
|
"loss": 0.546, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.7260852862082213, |
|
"grad_norm": 8.292084693908691, |
|
"learning_rate": 8.884487072560468e-06, |
|
"loss": 0.5098, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.7299270072992701, |
|
"grad_norm": 16.20579719543457, |
|
"learning_rate": 8.874061718098416e-06, |
|
"loss": 0.5693, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.7337687283903188, |
|
"grad_norm": 10.686980247497559, |
|
"learning_rate": 8.863636363636365e-06, |
|
"loss": 0.6848, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.7376104494813677, |
|
"grad_norm": 12.386652946472168, |
|
"learning_rate": 8.853211009174312e-06, |
|
"loss": 0.5282, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.7414521705724164, |
|
"grad_norm": 11.129962921142578, |
|
"learning_rate": 8.842785654712262e-06, |
|
"loss": 0.5789, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.7452938916634653, |
|
"grad_norm": 8.727615356445312, |
|
"learning_rate": 8.83236030025021e-06, |
|
"loss": 0.5936, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.749135612754514, |
|
"grad_norm": 11.261787414550781, |
|
"learning_rate": 8.821934945788157e-06, |
|
"loss": 0.5308, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.7529773338455628, |
|
"grad_norm": 23.387935638427734, |
|
"learning_rate": 8.811509591326106e-06, |
|
"loss": 0.5074, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.7568190549366116, |
|
"grad_norm": 20.772794723510742, |
|
"learning_rate": 8.801084236864054e-06, |
|
"loss": 0.6157, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.7606607760276604, |
|
"grad_norm": 23.0604305267334, |
|
"learning_rate": 8.790658882402003e-06, |
|
"loss": 0.5272, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.7645024971187092, |
|
"grad_norm": 44.302425384521484, |
|
"learning_rate": 8.78023352793995e-06, |
|
"loss": 0.6709, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.768344218209758, |
|
"grad_norm": 16.66979217529297, |
|
"learning_rate": 8.7698081734779e-06, |
|
"loss": 0.4651, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.7721859393008068, |
|
"grad_norm": 18.14614486694336, |
|
"learning_rate": 8.759382819015847e-06, |
|
"loss": 0.5747, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.7760276603918556, |
|
"grad_norm": 10.635650634765625, |
|
"learning_rate": 8.748957464553796e-06, |
|
"loss": 0.5169, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.7798693814829043, |
|
"grad_norm": 13.54704475402832, |
|
"learning_rate": 8.738532110091744e-06, |
|
"loss": 0.4784, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.7837111025739532, |
|
"grad_norm": 12.35689926147461, |
|
"learning_rate": 8.728106755629691e-06, |
|
"loss": 0.5529, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.7875528236650019, |
|
"grad_norm": 7.250340461730957, |
|
"learning_rate": 8.71768140116764e-06, |
|
"loss": 0.6229, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.7913945447560508, |
|
"grad_norm": 16.60529327392578, |
|
"learning_rate": 8.707256046705588e-06, |
|
"loss": 0.5726, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.7952362658470995, |
|
"grad_norm": 18.4666805267334, |
|
"learning_rate": 8.696830692243537e-06, |
|
"loss": 0.5643, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.7990779869381482, |
|
"grad_norm": 31.986207962036133, |
|
"learning_rate": 8.686405337781485e-06, |
|
"loss": 0.4759, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.8029197080291971, |
|
"grad_norm": 30.724218368530273, |
|
"learning_rate": 8.675979983319434e-06, |
|
"loss": 0.6527, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.8067614291202458, |
|
"grad_norm": 22.7759952545166, |
|
"learning_rate": 8.665554628857382e-06, |
|
"loss": 0.6438, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.8106031502112947, |
|
"grad_norm": 14.61020565032959, |
|
"learning_rate": 8.65512927439533e-06, |
|
"loss": 0.3962, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.8144448713023434, |
|
"grad_norm": 20.27998161315918, |
|
"learning_rate": 8.644703919933279e-06, |
|
"loss": 0.6989, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.8182865923933922, |
|
"grad_norm": 10.2035493850708, |
|
"learning_rate": 8.634278565471226e-06, |
|
"loss": 0.5543, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.822128313484441, |
|
"grad_norm": 16.954448699951172, |
|
"learning_rate": 8.623853211009175e-06, |
|
"loss": 0.4598, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.8259700345754898, |
|
"grad_norm": 24.188817977905273, |
|
"learning_rate": 8.613427856547123e-06, |
|
"loss": 0.674, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.8298117556665386, |
|
"grad_norm": 8.472796440124512, |
|
"learning_rate": 8.603002502085072e-06, |
|
"loss": 0.4246, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.8336534767575874, |
|
"grad_norm": 21.893178939819336, |
|
"learning_rate": 8.59257714762302e-06, |
|
"loss": 0.5788, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.8374951978486362, |
|
"grad_norm": 8.200776100158691, |
|
"learning_rate": 8.582151793160967e-06, |
|
"loss": 0.4215, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.841336918939685, |
|
"grad_norm": 21.523435592651367, |
|
"learning_rate": 8.571726438698917e-06, |
|
"loss": 0.4367, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.8451786400307337, |
|
"grad_norm": 18.608898162841797, |
|
"learning_rate": 8.561301084236864e-06, |
|
"loss": 0.6324, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.8490203611217826, |
|
"grad_norm": 19.39713478088379, |
|
"learning_rate": 8.550875729774813e-06, |
|
"loss": 0.382, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.8528620822128313, |
|
"grad_norm": 15.368677139282227, |
|
"learning_rate": 8.540450375312761e-06, |
|
"loss": 0.492, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.8567038033038802, |
|
"grad_norm": 6.85573673248291, |
|
"learning_rate": 8.53002502085071e-06, |
|
"loss": 0.6801, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.8605455243949289, |
|
"grad_norm": 11.223825454711914, |
|
"learning_rate": 8.519599666388658e-06, |
|
"loss": 0.7763, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.8643872454859777, |
|
"grad_norm": 11.18885326385498, |
|
"learning_rate": 8.509174311926605e-06, |
|
"loss": 0.585, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.8682289665770265, |
|
"grad_norm": 21.877548217773438, |
|
"learning_rate": 8.498748957464554e-06, |
|
"loss": 0.5873, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.8720706876680753, |
|
"grad_norm": 25.72136116027832, |
|
"learning_rate": 8.488323603002504e-06, |
|
"loss": 0.5796, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.8759124087591241, |
|
"grad_norm": 16.472366333007812, |
|
"learning_rate": 8.477898248540451e-06, |
|
"loss": 0.4431, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.8797541298501729, |
|
"grad_norm": 5.752821445465088, |
|
"learning_rate": 8.467472894078399e-06, |
|
"loss": 0.7004, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.8835958509412216, |
|
"grad_norm": 13.56191349029541, |
|
"learning_rate": 8.457047539616348e-06, |
|
"loss": 0.4899, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.8874375720322705, |
|
"grad_norm": 5.017563343048096, |
|
"learning_rate": 8.446622185154296e-06, |
|
"loss": 0.7014, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.8912792931233192, |
|
"grad_norm": 15.450356483459473, |
|
"learning_rate": 8.436196830692243e-06, |
|
"loss": 0.5414, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.8951210142143681, |
|
"grad_norm": 16.416250228881836, |
|
"learning_rate": 8.425771476230192e-06, |
|
"loss": 0.698, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.8989627353054168, |
|
"grad_norm": 11.185935020446777, |
|
"learning_rate": 8.415346121768142e-06, |
|
"loss": 0.6264, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.9028044563964657, |
|
"grad_norm": 22.787181854248047, |
|
"learning_rate": 8.40492076730609e-06, |
|
"loss": 0.6908, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.9066461774875144, |
|
"grad_norm": 11.522934913635254, |
|
"learning_rate": 8.394495412844037e-06, |
|
"loss": 0.5546, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.9104878985785632, |
|
"grad_norm": 18.260616302490234, |
|
"learning_rate": 8.384070058381986e-06, |
|
"loss": 0.5925, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.914329619669612, |
|
"grad_norm": 7.180076599121094, |
|
"learning_rate": 8.373644703919933e-06, |
|
"loss": 0.6639, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.9181713407606608, |
|
"grad_norm": 11.107264518737793, |
|
"learning_rate": 8.363219349457881e-06, |
|
"loss": 0.6762, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.9220130618517096, |
|
"grad_norm": 12.528190612792969, |
|
"learning_rate": 8.35279399499583e-06, |
|
"loss": 0.5435, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.9258547829427584, |
|
"grad_norm": 29.454421997070312, |
|
"learning_rate": 8.34236864053378e-06, |
|
"loss": 0.5074, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.9296965040338071, |
|
"grad_norm": 14.677248001098633, |
|
"learning_rate": 8.331943286071727e-06, |
|
"loss": 0.6161, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.933538225124856, |
|
"grad_norm": 8.907113075256348, |
|
"learning_rate": 8.321517931609675e-06, |
|
"loss": 0.353, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.9373799462159047, |
|
"grad_norm": 11.691315650939941, |
|
"learning_rate": 8.311092577147624e-06, |
|
"loss": 0.4516, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.9412216673069536, |
|
"grad_norm": 2.8710756301879883, |
|
"learning_rate": 8.300667222685571e-06, |
|
"loss": 0.6726, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.9450633883980023, |
|
"grad_norm": 11.67735481262207, |
|
"learning_rate": 8.290241868223519e-06, |
|
"loss": 0.6797, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.948905109489051, |
|
"grad_norm": 17.547286987304688, |
|
"learning_rate": 8.279816513761468e-06, |
|
"loss": 0.7029, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.9527468305800999, |
|
"grad_norm": 11.663725852966309, |
|
"learning_rate": 8.269391159299417e-06, |
|
"loss": 0.5016, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.9565885516711486, |
|
"grad_norm": 9.743104934692383, |
|
"learning_rate": 8.258965804837365e-06, |
|
"loss": 0.5489, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.9604302727621975, |
|
"grad_norm": 9.579474449157715, |
|
"learning_rate": 8.248540450375313e-06, |
|
"loss": 0.5867, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.9642719938532462, |
|
"grad_norm": 13.63699722290039, |
|
"learning_rate": 8.238115095913262e-06, |
|
"loss": 0.5191, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.968113714944295, |
|
"grad_norm": 10.331293106079102, |
|
"learning_rate": 8.227689741451211e-06, |
|
"loss": 0.6654, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.9719554360353438, |
|
"grad_norm": 10.614498138427734, |
|
"learning_rate": 8.217264386989159e-06, |
|
"loss": 0.5947, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.9757971571263926, |
|
"grad_norm": 10.182368278503418, |
|
"learning_rate": 8.206839032527106e-06, |
|
"loss": 0.5482, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.9796388782174414, |
|
"grad_norm": 15.42397403717041, |
|
"learning_rate": 8.196413678065055e-06, |
|
"loss": 0.5144, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.9834805993084902, |
|
"grad_norm": 8.317682266235352, |
|
"learning_rate": 8.185988323603003e-06, |
|
"loss": 0.4518, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.987322320399539, |
|
"grad_norm": 43.10714340209961, |
|
"learning_rate": 8.17556296914095e-06, |
|
"loss": 0.5974, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.9911640414905878, |
|
"grad_norm": 7.906277656555176, |
|
"learning_rate": 8.1651376146789e-06, |
|
"loss": 0.5745, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.9950057625816365, |
|
"grad_norm": 10.229177474975586, |
|
"learning_rate": 8.154712260216849e-06, |
|
"loss": 0.4322, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.9988474836726854, |
|
"grad_norm": 12.773459434509277, |
|
"learning_rate": 8.144286905754796e-06, |
|
"loss": 0.5947, |
|
"step": 2600 |
|
}, |
|
{
"epoch": 1.0,
"eval_accuracy": 0.7737969455383729,
"eval_f1_per_label": [
0.778160354156419,
0.7230364524614806,
0.8299703264094955
],
"eval_f1_weighted": 0.774424228670159,
"eval_loss": 0.5626052618026733,
"eval_precision_per_label": [
0.7752368507023848,
0.7131208302446257,
0.8468059339993945
],
"eval_precision_weighted": 0.7753803799282116,
"eval_recall_per_label": [
0.7811059907834101,
0.7332317073170732,
0.8137910968868199
],
"eval_recall_weighted": 0.7737969455383729,
"eval_runtime": 38.6689,
"eval_samples_per_second": 269.235,
"eval_steps_per_second": 33.67,
"step": 2603
}
],
"logging_steps": 10,
"max_steps": 10412,
"num_input_tokens_seen": 0,
"num_train_epochs": 4,
"save_steps": 500,
"stateful_callbacks": {
"EarlyStoppingCallback": {
"args": {
"early_stopping_patience": 3,
"early_stopping_threshold": 0.0
},
"attributes": {
"early_stopping_patience_counter": 0
}
},
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 656244433604970.0,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}