|
{ |
|
"best_metric": 0.22382062673568726, |
|
"best_model_checkpoint": "./ryan03282024/checkpoint-1600", |
|
"epoch": 4.0, |
|
"eval_steps": 100, |
|
"global_step": 9160, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.5509258508682251, |
|
"learning_rate": 9.972707423580787e-05, |
|
"loss": 0.4095, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.37446415424346924, |
|
"learning_rate": 9.945414847161573e-05, |
|
"loss": 0.3743, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.36988818645477295, |
|
"learning_rate": 9.918122270742359e-05, |
|
"loss": 0.3766, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 2.9999992847442627, |
|
"learning_rate": 9.890829694323145e-05, |
|
"loss": 0.3421, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"eval_loss": 0.33314022421836853, |
|
"eval_na_accuracy": 0.6911196708679199, |
|
"eval_ordinal_accuracy": 0.3816815912723541, |
|
"eval_ordinal_mae": 0.8749264478683472, |
|
"eval_runtime": 311.6641, |
|
"eval_samples_per_second": 12.767, |
|
"eval_steps_per_second": 1.598, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.6784111261367798, |
|
"learning_rate": 9.863537117903931e-05, |
|
"loss": 0.3422, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.7690809965133667, |
|
"learning_rate": 9.836244541484717e-05, |
|
"loss": 0.3059, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.5732172131538391, |
|
"learning_rate": 9.808951965065503e-05, |
|
"loss": 0.3654, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3226703405380249, |
|
"learning_rate": 9.781659388646288e-05, |
|
"loss": 0.2813, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"eval_loss": 0.29995909333229065, |
|
"eval_na_accuracy": 0.7953668236732483, |
|
"eval_ordinal_accuracy": 0.5117018222808838, |
|
"eval_ordinal_mae": 0.7492409348487854, |
|
"eval_runtime": 157.8026, |
|
"eval_samples_per_second": 25.215, |
|
"eval_steps_per_second": 3.156, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.44079920649528503, |
|
"learning_rate": 9.754366812227075e-05, |
|
"loss": 0.3359, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.5866132974624634, |
|
"learning_rate": 9.727074235807861e-05, |
|
"loss": 0.3131, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.5666215419769287, |
|
"learning_rate": 9.699781659388647e-05, |
|
"loss": 0.2723, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 0.2892788350582123, |
|
"learning_rate": 9.672489082969433e-05, |
|
"loss": 0.2619, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"eval_loss": 0.3019290566444397, |
|
"eval_na_accuracy": 0.7046331763267517, |
|
"eval_ordinal_accuracy": 0.5273042321205139, |
|
"eval_ordinal_mae": 0.6840940117835999, |
|
"eval_runtime": 158.8663, |
|
"eval_samples_per_second": 25.046, |
|
"eval_steps_per_second": 3.135, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 0.24918608367443085, |
|
"learning_rate": 9.645196506550219e-05, |
|
"loss": 0.303, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.626022219657898, |
|
"learning_rate": 9.617903930131005e-05, |
|
"loss": 0.2867, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.1694397926330566, |
|
"learning_rate": 9.59061135371179e-05, |
|
"loss": 0.249, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.3259480893611908, |
|
"learning_rate": 9.563318777292577e-05, |
|
"loss": 0.2863, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"eval_loss": 0.2960417866706848, |
|
"eval_na_accuracy": 0.7335907220840454, |
|
"eval_ordinal_accuracy": 0.5096792578697205, |
|
"eval_ordinal_mae": 0.6538078188896179, |
|
"eval_runtime": 159.697, |
|
"eval_samples_per_second": 24.916, |
|
"eval_steps_per_second": 3.118, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.1021641492843628, |
|
"learning_rate": 9.536026200873362e-05, |
|
"loss": 0.2729, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.3987232446670532, |
|
"learning_rate": 9.50873362445415e-05, |
|
"loss": 0.2544, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.7090039253234863, |
|
"learning_rate": 9.481441048034934e-05, |
|
"loss": 0.2776, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.737847924232483, |
|
"learning_rate": 9.454148471615721e-05, |
|
"loss": 0.2159, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"eval_loss": 0.26019465923309326, |
|
"eval_na_accuracy": 0.8243243098258972, |
|
"eval_ordinal_accuracy": 0.5660213828086853, |
|
"eval_ordinal_mae": 0.540388822555542, |
|
"eval_runtime": 157.5356, |
|
"eval_samples_per_second": 25.258, |
|
"eval_steps_per_second": 3.161, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 3.2989888191223145, |
|
"learning_rate": 9.426855895196508e-05, |
|
"loss": 0.2554, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.7149298191070557, |
|
"learning_rate": 9.399563318777294e-05, |
|
"loss": 0.272, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.90256667137146, |
|
"learning_rate": 9.37227074235808e-05, |
|
"loss": 0.2595, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 2.2932217121124268, |
|
"learning_rate": 9.344978165938864e-05, |
|
"loss": 0.2235, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"eval_loss": 0.2556995749473572, |
|
"eval_na_accuracy": 0.7779922485351562, |
|
"eval_ordinal_accuracy": 0.5874024629592896, |
|
"eval_ordinal_mae": 0.5014671683311462, |
|
"eval_runtime": 159.648, |
|
"eval_samples_per_second": 24.924, |
|
"eval_steps_per_second": 3.119, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.8714308142662048, |
|
"learning_rate": 9.317685589519652e-05, |
|
"loss": 0.2871, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.699299156665802, |
|
"learning_rate": 9.290393013100436e-05, |
|
"loss": 0.2489, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 3.3761134147644043, |
|
"learning_rate": 9.263100436681224e-05, |
|
"loss": 0.2696, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.5747934579849243, |
|
"learning_rate": 9.235807860262009e-05, |
|
"loss": 0.285, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"eval_loss": 0.2563941180706024, |
|
"eval_na_accuracy": 0.6853281855583191, |
|
"eval_ordinal_accuracy": 0.6180294752120972, |
|
"eval_ordinal_mae": 0.49999386072158813, |
|
"eval_runtime": 160.5887, |
|
"eval_samples_per_second": 24.778, |
|
"eval_steps_per_second": 3.101, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.6936819553375244, |
|
"learning_rate": 9.208515283842796e-05, |
|
"loss": 0.2379, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.3918662071228027, |
|
"learning_rate": 9.18122270742358e-05, |
|
"loss": 0.2509, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.46456876397132874, |
|
"learning_rate": 9.153930131004367e-05, |
|
"loss": 0.2531, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.516076385974884, |
|
"learning_rate": 9.126637554585154e-05, |
|
"loss": 0.2028, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"eval_loss": 0.2861529588699341, |
|
"eval_na_accuracy": 0.7220077514648438, |
|
"eval_ordinal_accuracy": 0.5067899227142334, |
|
"eval_ordinal_mae": 0.633837103843689, |
|
"eval_runtime": 154.4747, |
|
"eval_samples_per_second": 25.758, |
|
"eval_steps_per_second": 3.224, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.5322011709213257, |
|
"learning_rate": 9.100436681222709e-05, |
|
"loss": 0.2936, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.49020248651504517, |
|
"learning_rate": 9.073144104803494e-05, |
|
"loss": 0.2206, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.8753771781921387, |
|
"learning_rate": 9.045851528384281e-05, |
|
"loss": 0.2031, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.1926873922348022, |
|
"learning_rate": 9.018558951965066e-05, |
|
"loss": 0.2006, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"eval_loss": 0.24949249625205994, |
|
"eval_na_accuracy": 0.7586872577667236, |
|
"eval_ordinal_accuracy": 0.6298757791519165, |
|
"eval_ordinal_mae": 0.4830287992954254, |
|
"eval_runtime": 156.0615, |
|
"eval_samples_per_second": 25.496, |
|
"eval_steps_per_second": 3.191, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.0290440320968628, |
|
"learning_rate": 8.991266375545852e-05, |
|
"loss": 0.2648, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.615397036075592, |
|
"learning_rate": 8.963973799126638e-05, |
|
"loss": 0.1866, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.4451560974121094, |
|
"learning_rate": 8.936681222707424e-05, |
|
"loss": 0.2641, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.9014913439750671, |
|
"learning_rate": 8.90938864628821e-05, |
|
"loss": 0.2663, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"eval_loss": 0.26604241132736206, |
|
"eval_na_accuracy": 0.8610038757324219, |
|
"eval_ordinal_accuracy": 0.602138102054596, |
|
"eval_ordinal_mae": 0.4893138110637665, |
|
"eval_runtime": 155.5971, |
|
"eval_samples_per_second": 25.572, |
|
"eval_steps_per_second": 3.201, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 2.4818997383117676, |
|
"learning_rate": 8.882096069868996e-05, |
|
"loss": 0.2454, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.7425001859664917, |
|
"learning_rate": 8.854803493449782e-05, |
|
"loss": 0.215, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 2.0386316776275635, |
|
"learning_rate": 8.827510917030568e-05, |
|
"loss": 0.2368, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.6596102714538574, |
|
"learning_rate": 8.800218340611354e-05, |
|
"loss": 0.2062, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"eval_loss": 0.24812151491641998, |
|
"eval_na_accuracy": 0.8436293601989746, |
|
"eval_ordinal_accuracy": 0.6266974806785583, |
|
"eval_ordinal_mae": 0.47125470638275146, |
|
"eval_runtime": 157.0377, |
|
"eval_samples_per_second": 25.338, |
|
"eval_steps_per_second": 3.171, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.4987865686416626, |
|
"learning_rate": 8.77292576419214e-05, |
|
"loss": 0.2151, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 1.0523704290390015, |
|
"learning_rate": 8.745633187772926e-05, |
|
"loss": 0.2092, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.6211034059524536, |
|
"learning_rate": 8.718340611353712e-05, |
|
"loss": 0.2452, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.49944034218788147, |
|
"learning_rate": 8.691048034934498e-05, |
|
"loss": 0.1749, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"eval_loss": 0.25862136483192444, |
|
"eval_na_accuracy": 0.6737451553344727, |
|
"eval_ordinal_accuracy": 0.6422998905181885, |
|
"eval_ordinal_mae": 0.4958673417568207, |
|
"eval_runtime": 161.9513, |
|
"eval_samples_per_second": 24.569, |
|
"eval_steps_per_second": 3.075, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.30346667766571045, |
|
"learning_rate": 8.663755458515284e-05, |
|
"loss": 0.202, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.8414286971092224, |
|
"learning_rate": 8.63646288209607e-05, |
|
"loss": 0.2019, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 1.1115086078643799, |
|
"learning_rate": 8.609170305676856e-05, |
|
"loss": 0.2037, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.7678231000900269, |
|
"learning_rate": 8.581877729257642e-05, |
|
"loss": 0.2197, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"eval_loss": 0.23488646745681763, |
|
"eval_na_accuracy": 0.8030887842178345, |
|
"eval_ordinal_accuracy": 0.5980930328369141, |
|
"eval_ordinal_mae": 0.48411670327186584, |
|
"eval_runtime": 161.4724, |
|
"eval_samples_per_second": 24.642, |
|
"eval_steps_per_second": 3.084, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.33742988109588623, |
|
"learning_rate": 8.554585152838429e-05, |
|
"loss": 0.226, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 1.6625324487686157, |
|
"learning_rate": 8.527292576419215e-05, |
|
"loss": 0.2612, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 1.5984447002410889, |
|
"learning_rate": 8.5e-05, |
|
"loss": 0.1812, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 1.39547860622406, |
|
"learning_rate": 8.472707423580787e-05, |
|
"loss": 0.2073, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"eval_loss": 0.25865110754966736, |
|
"eval_na_accuracy": 0.6949806809425354, |
|
"eval_ordinal_accuracy": 0.6012713313102722, |
|
"eval_ordinal_mae": 0.4877532422542572, |
|
"eval_runtime": 161.5877, |
|
"eval_samples_per_second": 24.624, |
|
"eval_steps_per_second": 3.082, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.6241037845611572, |
|
"learning_rate": 8.445414847161573e-05, |
|
"loss": 0.2783, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 1.9269381761550903, |
|
"learning_rate": 8.418122270742357e-05, |
|
"loss": 0.1813, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.5204592943191528, |
|
"learning_rate": 8.390829694323145e-05, |
|
"loss": 0.1753, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 1.156730055809021, |
|
"learning_rate": 8.36353711790393e-05, |
|
"loss": 0.1915, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"eval_loss": 0.23929761350154877, |
|
"eval_na_accuracy": 0.7683397531509399, |
|
"eval_ordinal_accuracy": 0.6321872472763062, |
|
"eval_ordinal_mae": 0.4770694673061371, |
|
"eval_runtime": 160.096, |
|
"eval_samples_per_second": 24.854, |
|
"eval_steps_per_second": 3.111, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.31671473383903503, |
|
"learning_rate": 8.336244541484717e-05, |
|
"loss": 0.2555, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 1.452971339225769, |
|
"learning_rate": 8.308951965065503e-05, |
|
"loss": 0.2321, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.7802607417106628, |
|
"learning_rate": 8.281659388646289e-05, |
|
"loss": 0.2013, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 1.238275408744812, |
|
"learning_rate": 8.254366812227075e-05, |
|
"loss": 0.2374, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"eval_loss": 0.22382062673568726, |
|
"eval_na_accuracy": 0.799227774143219, |
|
"eval_ordinal_accuracy": 0.6446113586425781, |
|
"eval_ordinal_mae": 0.4441048502922058, |
|
"eval_runtime": 161.7443, |
|
"eval_samples_per_second": 24.601, |
|
"eval_steps_per_second": 3.079, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.8560432195663452, |
|
"learning_rate": 8.227074235807861e-05, |
|
"loss": 0.2055, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 2.4070003032684326, |
|
"learning_rate": 8.199781659388647e-05, |
|
"loss": 0.2357, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 2.119150161743164, |
|
"learning_rate": 8.172489082969432e-05, |
|
"loss": 0.2145, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 1.8999676704406738, |
|
"learning_rate": 8.145196506550219e-05, |
|
"loss": 0.2278, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"eval_loss": 0.24530412256717682, |
|
"eval_na_accuracy": 0.7277992367744446, |
|
"eval_ordinal_accuracy": 0.6538572907447815, |
|
"eval_ordinal_mae": 0.4410313367843628, |
|
"eval_runtime": 160.7764, |
|
"eval_samples_per_second": 24.749, |
|
"eval_steps_per_second": 3.097, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 1.699065923690796, |
|
"learning_rate": 8.117903930131004e-05, |
|
"loss": 0.2416, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 1.4952126741409302, |
|
"learning_rate": 8.090611353711791e-05, |
|
"loss": 0.188, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.4823841452598572, |
|
"learning_rate": 8.063318777292576e-05, |
|
"loss": 0.2148, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.9280353784561157, |
|
"learning_rate": 8.036026200873363e-05, |
|
"loss": 0.2033, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"eval_loss": 0.22507312893867493, |
|
"eval_na_accuracy": 0.8185328245162964, |
|
"eval_ordinal_accuracy": 0.6298757791519165, |
|
"eval_ordinal_mae": 0.4584101140499115, |
|
"eval_runtime": 160.7672, |
|
"eval_samples_per_second": 24.75, |
|
"eval_steps_per_second": 3.098, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.9829936027526855, |
|
"learning_rate": 8.00873362445415e-05, |
|
"loss": 0.2015, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.5692235231399536, |
|
"learning_rate": 7.981441048034934e-05, |
|
"loss": 0.2213, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.4303966164588928, |
|
"learning_rate": 7.954148471615722e-05, |
|
"loss": 0.2156, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 1.4745689630508423, |
|
"learning_rate": 7.926855895196506e-05, |
|
"loss": 0.1843, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"eval_loss": 0.2280111461877823, |
|
"eval_na_accuracy": 0.8127413392066956, |
|
"eval_ordinal_accuracy": 0.6512568593025208, |
|
"eval_ordinal_mae": 0.4446066617965698, |
|
"eval_runtime": 160.0671, |
|
"eval_samples_per_second": 24.858, |
|
"eval_steps_per_second": 3.111, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 2.2468037605285645, |
|
"learning_rate": 7.899563318777294e-05, |
|
"loss": 0.2182, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.6818110346794128, |
|
"learning_rate": 7.872270742358078e-05, |
|
"loss": 0.1822, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.802448034286499, |
|
"learning_rate": 7.844978165938866e-05, |
|
"loss": 0.2289, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.6907253861427307, |
|
"learning_rate": 7.81768558951965e-05, |
|
"loss": 0.1878, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"eval_loss": 0.22766314446926117, |
|
"eval_na_accuracy": 0.8127413392066956, |
|
"eval_ordinal_accuracy": 0.6492343544960022, |
|
"eval_ordinal_mae": 0.4454284906387329, |
|
"eval_runtime": 159.2821, |
|
"eval_samples_per_second": 24.981, |
|
"eval_steps_per_second": 3.127, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 1.1525399684906006, |
|
"learning_rate": 7.790393013100437e-05, |
|
"loss": 0.2146, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 1.3308144807815552, |
|
"learning_rate": 7.763100436681223e-05, |
|
"loss": 0.2367, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 1.1532173156738281, |
|
"learning_rate": 7.735807860262009e-05, |
|
"loss": 0.1948, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.9184058904647827, |
|
"learning_rate": 7.708515283842796e-05, |
|
"loss": 0.2608, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"eval_loss": 0.23085492849349976, |
|
"eval_na_accuracy": 0.8494208455085754, |
|
"eval_ordinal_accuracy": 0.619185209274292, |
|
"eval_ordinal_mae": 0.4517284035682678, |
|
"eval_runtime": 158.9549, |
|
"eval_samples_per_second": 25.032, |
|
"eval_steps_per_second": 3.133, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 1.0736620426177979, |
|
"learning_rate": 7.681222707423581e-05, |
|
"loss": 0.2409, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.5520908832550049, |
|
"learning_rate": 7.653930131004368e-05, |
|
"loss": 0.1722, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 1.255903720855713, |
|
"learning_rate": 7.626637554585153e-05, |
|
"loss": 0.1996, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 1.3203591108322144, |
|
"learning_rate": 7.599344978165939e-05, |
|
"loss": 0.201, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"eval_loss": 0.24588599801063538, |
|
"eval_na_accuracy": 0.7277992367744446, |
|
"eval_ordinal_accuracy": 0.6405662894248962, |
|
"eval_ordinal_mae": 0.46535709500312805, |
|
"eval_runtime": 163.7913, |
|
"eval_samples_per_second": 24.293, |
|
"eval_steps_per_second": 3.04, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 1.3525117635726929, |
|
"learning_rate": 7.572052401746725e-05, |
|
"loss": 0.2068, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 2.916431188583374, |
|
"learning_rate": 7.544759825327511e-05, |
|
"loss": 0.243, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.8391708135604858, |
|
"learning_rate": 7.517467248908297e-05, |
|
"loss": 0.1842, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.5292081236839294, |
|
"learning_rate": 7.490174672489083e-05, |
|
"loss": 0.1736, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 0.24380208551883698, |
|
"eval_na_accuracy": 0.7200772166252136, |
|
"eval_ordinal_accuracy": 0.6475006937980652, |
|
"eval_ordinal_mae": 0.44738492369651794, |
|
"eval_runtime": 161.0768, |
|
"eval_samples_per_second": 24.703, |
|
"eval_steps_per_second": 3.092, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.6971523761749268, |
|
"learning_rate": 7.462882096069869e-05, |
|
"loss": 0.1683, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.8325093388557434, |
|
"learning_rate": 7.435589519650655e-05, |
|
"loss": 0.1177, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.8595998883247375, |
|
"learning_rate": 7.408296943231441e-05, |
|
"loss": 0.1626, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.38421839475631714, |
|
"learning_rate": 7.381004366812227e-05, |
|
"loss": 0.1374, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"eval_loss": 0.23675759136676788, |
|
"eval_na_accuracy": 0.7799227833747864, |
|
"eval_ordinal_accuracy": 0.6622363328933716, |
|
"eval_ordinal_mae": 0.41446253657341003, |
|
"eval_runtime": 160.4437, |
|
"eval_samples_per_second": 24.8, |
|
"eval_steps_per_second": 3.104, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.7025877237319946, |
|
"learning_rate": 7.353711790393013e-05, |
|
"loss": 0.1484, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 1.4461692571640015, |
|
"learning_rate": 7.3264192139738e-05, |
|
"loss": 0.1564, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 1.1262603998184204, |
|
"learning_rate": 7.299126637554585e-05, |
|
"loss": 0.1252, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 1.1054977178573608, |
|
"learning_rate": 7.271834061135371e-05, |
|
"loss": 0.1334, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"eval_loss": 0.2424485832452774, |
|
"eval_na_accuracy": 0.7509652376174927, |
|
"eval_ordinal_accuracy": 0.6732158064842224, |
|
"eval_ordinal_mae": 0.4105488359928131, |
|
"eval_runtime": 161.6129, |
|
"eval_samples_per_second": 24.621, |
|
"eval_steps_per_second": 3.081, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.6302788257598877, |
|
"learning_rate": 7.244541484716158e-05, |
|
"loss": 0.1252, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"grad_norm": 3.8645241260528564, |
|
"learning_rate": 7.217248908296944e-05, |
|
"loss": 0.1444, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"grad_norm": 0.7108765244483948, |
|
"learning_rate": 7.18995633187773e-05, |
|
"loss": 0.1273, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"grad_norm": 2.3784756660461426, |
|
"learning_rate": 7.162663755458516e-05, |
|
"loss": 0.1319, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"eval_loss": 0.23355962336063385, |
|
"eval_na_accuracy": 0.7741312980651855, |
|
"eval_ordinal_accuracy": 0.6711933016777039, |
|
"eval_ordinal_mae": 0.41552796959877014, |
|
"eval_runtime": 161.0033, |
|
"eval_samples_per_second": 24.714, |
|
"eval_steps_per_second": 3.093, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"grad_norm": 0.6569415330886841, |
|
"learning_rate": 7.135371179039302e-05, |
|
"loss": 0.1418, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"grad_norm": 2.9414520263671875, |
|
"learning_rate": 7.108078602620088e-05, |
|
"loss": 0.123, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"grad_norm": 0.5435983538627625, |
|
"learning_rate": 7.080786026200874e-05, |
|
"loss": 0.1458, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"grad_norm": 0.6094673275947571, |
|
"learning_rate": 7.05349344978166e-05, |
|
"loss": 0.1549, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"eval_loss": 0.25251126289367676, |
|
"eval_na_accuracy": 0.7586872577667236, |
|
"eval_ordinal_accuracy": 0.6625252962112427, |
|
"eval_ordinal_mae": 0.40396779775619507, |
|
"eval_runtime": 162.435, |
|
"eval_samples_per_second": 24.496, |
|
"eval_steps_per_second": 3.066, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"grad_norm": 0.4580962359905243, |
|
"learning_rate": 7.026200873362446e-05, |
|
"loss": 0.1382, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 0.7736852765083313, |
|
"learning_rate": 6.998908296943232e-05, |
|
"loss": 0.1595, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"grad_norm": 1.1125404834747314, |
|
"learning_rate": 6.971615720524018e-05, |
|
"loss": 0.1071, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"grad_norm": 0.529449999332428, |
|
"learning_rate": 6.944323144104804e-05, |
|
"loss": 0.116, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"eval_loss": 0.25007495284080505, |
|
"eval_na_accuracy": 0.7664092779159546, |
|
"eval_ordinal_accuracy": 0.6370990872383118, |
|
"eval_ordinal_mae": 0.44249165058135986, |
|
"eval_runtime": 161.6127, |
|
"eval_samples_per_second": 24.621, |
|
"eval_steps_per_second": 3.081, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"grad_norm": 3.3976492881774902, |
|
"learning_rate": 6.91703056768559e-05, |
|
"loss": 0.1238, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"grad_norm": 0.9712594747543335, |
|
"learning_rate": 6.889737991266376e-05, |
|
"loss": 0.1313, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"grad_norm": 0.35930949449539185, |
|
"learning_rate": 6.862445414847162e-05, |
|
"loss": 0.1228, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"grad_norm": 1.873953938484192, |
|
"learning_rate": 6.835152838427948e-05, |
|
"loss": 0.1358, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"eval_loss": 0.23235850036144257, |
|
"eval_na_accuracy": 0.8185328245162964, |
|
"eval_ordinal_accuracy": 0.6498122215270996, |
|
"eval_ordinal_mae": 0.4136166572570801, |
|
"eval_runtime": 162.283, |
|
"eval_samples_per_second": 24.519, |
|
"eval_steps_per_second": 3.069, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"grad_norm": 1.8601669073104858, |
|
"learning_rate": 6.807860262008734e-05, |
|
"loss": 0.101, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"grad_norm": 0.9282914996147156, |
|
"learning_rate": 6.780567685589519e-05, |
|
"loss": 0.1435, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"grad_norm": 1.7728241682052612, |
|
"learning_rate": 6.753275109170306e-05, |
|
"loss": 0.1125, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"grad_norm": 0.5749986171722412, |
|
"learning_rate": 6.725982532751091e-05, |
|
"loss": 0.1614, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"eval_loss": 0.26365962624549866, |
|
"eval_na_accuracy": 0.7915058135986328, |
|
"eval_ordinal_accuracy": 0.6316093802452087, |
|
"eval_ordinal_mae": 0.43529626727104187, |
|
"eval_runtime": 161.4837, |
|
"eval_samples_per_second": 24.64, |
|
"eval_steps_per_second": 3.084, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"grad_norm": 0.44815096259117126, |
|
"learning_rate": 6.698689956331879e-05, |
|
"loss": 0.1436, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"grad_norm": 0.4672500491142273, |
|
"learning_rate": 6.671397379912665e-05, |
|
"loss": 0.0943, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"grad_norm": 0.8902660608291626, |
|
"learning_rate": 6.64410480349345e-05, |
|
"loss": 0.1258, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"grad_norm": 0.7342121005058289, |
|
"learning_rate": 6.616812227074237e-05, |
|
"loss": 0.1395, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"eval_loss": 0.2445780336856842, |
|
"eval_na_accuracy": 0.8011583089828491, |
|
"eval_ordinal_accuracy": 0.672637939453125, |
|
"eval_ordinal_mae": 0.4019619822502136, |
|
"eval_runtime": 161.5727, |
|
"eval_samples_per_second": 24.627, |
|
"eval_steps_per_second": 3.082, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"grad_norm": 0.7066202163696289, |
|
"learning_rate": 6.589519650655021e-05, |
|
"loss": 0.1627, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"grad_norm": 0.8218971490859985, |
|
"learning_rate": 6.562227074235809e-05, |
|
"loss": 0.1116, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"grad_norm": 2.74863600730896, |
|
"learning_rate": 6.534934497816593e-05, |
|
"loss": 0.1151, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"grad_norm": 2.341121196746826, |
|
"learning_rate": 6.507641921397381e-05, |
|
"loss": 0.1208, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"eval_loss": 0.24651078879833221, |
|
"eval_na_accuracy": 0.8243243098258972, |
|
"eval_ordinal_accuracy": 0.6763941049575806, |
|
"eval_ordinal_mae": 0.394586980342865, |
|
"eval_runtime": 160.9965, |
|
"eval_samples_per_second": 24.715, |
|
"eval_steps_per_second": 3.093, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"grad_norm": 0.610958993434906, |
|
"learning_rate": 6.480349344978166e-05, |
|
"loss": 0.1145, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"grad_norm": 0.43066203594207764, |
|
"learning_rate": 6.453056768558953e-05, |
|
"loss": 0.1322, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"grad_norm": 0.21925854682922363, |
|
"learning_rate": 6.425764192139738e-05, |
|
"loss": 0.1602, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"grad_norm": 0.34638360142707825, |
|
"learning_rate": 6.398471615720524e-05, |
|
"loss": 0.1432, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"eval_loss": 0.25519701838493347, |
|
"eval_na_accuracy": 0.8899613618850708, |
|
"eval_ordinal_accuracy": 0.6576133966445923, |
|
"eval_ordinal_mae": 0.3918676972389221, |
|
"eval_runtime": 160.3951, |
|
"eval_samples_per_second": 24.807, |
|
"eval_steps_per_second": 3.105, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"grad_norm": 0.5949413776397705, |
|
"learning_rate": 6.371179039301311e-05, |
|
"loss": 0.1249, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"grad_norm": 0.8993425965309143, |
|
"learning_rate": 6.343886462882096e-05, |
|
"loss": 0.1139, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"grad_norm": 0.7099699974060059, |
|
"learning_rate": 6.316593886462883e-05, |
|
"loss": 0.1019, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"grad_norm": 2.9975674152374268, |
|
"learning_rate": 6.289301310043668e-05, |
|
"loss": 0.1358, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"eval_loss": 0.2561098039150238, |
|
"eval_na_accuracy": 0.7895752787590027, |
|
"eval_ordinal_accuracy": 0.6795724034309387, |
|
"eval_ordinal_mae": 0.39841172099113464, |
|
"eval_runtime": 162.3278, |
|
"eval_samples_per_second": 24.512, |
|
"eval_steps_per_second": 3.068, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 0.259729266166687, |
|
"learning_rate": 6.262008733624455e-05, |
|
"loss": 0.1486, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"grad_norm": 4.243904113769531, |
|
"learning_rate": 6.23471615720524e-05, |
|
"loss": 0.1652, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"grad_norm": 2.9280548095703125, |
|
"learning_rate": 6.207423580786027e-05, |
|
"loss": 0.1699, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"grad_norm": 0.5303541421890259, |
|
"learning_rate": 6.180131004366812e-05, |
|
"loss": 0.0877, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"eval_loss": 0.23811650276184082, |
|
"eval_na_accuracy": 0.7876448035240173, |
|
"eval_ordinal_accuracy": 0.6821727752685547, |
|
"eval_ordinal_mae": 0.3901168704032898, |
|
"eval_runtime": 162.0532, |
|
"eval_samples_per_second": 24.554, |
|
"eval_steps_per_second": 3.073, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"grad_norm": 0.42129185795783997, |
|
"learning_rate": 6.152838427947598e-05, |
|
"loss": 0.1149, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"grad_norm": 1.27903151512146, |
|
"learning_rate": 6.125545851528384e-05, |
|
"loss": 0.1208, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"grad_norm": 3.1208670139312744, |
|
"learning_rate": 6.09825327510917e-05, |
|
"loss": 0.1106, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"grad_norm": 3.3159916400909424, |
|
"learning_rate": 6.070960698689957e-05, |
|
"loss": 0.1212, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"eval_loss": 0.2600151598453522, |
|
"eval_na_accuracy": 0.7258687019348145, |
|
"eval_ordinal_accuracy": 0.6948858499526978, |
|
"eval_ordinal_mae": 0.400073766708374, |
|
"eval_runtime": 160.7378, |
|
"eval_samples_per_second": 24.755, |
|
"eval_steps_per_second": 3.098, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"grad_norm": 3.3880019187927246, |
|
"learning_rate": 6.043668122270742e-05, |
|
"loss": 0.1593, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"grad_norm": 2.641679286956787, |
|
"learning_rate": 6.016375545851529e-05, |
|
"loss": 0.1489, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 1.1284505128860474, |
|
"learning_rate": 5.9890829694323144e-05, |
|
"loss": 0.1097, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"grad_norm": 1.6684277057647705, |
|
"learning_rate": 5.9617903930131005e-05, |
|
"loss": 0.1917, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"eval_loss": 0.24585247039794922, |
|
"eval_na_accuracy": 0.7818532586097717, |
|
"eval_ordinal_accuracy": 0.6893961429595947, |
|
"eval_ordinal_mae": 0.3889385461807251, |
|
"eval_runtime": 161.3514, |
|
"eval_samples_per_second": 24.66, |
|
"eval_steps_per_second": 3.086, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"grad_norm": 0.4128411114215851, |
|
"learning_rate": 5.934497816593887e-05, |
|
"loss": 0.1423, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"grad_norm": 1.0505822896957397, |
|
"learning_rate": 5.9072052401746726e-05, |
|
"loss": 0.1257, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"grad_norm": 0.8468612432479858, |
|
"learning_rate": 5.879912663755459e-05, |
|
"loss": 0.1296, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"grad_norm": 0.31584060192108154, |
|
"learning_rate": 5.852620087336245e-05, |
|
"loss": 0.1175, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"eval_loss": 0.2443784475326538, |
|
"eval_na_accuracy": 0.7741312980651855, |
|
"eval_ordinal_accuracy": 0.6818838715553284, |
|
"eval_ordinal_mae": 0.3937167227268219, |
|
"eval_runtime": 161.5524, |
|
"eval_samples_per_second": 24.63, |
|
"eval_steps_per_second": 3.083, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"grad_norm": 5.336325645446777, |
|
"learning_rate": 5.8253275109170314e-05, |
|
"loss": 0.1638, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"grad_norm": 1.6800111532211304, |
|
"learning_rate": 5.798034934497817e-05, |
|
"loss": 0.1314, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"grad_norm": 1.491882085800171, |
|
"learning_rate": 5.770742358078602e-05, |
|
"loss": 0.1415, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"grad_norm": 2.401737928390503, |
|
"learning_rate": 5.743449781659389e-05, |
|
"loss": 0.1522, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"eval_loss": 0.2472807914018631, |
|
"eval_na_accuracy": 0.8050193190574646, |
|
"eval_ordinal_accuracy": 0.6607916951179504, |
|
"eval_ordinal_mae": 0.40097710490226746, |
|
"eval_runtime": 160.8219, |
|
"eval_samples_per_second": 24.742, |
|
"eval_steps_per_second": 3.097, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"grad_norm": 0.6720189452171326, |
|
"learning_rate": 5.716157205240175e-05, |
|
"loss": 0.1239, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"grad_norm": 2.349804639816284, |
|
"learning_rate": 5.688864628820961e-05, |
|
"loss": 0.1318, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"grad_norm": 0.732524573802948, |
|
"learning_rate": 5.661572052401747e-05, |
|
"loss": 0.1423, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"grad_norm": 0.4833851456642151, |
|
"learning_rate": 5.634279475982534e-05, |
|
"loss": 0.1027, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"eval_loss": 0.23541530966758728, |
|
"eval_na_accuracy": 0.7837837934494019, |
|
"eval_ordinal_accuracy": 0.6477896571159363, |
|
"eval_ordinal_mae": 0.420841783285141, |
|
"eval_runtime": 155.1162, |
|
"eval_samples_per_second": 25.652, |
|
"eval_steps_per_second": 3.21, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"grad_norm": 0.5297548174858093, |
|
"learning_rate": 5.606986899563319e-05, |
|
"loss": 0.1596, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"grad_norm": 0.5475759506225586, |
|
"learning_rate": 5.5796943231441045e-05, |
|
"loss": 0.1272, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"grad_norm": 2.1666433811187744, |
|
"learning_rate": 5.552401746724891e-05, |
|
"loss": 0.1382, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"grad_norm": 3.707628011703491, |
|
"learning_rate": 5.5251091703056766e-05, |
|
"loss": 0.1343, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"eval_loss": 0.228408545255661, |
|
"eval_na_accuracy": 0.799227774143219, |
|
"eval_ordinal_accuracy": 0.6743715405464172, |
|
"eval_ordinal_mae": 0.3976960778236389, |
|
"eval_runtime": 158.2657, |
|
"eval_samples_per_second": 25.141, |
|
"eval_steps_per_second": 3.147, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"grad_norm": 0.5081536769866943, |
|
"learning_rate": 5.497816593886463e-05, |
|
"loss": 0.1381, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"grad_norm": 0.7783900499343872, |
|
"learning_rate": 5.470524017467249e-05, |
|
"loss": 0.1391, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"grad_norm": 0.6352062821388245, |
|
"learning_rate": 5.4432314410480354e-05, |
|
"loss": 0.1157, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"grad_norm": 0.6280900835990906, |
|
"learning_rate": 5.4159388646288215e-05, |
|
"loss": 0.1552, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"eval_loss": 0.2606957256793976, |
|
"eval_na_accuracy": 0.7779922485351562, |
|
"eval_ordinal_accuracy": 0.6714822053909302, |
|
"eval_ordinal_mae": 0.4044625461101532, |
|
"eval_runtime": 154.4211, |
|
"eval_samples_per_second": 25.767, |
|
"eval_steps_per_second": 3.225, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"grad_norm": 1.6340936422348022, |
|
"learning_rate": 5.388646288209607e-05, |
|
"loss": 0.1448, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"grad_norm": 3.3546087741851807, |
|
"learning_rate": 5.3613537117903936e-05, |
|
"loss": 0.1485, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"grad_norm": 0.5650043487548828, |
|
"learning_rate": 5.334061135371179e-05, |
|
"loss": 0.127, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"grad_norm": 0.8098490834236145, |
|
"learning_rate": 5.306768558951966e-05, |
|
"loss": 0.1172, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"eval_loss": 0.24209196865558624, |
|
"eval_na_accuracy": 0.8281853199005127, |
|
"eval_ordinal_accuracy": 0.6665703654289246, |
|
"eval_ordinal_mae": 0.3971348702907562, |
|
"eval_runtime": 153.4086, |
|
"eval_samples_per_second": 25.937, |
|
"eval_steps_per_second": 3.246, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"grad_norm": 1.127596378326416, |
|
"learning_rate": 5.2805676855895205e-05, |
|
"loss": 0.1808, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"grad_norm": 0.5557155609130859, |
|
"learning_rate": 5.253275109170306e-05, |
|
"loss": 0.09, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"grad_norm": 0.31405743956565857, |
|
"learning_rate": 5.2259825327510926e-05, |
|
"loss": 0.1061, |
|
"step": 4375 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"grad_norm": 6.475078105926514, |
|
"learning_rate": 5.198689956331878e-05, |
|
"loss": 0.1381, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"eval_loss": 0.2253342866897583, |
|
"eval_na_accuracy": 0.7857142686843872, |
|
"eval_ordinal_accuracy": 0.6792834401130676, |
|
"eval_ordinal_mae": 0.3813394010066986, |
|
"eval_runtime": 156.7221, |
|
"eval_samples_per_second": 25.389, |
|
"eval_steps_per_second": 3.178, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"grad_norm": 0.5825577974319458, |
|
"learning_rate": 5.171397379912663e-05, |
|
"loss": 0.1524, |
|
"step": 4425 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"grad_norm": 1.4979143142700195, |
|
"learning_rate": 5.14410480349345e-05, |
|
"loss": 0.142, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"grad_norm": 0.5638359785079956, |
|
"learning_rate": 5.116812227074236e-05, |
|
"loss": 0.1225, |
|
"step": 4475 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"grad_norm": 0.44809991121292114, |
|
"learning_rate": 5.089519650655022e-05, |
|
"loss": 0.1282, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"eval_loss": 0.23353050649166107, |
|
"eval_na_accuracy": 0.8436293601989746, |
|
"eval_ordinal_accuracy": 0.6509679555892944, |
|
"eval_ordinal_mae": 0.41455620527267456, |
|
"eval_runtime": 154.2508, |
|
"eval_samples_per_second": 25.796, |
|
"eval_steps_per_second": 3.229, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"grad_norm": 2.9640109539031982, |
|
"learning_rate": 5.062227074235808e-05, |
|
"loss": 0.1057, |
|
"step": 4525 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"grad_norm": 0.44117558002471924, |
|
"learning_rate": 5.034934497816595e-05, |
|
"loss": 0.13, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.6167708039283752, |
|
"learning_rate": 5.00764192139738e-05, |
|
"loss": 0.1223, |
|
"step": 4575 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"grad_norm": 2.0781619548797607, |
|
"learning_rate": 4.9803493449781664e-05, |
|
"loss": 0.0734, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"eval_loss": 0.23820282518863678, |
|
"eval_na_accuracy": 0.7895752787590027, |
|
"eval_ordinal_accuracy": 0.689685046672821, |
|
"eval_ordinal_mae": 0.38021621108055115, |
|
"eval_runtime": 154.7894, |
|
"eval_samples_per_second": 25.706, |
|
"eval_steps_per_second": 3.217, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"grad_norm": 3.165067195892334, |
|
"learning_rate": 4.9530567685589524e-05, |
|
"loss": 0.08, |
|
"step": 4625 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"grad_norm": 0.43800944089889526, |
|
"learning_rate": 4.9257641921397385e-05, |
|
"loss": 0.0571, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"grad_norm": 0.45996198058128357, |
|
"learning_rate": 4.898471615720524e-05, |
|
"loss": 0.0579, |
|
"step": 4675 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"grad_norm": 1.9151467084884644, |
|
"learning_rate": 4.87117903930131e-05, |
|
"loss": 0.1046, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"eval_loss": 0.2358408272266388, |
|
"eval_na_accuracy": 0.8011583089828491, |
|
"eval_ordinal_accuracy": 0.6873735785484314, |
|
"eval_ordinal_mae": 0.36946654319763184, |
|
"eval_runtime": 150.9459, |
|
"eval_samples_per_second": 26.36, |
|
"eval_steps_per_second": 3.299, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"grad_norm": 0.5566712021827698, |
|
"learning_rate": 4.843886462882096e-05, |
|
"loss": 0.068, |
|
"step": 4725 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"grad_norm": 0.5846825838088989, |
|
"learning_rate": 4.8165938864628827e-05, |
|
"loss": 0.0487, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"grad_norm": 0.3993060290813446, |
|
"learning_rate": 4.789301310043669e-05, |
|
"loss": 0.0546, |
|
"step": 4775 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"grad_norm": 0.6624791026115417, |
|
"learning_rate": 4.762008733624455e-05, |
|
"loss": 0.0529, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"eval_loss": 0.246298685669899, |
|
"eval_na_accuracy": 0.7934362888336182, |
|
"eval_ordinal_accuracy": 0.7096214890480042, |
|
"eval_ordinal_mae": 0.3595849573612213, |
|
"eval_runtime": 153.0771, |
|
"eval_samples_per_second": 25.993, |
|
"eval_steps_per_second": 3.253, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"grad_norm": 0.33810412883758545, |
|
"learning_rate": 4.734716157205241e-05, |
|
"loss": 0.0625, |
|
"step": 4825 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"grad_norm": 3.0376217365264893, |
|
"learning_rate": 4.707423580786026e-05, |
|
"loss": 0.0749, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"grad_norm": 0.4552454352378845, |
|
"learning_rate": 4.680131004366812e-05, |
|
"loss": 0.0536, |
|
"step": 4875 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"grad_norm": 0.6775699257850647, |
|
"learning_rate": 4.652838427947598e-05, |
|
"loss": 0.0687, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"eval_loss": 0.26146525144577026, |
|
"eval_na_accuracy": 0.7857142686843872, |
|
"eval_ordinal_accuracy": 0.6737936735153198, |
|
"eval_ordinal_mae": 0.39211294054985046, |
|
"eval_runtime": 156.1814, |
|
"eval_samples_per_second": 25.477, |
|
"eval_steps_per_second": 3.189, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"grad_norm": 0.623523473739624, |
|
"learning_rate": 4.625545851528384e-05, |
|
"loss": 0.068, |
|
"step": 4925 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"grad_norm": 0.5668771862983704, |
|
"learning_rate": 4.5982532751091704e-05, |
|
"loss": 0.0684, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"grad_norm": 0.16505593061447144, |
|
"learning_rate": 4.5709606986899564e-05, |
|
"loss": 0.0546, |
|
"step": 4975 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"grad_norm": 0.5560925602912903, |
|
"learning_rate": 4.5436681222707425e-05, |
|
"loss": 0.0613, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"eval_loss": 0.2542937397956848, |
|
"eval_na_accuracy": 0.8108108043670654, |
|
"eval_ordinal_accuracy": 0.6876625418663025, |
|
"eval_ordinal_mae": 0.36514100432395935, |
|
"eval_runtime": 156.8699, |
|
"eval_samples_per_second": 25.365, |
|
"eval_steps_per_second": 3.175, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"grad_norm": 0.440731942653656, |
|
"learning_rate": 4.5163755458515285e-05, |
|
"loss": 0.0967, |
|
"step": 5025 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"grad_norm": 0.26702314615249634, |
|
"learning_rate": 4.4890829694323146e-05, |
|
"loss": 0.0499, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"grad_norm": 0.9617776274681091, |
|
"learning_rate": 4.4617903930131006e-05, |
|
"loss": 0.0792, |
|
"step": 5075 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"grad_norm": 3.431128740310669, |
|
"learning_rate": 4.434497816593887e-05, |
|
"loss": 0.0591, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"eval_loss": 0.25389814376831055, |
|
"eval_na_accuracy": 0.7915058135986328, |
|
"eval_ordinal_accuracy": 0.6885293126106262, |
|
"eval_ordinal_mae": 0.3693314790725708, |
|
"eval_runtime": 155.0651, |
|
"eval_samples_per_second": 25.66, |
|
"eval_steps_per_second": 3.212, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"grad_norm": 5.355819225311279, |
|
"learning_rate": 4.407205240174673e-05, |
|
"loss": 0.0611, |
|
"step": 5125 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 0.4490479528903961, |
|
"learning_rate": 4.379912663755459e-05, |
|
"loss": 0.0676, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"grad_norm": 0.594838559627533, |
|
"learning_rate": 4.352620087336245e-05, |
|
"loss": 0.0567, |
|
"step": 5175 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"grad_norm": 0.2912037670612335, |
|
"learning_rate": 4.325327510917031e-05, |
|
"loss": 0.0474, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"eval_loss": 0.26495158672332764, |
|
"eval_na_accuracy": 0.799227774143219, |
|
"eval_ordinal_accuracy": 0.6836174726486206, |
|
"eval_ordinal_mae": 0.3721810579299927, |
|
"eval_runtime": 156.3533, |
|
"eval_samples_per_second": 25.449, |
|
"eval_steps_per_second": 3.185, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"grad_norm": 1.547253966331482, |
|
"learning_rate": 4.298034934497817e-05, |
|
"loss": 0.0583, |
|
"step": 5225 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"grad_norm": 0.6619101762771606, |
|
"learning_rate": 4.270742358078603e-05, |
|
"loss": 0.0713, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"grad_norm": 0.6830999255180359, |
|
"learning_rate": 4.243449781659389e-05, |
|
"loss": 0.068, |
|
"step": 5275 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"grad_norm": 0.6977065801620483, |
|
"learning_rate": 4.216157205240175e-05, |
|
"loss": 0.0511, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"eval_loss": 0.2630845010280609, |
|
"eval_na_accuracy": 0.8127413392066956, |
|
"eval_ordinal_accuracy": 0.686795711517334, |
|
"eval_ordinal_mae": 0.36813271045684814, |
|
"eval_runtime": 154.8738, |
|
"eval_samples_per_second": 25.692, |
|
"eval_steps_per_second": 3.216, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"grad_norm": 0.8015338778495789, |
|
"learning_rate": 4.188864628820961e-05, |
|
"loss": 0.0511, |
|
"step": 5325 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"grad_norm": 2.4707908630371094, |
|
"learning_rate": 4.161572052401747e-05, |
|
"loss": 0.0704, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"grad_norm": 0.526327908039093, |
|
"learning_rate": 4.134279475982533e-05, |
|
"loss": 0.0824, |
|
"step": 5375 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"grad_norm": 1.6800874471664429, |
|
"learning_rate": 4.1069868995633186e-05, |
|
"loss": 0.0683, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"eval_loss": 0.271382212638855, |
|
"eval_na_accuracy": 0.7837837934494019, |
|
"eval_ordinal_accuracy": 0.6954637169837952, |
|
"eval_ordinal_mae": 0.36302649974823, |
|
"eval_runtime": 155.8769, |
|
"eval_samples_per_second": 25.527, |
|
"eval_steps_per_second": 3.195, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"grad_norm": 0.5335835814476013, |
|
"learning_rate": 4.0796943231441046e-05, |
|
"loss": 0.056, |
|
"step": 5425 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"grad_norm": 2.2423832416534424, |
|
"learning_rate": 4.052401746724891e-05, |
|
"loss": 0.0787, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"grad_norm": 0.558754026889801, |
|
"learning_rate": 4.025109170305677e-05, |
|
"loss": 0.0481, |
|
"step": 5475 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.6908044815063477, |
|
"learning_rate": 3.9978165938864635e-05, |
|
"loss": 0.0654, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"eval_loss": 0.27688542008399963, |
|
"eval_na_accuracy": 0.799227774143219, |
|
"eval_ordinal_accuracy": 0.6787055730819702, |
|
"eval_ordinal_mae": 0.3673117458820343, |
|
"eval_runtime": 154.1587, |
|
"eval_samples_per_second": 25.811, |
|
"eval_steps_per_second": 3.23, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"grad_norm": 0.43695610761642456, |
|
"learning_rate": 3.9705240174672495e-05, |
|
"loss": 0.0539, |
|
"step": 5525 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"grad_norm": 3.290745496749878, |
|
"learning_rate": 3.9432314410480356e-05, |
|
"loss": 0.0608, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"grad_norm": 0.25710350275039673, |
|
"learning_rate": 3.9159388646288216e-05, |
|
"loss": 0.0657, |
|
"step": 5575 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"grad_norm": 0.6269412040710449, |
|
"learning_rate": 3.888646288209607e-05, |
|
"loss": 0.0581, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"eval_loss": 0.27770209312438965, |
|
"eval_na_accuracy": 0.799227774143219, |
|
"eval_ordinal_accuracy": 0.6951748132705688, |
|
"eval_ordinal_mae": 0.3627748191356659, |
|
"eval_runtime": 157.9718, |
|
"eval_samples_per_second": 25.188, |
|
"eval_steps_per_second": 3.152, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"grad_norm": 1.5579248666763306, |
|
"learning_rate": 3.861353711790393e-05, |
|
"loss": 0.0556, |
|
"step": 5625 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"grad_norm": 0.30162498354911804, |
|
"learning_rate": 3.834061135371179e-05, |
|
"loss": 0.0845, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"grad_norm": 0.43656259775161743, |
|
"learning_rate": 3.806768558951965e-05, |
|
"loss": 0.0616, |
|
"step": 5675 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"grad_norm": 0.4505567252635956, |
|
"learning_rate": 3.779475982532751e-05, |
|
"loss": 0.072, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"eval_loss": 0.29192212224006653, |
|
"eval_na_accuracy": 0.7683397531509399, |
|
"eval_ordinal_accuracy": 0.6888182759284973, |
|
"eval_ordinal_mae": 0.36100971698760986, |
|
"eval_runtime": 155.17, |
|
"eval_samples_per_second": 25.643, |
|
"eval_steps_per_second": 3.209, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"grad_norm": 0.8305689692497253, |
|
"learning_rate": 3.752183406113537e-05, |
|
"loss": 0.0489, |
|
"step": 5725 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"grad_norm": 0.5452491641044617, |
|
"learning_rate": 3.724890829694323e-05, |
|
"loss": 0.0584, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"grad_norm": 0.7193215489387512, |
|
"learning_rate": 3.697598253275109e-05, |
|
"loss": 0.0643, |
|
"step": 5775 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"grad_norm": 0.4529215693473816, |
|
"learning_rate": 3.6703056768558954e-05, |
|
"loss": 0.0737, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"eval_loss": 0.2807420790195465, |
|
"eval_na_accuracy": 0.7837837934494019, |
|
"eval_ordinal_accuracy": 0.6983530521392822, |
|
"eval_ordinal_mae": 0.3611612617969513, |
|
"eval_runtime": 154.656, |
|
"eval_samples_per_second": 25.728, |
|
"eval_steps_per_second": 3.22, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"grad_norm": 0.4110221564769745, |
|
"learning_rate": 3.6430131004366814e-05, |
|
"loss": 0.0548, |
|
"step": 5825 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"grad_norm": 0.7328541278839111, |
|
"learning_rate": 3.6157205240174675e-05, |
|
"loss": 0.0728, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"grad_norm": 0.2497873455286026, |
|
"learning_rate": 3.5884279475982535e-05, |
|
"loss": 0.0608, |
|
"step": 5875 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"grad_norm": 6.07706880569458, |
|
"learning_rate": 3.5611353711790396e-05, |
|
"loss": 0.0667, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"eval_loss": 0.292630672454834, |
|
"eval_na_accuracy": 0.7509652376174927, |
|
"eval_ordinal_accuracy": 0.7000866532325745, |
|
"eval_ordinal_mae": 0.36070069670677185, |
|
"eval_runtime": 156.6494, |
|
"eval_samples_per_second": 25.401, |
|
"eval_steps_per_second": 3.179, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"grad_norm": 2.6040148735046387, |
|
"learning_rate": 3.5338427947598256e-05, |
|
"loss": 0.0745, |
|
"step": 5925 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"grad_norm": 0.428023099899292, |
|
"learning_rate": 3.506550218340611e-05, |
|
"loss": 0.0455, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"grad_norm": 0.24350808560848236, |
|
"learning_rate": 3.479257641921397e-05, |
|
"loss": 0.067, |
|
"step": 5975 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"grad_norm": 0.6879482865333557, |
|
"learning_rate": 3.451965065502184e-05, |
|
"loss": 0.0669, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"eval_loss": 0.2874707579612732, |
|
"eval_na_accuracy": 0.799227774143219, |
|
"eval_ordinal_accuracy": 0.6891071796417236, |
|
"eval_ordinal_mae": 0.36164331436157227, |
|
"eval_runtime": 155.7575, |
|
"eval_samples_per_second": 25.546, |
|
"eval_steps_per_second": 3.197, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"grad_norm": 0.786054790019989, |
|
"learning_rate": 3.42467248908297e-05, |
|
"loss": 0.0736, |
|
"step": 6025 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"grad_norm": 0.7047861218452454, |
|
"learning_rate": 3.397379912663756e-05, |
|
"loss": 0.0551, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"grad_norm": 0.6863640546798706, |
|
"learning_rate": 3.370087336244542e-05, |
|
"loss": 0.0643, |
|
"step": 6075 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"grad_norm": 0.6037794947624207, |
|
"learning_rate": 3.342794759825328e-05, |
|
"loss": 0.0535, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"eval_loss": 0.2853965759277344, |
|
"eval_na_accuracy": 0.7683397531509399, |
|
"eval_ordinal_accuracy": 0.6960415840148926, |
|
"eval_ordinal_mae": 0.35648074746131897, |
|
"eval_runtime": 156.7027, |
|
"eval_samples_per_second": 25.392, |
|
"eval_steps_per_second": 3.178, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"grad_norm": 0.30565348267555237, |
|
"learning_rate": 3.315502183406114e-05, |
|
"loss": 0.0412, |
|
"step": 6125 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"grad_norm": 0.3631564974784851, |
|
"learning_rate": 3.2882096069868994e-05, |
|
"loss": 0.0584, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"grad_norm": 0.6103675365447998, |
|
"learning_rate": 3.2609170305676854e-05, |
|
"loss": 0.0481, |
|
"step": 6175 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"grad_norm": 10.306925773620605, |
|
"learning_rate": 3.2336244541484715e-05, |
|
"loss": 0.06, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"eval_loss": 0.28473392128944397, |
|
"eval_na_accuracy": 0.7741312980651855, |
|
"eval_ordinal_accuracy": 0.7015313506126404, |
|
"eval_ordinal_mae": 0.3500910997390747, |
|
"eval_runtime": 154.8993, |
|
"eval_samples_per_second": 25.688, |
|
"eval_steps_per_second": 3.215, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"grad_norm": 0.5145313143730164, |
|
"learning_rate": 3.2063318777292575e-05, |
|
"loss": 0.0586, |
|
"step": 6225 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"grad_norm": 0.9967983365058899, |
|
"learning_rate": 3.1790393013100436e-05, |
|
"loss": 0.0736, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"grad_norm": 0.652100682258606, |
|
"learning_rate": 3.15174672489083e-05, |
|
"loss": 0.057, |
|
"step": 6275 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"grad_norm": 0.64205402135849, |
|
"learning_rate": 3.1244541484716164e-05, |
|
"loss": 0.0534, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"eval_loss": 0.28205522894859314, |
|
"eval_na_accuracy": 0.7625482678413391, |
|
"eval_ordinal_accuracy": 0.7006645202636719, |
|
"eval_ordinal_mae": 0.34947505593299866, |
|
"eval_runtime": 155.841, |
|
"eval_samples_per_second": 25.532, |
|
"eval_steps_per_second": 3.196, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"grad_norm": 1.2284494638442993, |
|
"learning_rate": 3.097161572052402e-05, |
|
"loss": 0.0517, |
|
"step": 6325 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"grad_norm": 0.4024920165538788, |
|
"learning_rate": 3.069868995633188e-05, |
|
"loss": 0.0857, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"grad_norm": 0.5288224816322327, |
|
"learning_rate": 3.0425764192139738e-05, |
|
"loss": 0.051, |
|
"step": 6375 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"grad_norm": 0.6519297957420349, |
|
"learning_rate": 3.01528384279476e-05, |
|
"loss": 0.0526, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"eval_loss": 0.28344523906707764, |
|
"eval_na_accuracy": 0.7625482678413391, |
|
"eval_ordinal_accuracy": 0.670037567615509, |
|
"eval_ordinal_mae": 0.385305792093277, |
|
"eval_runtime": 154.7606, |
|
"eval_samples_per_second": 25.711, |
|
"eval_steps_per_second": 3.218, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"grad_norm": 0.7009943723678589, |
|
"learning_rate": 2.987991266375546e-05, |
|
"loss": 0.0624, |
|
"step": 6425 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"grad_norm": 0.6731958389282227, |
|
"learning_rate": 2.9606986899563323e-05, |
|
"loss": 0.068, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"grad_norm": 0.3764704763889313, |
|
"learning_rate": 2.9334061135371184e-05, |
|
"loss": 0.0677, |
|
"step": 6475 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"grad_norm": 0.5906457901000977, |
|
"learning_rate": 2.9061135371179037e-05, |
|
"loss": 0.0841, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"eval_loss": 0.28385940194129944, |
|
"eval_na_accuracy": 0.7490347623825073, |
|
"eval_ordinal_accuracy": 0.7044206857681274, |
|
"eval_ordinal_mae": 0.35039493441581726, |
|
"eval_runtime": 156.3169, |
|
"eval_samples_per_second": 25.455, |
|
"eval_steps_per_second": 3.186, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"grad_norm": 0.3305709958076477, |
|
"learning_rate": 2.8788209606986898e-05, |
|
"loss": 0.0652, |
|
"step": 6525 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"grad_norm": 0.4257774353027344, |
|
"learning_rate": 2.8526200873362446e-05, |
|
"loss": 0.0608, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"grad_norm": 0.9447629451751709, |
|
"learning_rate": 2.8253275109170307e-05, |
|
"loss": 0.0527, |
|
"step": 6575 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"grad_norm": 0.8275176882743835, |
|
"learning_rate": 2.7980349344978167e-05, |
|
"loss": 0.0529, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"eval_loss": 0.2857762575149536, |
|
"eval_na_accuracy": 0.7818532586097717, |
|
"eval_ordinal_accuracy": 0.689685046672821, |
|
"eval_ordinal_mae": 0.35951367020606995, |
|
"eval_runtime": 156.4554, |
|
"eval_samples_per_second": 25.432, |
|
"eval_steps_per_second": 3.183, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"grad_norm": 0.5894487500190735, |
|
"learning_rate": 2.770742358078603e-05, |
|
"loss": 0.0643, |
|
"step": 6625 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"grad_norm": 0.8872036933898926, |
|
"learning_rate": 2.743449781659389e-05, |
|
"loss": 0.0578, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"grad_norm": 0.511169970035553, |
|
"learning_rate": 2.7161572052401745e-05, |
|
"loss": 0.0393, |
|
"step": 6675 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"grad_norm": 0.5537564754486084, |
|
"learning_rate": 2.6888646288209606e-05, |
|
"loss": 0.0811, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"eval_loss": 0.28430891036987305, |
|
"eval_na_accuracy": 0.7799227833747864, |
|
"eval_ordinal_accuracy": 0.7047096490859985, |
|
"eval_ordinal_mae": 0.34795093536376953, |
|
"eval_runtime": 156.7138, |
|
"eval_samples_per_second": 25.39, |
|
"eval_steps_per_second": 3.178, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"grad_norm": 1.6618189811706543, |
|
"learning_rate": 2.661572052401747e-05, |
|
"loss": 0.0705, |
|
"step": 6725 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"grad_norm": 0.4913439154624939, |
|
"learning_rate": 2.634279475982533e-05, |
|
"loss": 0.0875, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"grad_norm": 0.5068143010139465, |
|
"learning_rate": 2.606986899563319e-05, |
|
"loss": 0.0557, |
|
"step": 6775 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"grad_norm": 0.23857353627681732, |
|
"learning_rate": 2.579694323144105e-05, |
|
"loss": 0.0502, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"eval_loss": 0.28915783762931824, |
|
"eval_na_accuracy": 0.7818532586097717, |
|
"eval_ordinal_accuracy": 0.700953483581543, |
|
"eval_ordinal_mae": 0.34830236434936523, |
|
"eval_runtime": 155.6303, |
|
"eval_samples_per_second": 25.567, |
|
"eval_steps_per_second": 3.2, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"grad_norm": 0.9183345437049866, |
|
"learning_rate": 2.552401746724891e-05, |
|
"loss": 0.0514, |
|
"step": 6825 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"grad_norm": 0.6839514374732971, |
|
"learning_rate": 2.525109170305677e-05, |
|
"loss": 0.0682, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 2.4068310260772705, |
|
"learning_rate": 2.4978165938864632e-05, |
|
"loss": 0.0709, |
|
"step": 6875 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"grad_norm": 0.04599028080701828, |
|
"learning_rate": 2.470524017467249e-05, |
|
"loss": 0.0273, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"eval_loss": 0.2801385819911957, |
|
"eval_na_accuracy": 0.8108108043670654, |
|
"eval_ordinal_accuracy": 0.6957526803016663, |
|
"eval_ordinal_mae": 0.34536227583885193, |
|
"eval_runtime": 156.2048, |
|
"eval_samples_per_second": 25.473, |
|
"eval_steps_per_second": 3.188, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"grad_norm": 0.5234570503234863, |
|
"learning_rate": 2.443231441048035e-05, |
|
"loss": 0.0273, |
|
"step": 6925 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"grad_norm": 0.4756307601928711, |
|
"learning_rate": 2.415938864628821e-05, |
|
"loss": 0.0338, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"grad_norm": 0.40528589487075806, |
|
"learning_rate": 2.388646288209607e-05, |
|
"loss": 0.0391, |
|
"step": 6975 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"grad_norm": 0.4284062385559082, |
|
"learning_rate": 2.361353711790393e-05, |
|
"loss": 0.0306, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"eval_loss": 0.2782219350337982, |
|
"eval_na_accuracy": 0.8030887842178345, |
|
"eval_ordinal_accuracy": 0.7023981213569641, |
|
"eval_ordinal_mae": 0.3443802297115326, |
|
"eval_runtime": 155.0659, |
|
"eval_samples_per_second": 25.66, |
|
"eval_steps_per_second": 3.212, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"grad_norm": 0.420832097530365, |
|
"learning_rate": 2.3340611353711792e-05, |
|
"loss": 0.0308, |
|
"step": 7025 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"grad_norm": 0.12627224624156952, |
|
"learning_rate": 2.3067685589519653e-05, |
|
"loss": 0.0219, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"grad_norm": 0.6852111220359802, |
|
"learning_rate": 2.279475982532751e-05, |
|
"loss": 0.0289, |
|
"step": 7075 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"grad_norm": 0.4591895043849945, |
|
"learning_rate": 2.252183406113537e-05, |
|
"loss": 0.0257, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"eval_loss": 0.2796567380428314, |
|
"eval_na_accuracy": 0.7934362888336182, |
|
"eval_ordinal_accuracy": 0.7084657549858093, |
|
"eval_ordinal_mae": 0.33523455262184143, |
|
"eval_runtime": 155.6618, |
|
"eval_samples_per_second": 25.562, |
|
"eval_steps_per_second": 3.199, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"grad_norm": 7.793858051300049, |
|
"learning_rate": 2.2248908296943234e-05, |
|
"loss": 0.0323, |
|
"step": 7125 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"grad_norm": 0.4730852246284485, |
|
"learning_rate": 2.1975982532751095e-05, |
|
"loss": 0.0432, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"grad_norm": 0.9067665338516235, |
|
"learning_rate": 2.170305676855895e-05, |
|
"loss": 0.0324, |
|
"step": 7175 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"grad_norm": 0.23400144279003143, |
|
"learning_rate": 2.1430131004366812e-05, |
|
"loss": 0.0241, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"eval_loss": 0.2827575206756592, |
|
"eval_na_accuracy": 0.7953668236732483, |
|
"eval_ordinal_accuracy": 0.7058653831481934, |
|
"eval_ordinal_mae": 0.33425432443618774, |
|
"eval_runtime": 157.3972, |
|
"eval_samples_per_second": 25.28, |
|
"eval_steps_per_second": 3.164, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"grad_norm": 0.6540936231613159, |
|
"learning_rate": 2.1157205240174673e-05, |
|
"loss": 0.0266, |
|
"step": 7225 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"grad_norm": 0.3661313056945801, |
|
"learning_rate": 2.0884279475982536e-05, |
|
"loss": 0.03, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"grad_norm": 0.48538270592689514, |
|
"learning_rate": 2.0611353711790394e-05, |
|
"loss": 0.0268, |
|
"step": 7275 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"grad_norm": 0.12903234362602234, |
|
"learning_rate": 2.0338427947598254e-05, |
|
"loss": 0.0255, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"eval_loss": 0.28903913497924805, |
|
"eval_na_accuracy": 0.8050193190574646, |
|
"eval_ordinal_accuracy": 0.6980641484260559, |
|
"eval_ordinal_mae": 0.3364236354827881, |
|
"eval_runtime": 155.705, |
|
"eval_samples_per_second": 25.555, |
|
"eval_steps_per_second": 3.198, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"grad_norm": 0.6427123546600342, |
|
"learning_rate": 2.0065502183406115e-05, |
|
"loss": 0.0256, |
|
"step": 7325 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"grad_norm": 0.9630228281021118, |
|
"learning_rate": 1.9792576419213975e-05, |
|
"loss": 0.0261, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"grad_norm": 0.4561873972415924, |
|
"learning_rate": 1.9519650655021836e-05, |
|
"loss": 0.0337, |
|
"step": 7375 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"grad_norm": 0.40141957998275757, |
|
"learning_rate": 1.9246724890829696e-05, |
|
"loss": 0.0245, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"eval_loss": 0.29058343172073364, |
|
"eval_na_accuracy": 0.799227774143219, |
|
"eval_ordinal_accuracy": 0.7044206857681274, |
|
"eval_ordinal_mae": 0.3391839265823364, |
|
"eval_runtime": 156.6469, |
|
"eval_samples_per_second": 25.401, |
|
"eval_steps_per_second": 3.179, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"grad_norm": 0.04360814392566681, |
|
"learning_rate": 1.8973799126637557e-05, |
|
"loss": 0.0271, |
|
"step": 7425 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"grad_norm": 0.2782489061355591, |
|
"learning_rate": 1.8700873362445414e-05, |
|
"loss": 0.0345, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"grad_norm": 0.4086083769798279, |
|
"learning_rate": 1.8427947598253274e-05, |
|
"loss": 0.0519, |
|
"step": 7475 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"grad_norm": 0.37691470980644226, |
|
"learning_rate": 1.8155021834061138e-05, |
|
"loss": 0.0232, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"eval_loss": 0.28911489248275757, |
|
"eval_na_accuracy": 0.7857142686843872, |
|
"eval_ordinal_accuracy": 0.7035539150238037, |
|
"eval_ordinal_mae": 0.3337612450122833, |
|
"eval_runtime": 155.5823, |
|
"eval_samples_per_second": 25.575, |
|
"eval_steps_per_second": 3.201, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"grad_norm": 0.220824733376503, |
|
"learning_rate": 1.7882096069869e-05, |
|
"loss": 0.0274, |
|
"step": 7525 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"grad_norm": 0.38171643018722534, |
|
"learning_rate": 1.7609170305676856e-05, |
|
"loss": 0.0257, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"grad_norm": 0.6748324632644653, |
|
"learning_rate": 1.7336244541484716e-05, |
|
"loss": 0.0212, |
|
"step": 7575 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"grad_norm": 0.42487770318984985, |
|
"learning_rate": 1.7063318777292577e-05, |
|
"loss": 0.0352, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"eval_loss": 0.2908113896846771, |
|
"eval_na_accuracy": 0.7895752787590027, |
|
"eval_ordinal_accuracy": 0.6925743818283081, |
|
"eval_ordinal_mae": 0.34433993697166443, |
|
"eval_runtime": 154.4772, |
|
"eval_samples_per_second": 25.758, |
|
"eval_steps_per_second": 3.224, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"grad_norm": 0.6813647747039795, |
|
"learning_rate": 1.6790393013100437e-05, |
|
"loss": 0.0405, |
|
"step": 7625 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"grad_norm": 0.46545034646987915, |
|
"learning_rate": 1.6517467248908298e-05, |
|
"loss": 0.0252, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"grad_norm": 0.32561665773391724, |
|
"learning_rate": 1.6244541484716158e-05, |
|
"loss": 0.0287, |
|
"step": 7675 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"grad_norm": 0.38751569390296936, |
|
"learning_rate": 1.597161572052402e-05, |
|
"loss": 0.0376, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"eval_loss": 0.2876608371734619, |
|
"eval_na_accuracy": 0.7915058135986328, |
|
"eval_ordinal_accuracy": 0.7049985527992249, |
|
"eval_ordinal_mae": 0.331503301858902, |
|
"eval_runtime": 155.51, |
|
"eval_samples_per_second": 25.587, |
|
"eval_steps_per_second": 3.202, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"grad_norm": 0.317843496799469, |
|
"learning_rate": 1.5698689956331876e-05, |
|
"loss": 0.0346, |
|
"step": 7725 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"grad_norm": 0.09346043318510056, |
|
"learning_rate": 1.542576419213974e-05, |
|
"loss": 0.0187, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"grad_norm": 0.508102536201477, |
|
"learning_rate": 1.51528384279476e-05, |
|
"loss": 0.0208, |
|
"step": 7775 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"grad_norm": 0.3914332687854767, |
|
"learning_rate": 1.487991266375546e-05, |
|
"loss": 0.025, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"eval_loss": 0.2889249920845032, |
|
"eval_na_accuracy": 0.7895752787590027, |
|
"eval_ordinal_accuracy": 0.7075989842414856, |
|
"eval_ordinal_mae": 0.33163872361183167, |
|
"eval_runtime": 156.7804, |
|
"eval_samples_per_second": 25.379, |
|
"eval_steps_per_second": 3.176, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"grad_norm": 0.6848337054252625, |
|
"learning_rate": 1.460698689956332e-05, |
|
"loss": 0.0259, |
|
"step": 7825 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"grad_norm": 0.5732501149177551, |
|
"learning_rate": 1.433406113537118e-05, |
|
"loss": 0.0222, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"grad_norm": 0.638590931892395, |
|
"learning_rate": 1.406113537117904e-05, |
|
"loss": 0.0237, |
|
"step": 7875 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"grad_norm": 0.7879953980445862, |
|
"learning_rate": 1.37882096069869e-05, |
|
"loss": 0.0225, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"eval_loss": 0.2901510000228882, |
|
"eval_na_accuracy": 0.7818532586097717, |
|
"eval_ordinal_accuracy": 0.7070211172103882, |
|
"eval_ordinal_mae": 0.32855790853500366, |
|
"eval_runtime": 157.7082, |
|
"eval_samples_per_second": 25.23, |
|
"eval_steps_per_second": 3.158, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"grad_norm": 0.45826223492622375, |
|
"learning_rate": 1.351528384279476e-05, |
|
"loss": 0.03, |
|
"step": 7925 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"grad_norm": 0.5093744993209839, |
|
"learning_rate": 1.324235807860262e-05, |
|
"loss": 0.023, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"grad_norm": 0.39197513461112976, |
|
"learning_rate": 1.2969432314410482e-05, |
|
"loss": 0.0263, |
|
"step": 7975 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"grad_norm": 0.4618348777294159, |
|
"learning_rate": 1.269650655021834e-05, |
|
"loss": 0.024, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"eval_loss": 0.29018646478652954, |
|
"eval_na_accuracy": 0.7953668236732483, |
|
"eval_ordinal_accuracy": 0.7101993560791016, |
|
"eval_ordinal_mae": 0.3269650340080261, |
|
"eval_runtime": 155.3244, |
|
"eval_samples_per_second": 25.617, |
|
"eval_steps_per_second": 3.206, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"grad_norm": 0.5933849811553955, |
|
"learning_rate": 1.2423580786026202e-05, |
|
"loss": 0.0225, |
|
"step": 8025 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"grad_norm": 0.04294763505458832, |
|
"learning_rate": 1.2150655021834062e-05, |
|
"loss": 0.0228, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"grad_norm": 0.5286312699317932, |
|
"learning_rate": 1.1877729257641921e-05, |
|
"loss": 0.03, |
|
"step": 8075 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"grad_norm": 0.09194879978895187, |
|
"learning_rate": 1.1604803493449783e-05, |
|
"loss": 0.0404, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"eval_loss": 0.29498282074928284, |
|
"eval_na_accuracy": 0.7895752787590027, |
|
"eval_ordinal_accuracy": 0.705287516117096, |
|
"eval_ordinal_mae": 0.32936587929725647, |
|
"eval_runtime": 156.3299, |
|
"eval_samples_per_second": 25.453, |
|
"eval_steps_per_second": 3.186, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"grad_norm": 0.3700167238712311, |
|
"learning_rate": 1.1331877729257642e-05, |
|
"loss": 0.0293, |
|
"step": 8125 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"grad_norm": 0.10644713789224625, |
|
"learning_rate": 1.1058951965065504e-05, |
|
"loss": 0.0265, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"grad_norm": 0.4384317994117737, |
|
"learning_rate": 1.0786026200873363e-05, |
|
"loss": 0.0274, |
|
"step": 8175 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"grad_norm": 0.4964589774608612, |
|
"learning_rate": 1.0513100436681223e-05, |
|
"loss": 0.0221, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"eval_loss": 0.2923668920993805, |
|
"eval_na_accuracy": 0.7934362888336182, |
|
"eval_ordinal_accuracy": 0.7093325853347778, |
|
"eval_ordinal_mae": 0.3270767033100128, |
|
"eval_runtime": 156.0459, |
|
"eval_samples_per_second": 25.499, |
|
"eval_steps_per_second": 3.191, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"grad_norm": 0.20232300460338593, |
|
"learning_rate": 1.0240174672489084e-05, |
|
"loss": 0.0253, |
|
"step": 8225 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"grad_norm": 0.19642572104930878, |
|
"learning_rate": 9.967248908296943e-06, |
|
"loss": 0.0309, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"grad_norm": 0.12435351312160492, |
|
"learning_rate": 9.694323144104805e-06, |
|
"loss": 0.0275, |
|
"step": 8275 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"grad_norm": 0.3429054915904999, |
|
"learning_rate": 9.421397379912664e-06, |
|
"loss": 0.0182, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"eval_loss": 0.29207319021224976, |
|
"eval_na_accuracy": 0.7934362888336182, |
|
"eval_ordinal_accuracy": 0.7104883193969727, |
|
"eval_ordinal_mae": 0.32371771335601807, |
|
"eval_runtime": 157.1157, |
|
"eval_samples_per_second": 25.325, |
|
"eval_steps_per_second": 3.17, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"grad_norm": 0.11734936386346817, |
|
"learning_rate": 9.148471615720524e-06, |
|
"loss": 0.0341, |
|
"step": 8325 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"grad_norm": 0.18671230971813202, |
|
"learning_rate": 8.875545851528385e-06, |
|
"loss": 0.0257, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"grad_norm": 0.1754232794046402, |
|
"learning_rate": 8.602620087336245e-06, |
|
"loss": 0.0306, |
|
"step": 8375 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"grad_norm": 0.2011016607284546, |
|
"learning_rate": 8.329694323144106e-06, |
|
"loss": 0.0304, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"eval_loss": 0.29112711548805237, |
|
"eval_na_accuracy": 0.7857142686843872, |
|
"eval_ordinal_accuracy": 0.7133776545524597, |
|
"eval_ordinal_mae": 0.3231416344642639, |
|
"eval_runtime": 155.6543, |
|
"eval_samples_per_second": 25.563, |
|
"eval_steps_per_second": 3.199, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"grad_norm": 1.1286529302597046, |
|
"learning_rate": 8.056768558951966e-06, |
|
"loss": 0.0285, |
|
"step": 8425 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"grad_norm": 0.4878416359424591, |
|
"learning_rate": 7.783842794759825e-06, |
|
"loss": 0.0163, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"grad_norm": 0.5879720449447632, |
|
"learning_rate": 7.510917030567686e-06, |
|
"loss": 0.0433, |
|
"step": 8475 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"grad_norm": 0.8792235255241394, |
|
"learning_rate": 7.237991266375546e-06, |
|
"loss": 0.0193, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"eval_loss": 0.2914559841156006, |
|
"eval_na_accuracy": 0.7837837934494019, |
|
"eval_ordinal_accuracy": 0.7165558934211731, |
|
"eval_ordinal_mae": 0.32214629650115967, |
|
"eval_runtime": 155.1881, |
|
"eval_samples_per_second": 25.64, |
|
"eval_steps_per_second": 3.209, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"grad_norm": 0.2179727405309677, |
|
"learning_rate": 6.9650655021834055e-06, |
|
"loss": 0.0237, |
|
"step": 8525 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"grad_norm": 0.7128644585609436, |
|
"learning_rate": 6.692139737991267e-06, |
|
"loss": 0.0306, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"grad_norm": 0.2546403110027313, |
|
"learning_rate": 6.4192139737991265e-06, |
|
"loss": 0.0215, |
|
"step": 8575 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"grad_norm": 0.39975956082344055, |
|
"learning_rate": 6.146288209606987e-06, |
|
"loss": 0.0223, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"eval_loss": 0.29310527443885803, |
|
"eval_na_accuracy": 0.7895752787590027, |
|
"eval_ordinal_accuracy": 0.7122219204902649, |
|
"eval_ordinal_mae": 0.32349658012390137, |
|
"eval_runtime": 154.5645, |
|
"eval_samples_per_second": 25.743, |
|
"eval_steps_per_second": 3.222, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"grad_norm": 0.34678587317466736, |
|
"learning_rate": 5.884279475982533e-06, |
|
"loss": 0.0203, |
|
"step": 8625 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"grad_norm": 0.46029233932495117, |
|
"learning_rate": 5.611353711790393e-06, |
|
"loss": 0.0252, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"grad_norm": 0.3514462113380432, |
|
"learning_rate": 5.338427947598254e-06, |
|
"loss": 0.0182, |
|
"step": 8675 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"grad_norm": 0.9231175184249878, |
|
"learning_rate": 5.065502183406113e-06, |
|
"loss": 0.0254, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"eval_loss": 0.2946593761444092, |
|
"eval_na_accuracy": 0.7876448035240173, |
|
"eval_ordinal_accuracy": 0.7174227237701416, |
|
"eval_ordinal_mae": 0.32142174243927, |
|
"eval_runtime": 155.717, |
|
"eval_samples_per_second": 25.553, |
|
"eval_steps_per_second": 3.198, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"grad_norm": 0.659959614276886, |
|
"learning_rate": 4.792576419213974e-06, |
|
"loss": 0.0277, |
|
"step": 8725 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"grad_norm": 1.0919593572616577, |
|
"learning_rate": 4.519650655021834e-06, |
|
"loss": 0.0306, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"grad_norm": 0.9325423836708069, |
|
"learning_rate": 4.246724890829695e-06, |
|
"loss": 0.0331, |
|
"step": 8775 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"grad_norm": 0.21544720232486725, |
|
"learning_rate": 3.9737991266375545e-06, |
|
"loss": 0.0215, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"eval_loss": 0.293580025434494, |
|
"eval_na_accuracy": 0.7857142686843872, |
|
"eval_ordinal_accuracy": 0.7127997875213623, |
|
"eval_ordinal_mae": 0.3201707899570465, |
|
"eval_runtime": 157.8828, |
|
"eval_samples_per_second": 25.202, |
|
"eval_steps_per_second": 3.154, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"grad_norm": 0.43472805619239807, |
|
"learning_rate": 3.7008733624454154e-06, |
|
"loss": 0.0345, |
|
"step": 8825 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"grad_norm": 0.18415190279483795, |
|
"learning_rate": 3.4279475982532755e-06, |
|
"loss": 0.0268, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"grad_norm": 0.8721256852149963, |
|
"learning_rate": 3.155021834061136e-06, |
|
"loss": 0.0258, |
|
"step": 8875 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"grad_norm": 0.1487792730331421, |
|
"learning_rate": 2.8820960698689956e-06, |
|
"loss": 0.0312, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"eval_loss": 0.295600026845932, |
|
"eval_na_accuracy": 0.7857142686843872, |
|
"eval_ordinal_accuracy": 0.7133776545524597, |
|
"eval_ordinal_mae": 0.32104432582855225, |
|
"eval_runtime": 158.1112, |
|
"eval_samples_per_second": 25.166, |
|
"eval_steps_per_second": 3.15, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"grad_norm": 0.6491901278495789, |
|
"learning_rate": 2.609170305676856e-06, |
|
"loss": 0.0526, |
|
"step": 8925 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"grad_norm": 0.40534159541130066, |
|
"learning_rate": 2.336244541484716e-06, |
|
"loss": 0.0227, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"grad_norm": 0.06649890542030334, |
|
"learning_rate": 2.0633187772925767e-06, |
|
"loss": 0.0157, |
|
"step": 8975 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"grad_norm": 0.2696411609649658, |
|
"learning_rate": 1.7903930131004367e-06, |
|
"loss": 0.0189, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"eval_loss": 0.2945779263973236, |
|
"eval_na_accuracy": 0.7876448035240173, |
|
"eval_ordinal_accuracy": 0.7125108242034912, |
|
"eval_ordinal_mae": 0.3210395574569702, |
|
"eval_runtime": 157.9016, |
|
"eval_samples_per_second": 25.199, |
|
"eval_steps_per_second": 3.154, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"grad_norm": 0.16493447124958038, |
|
"learning_rate": 1.517467248908297e-06, |
|
"loss": 0.0204, |
|
"step": 9025 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"grad_norm": 0.5132766366004944, |
|
"learning_rate": 1.2445414847161573e-06, |
|
"loss": 0.0182, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"grad_norm": 0.06899993121623993, |
|
"learning_rate": 9.716157205240176e-07, |
|
"loss": 0.0194, |
|
"step": 9075 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"grad_norm": 0.09663155674934387, |
|
"learning_rate": 6.986899563318777e-07, |
|
"loss": 0.021, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"eval_loss": 0.2948993146419525, |
|
"eval_na_accuracy": 0.7876448035240173, |
|
"eval_ordinal_accuracy": 0.7145333886146545, |
|
"eval_ordinal_mae": 0.31944769620895386, |
|
"eval_runtime": 157.4833, |
|
"eval_samples_per_second": 25.266, |
|
"eval_steps_per_second": 3.162, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"grad_norm": 0.4333021342754364, |
|
"learning_rate": 4.2576419213973797e-07, |
|
"loss": 0.0262, |
|
"step": 9125 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"grad_norm": 0.39392733573913574, |
|
"learning_rate": 1.5283842794759825e-07, |
|
"loss": 0.0247, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"step": 9160, |
|
"total_flos": 1.1353293455817277e+19, |
|
"train_loss": 0.11642740544208273, |
|
"train_runtime": 27830.4973, |
|
"train_samples_per_second": 5.264, |
|
"train_steps_per_second": 0.329 |
|
} |
|
], |
|
"logging_steps": 25, |
|
"max_steps": 9160, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 4, |
|
"save_steps": 100, |
|
"total_flos": 1.1353293455817277e+19, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|