albert-base-2-spanish-distilled-qa-mlqa / validation_metrics.json
josecannete's picture
Adding model distilled on MLQA
b51fbd2
[
{
"exact_match": 25.8,
"f1": 46.722838294266445,
"validation_loss": 2.948255568742752,
"epoch": 1
},
{
"exact_match": 27.6,
"f1": 48.25081938654576,
"validation_loss": 2.944710910320282,
"epoch": 2
},
{
"exact_match": 26.0,
"f1": 47.46244452638331,
"validation_loss": 2.96934711933136,
"epoch": 3
},
{
"exact_match": 27.4,
"f1": 48.20678201179677,
"validation_loss": 3.0276510566473007,
"epoch": 4
},
{
"exact_match": 26.4,
"f1": 47.197794084655904,
"validation_loss": 3.063481956720352,
"epoch": 5
},
{
"exact_match": 23.4,
"f1": 44.975456709186986,
"validation_loss": 3.084901735186577,
"epoch": 6
},
{
"exact_match": 22.6,
"f1": 43.74081656374764,
"validation_loss": 3.1264059096574783,
"epoch": 7
},
{
"exact_match": 22.6,
"f1": 44.25119042546109,
"validation_loss": 3.058358907699585,
"epoch": 8
},
{
"exact_match": 23.6,
"f1": 45.70690541362367,
"validation_loss": 2.9738192558288574,
"epoch": 9
},
{
"exact_match": 24.0,
"f1": 46.52646761825752,
"validation_loss": 2.9452848583459854,
"epoch": 10
},
{
"exact_match": 25.0,
"f1": 48.21875864127637,
"validation_loss": 2.8925177305936813,
"epoch": 11
},
{
"exact_match": 24.0,
"f1": 45.59643675730025,
"validation_loss": 2.989051178097725,
"epoch": 12
},
{
"exact_match": 25.4,
"f1": 47.13075259446226,
"validation_loss": 2.8926149755716324,
"epoch": 13
},
{
"exact_match": 24.8,
"f1": 47.43411057647901,
"validation_loss": 2.9083966314792633,
"epoch": 14
},
{
"exact_match": 24.0,
"f1": 46.771098812789866,
"validation_loss": 2.927834153175354,
"epoch": 15
},
{
"exact_match": 24.6,
"f1": 46.55756314258861,
"validation_loss": 2.9222313463687897,
"epoch": 16
},
{
"exact_match": 24.4,
"f1": 47.982165321871186,
"validation_loss": 2.8714338839054108,
"epoch": 17
},
{
"exact_match": 24.8,
"f1": 45.460497052158004,
"validation_loss": 2.9583148807287216,
"epoch": 18
},
{
"exact_match": 24.8,
"f1": 47.18767224678249,
"validation_loss": 2.8744203448295593,
"epoch": 19
},
{
"exact_match": 23.6,
"f1": 46.72527578411693,
"validation_loss": 2.9458623826503754,
"epoch": 20
},
{
"exact_match": 24.4,
"f1": 47.62342502115321,
"validation_loss": 2.895026355981827,
"epoch": 21
},
{
"exact_match": 25.2,
"f1": 47.180981487761684,
"validation_loss": 2.9020532816648483,
"epoch": 22
},
{
"exact_match": 24.8,
"f1": 47.27062085852144,
"validation_loss": 2.8940925151109695,
"epoch": 23
},
{
"exact_match": 24.2,
"f1": 47.215759984862146,
"validation_loss": 2.884490445256233,
"epoch": 24
},
{
"exact_match": 24.0,
"f1": 46.80483325229827,
"validation_loss": 2.882307842373848,
"epoch": 25
},
{
"exact_match": 25.2,
"f1": 48.131312525430154,
"validation_loss": 2.8617368042469025,
"epoch": 26
},
{
"exact_match": 25.4,
"f1": 47.590178827962205,
"validation_loss": 2.8828313052654266,
"epoch": 27
},
{
"exact_match": 23.4,
"f1": 47.367778368954845,
"validation_loss": 2.894962042570114,
"epoch": 28
},
{
"exact_match": 25.2,
"f1": 48.03807798762605,
"validation_loss": 2.876130074262619,
"epoch": 29
},
{
"exact_match": 24.8,
"f1": 47.5929736420913,
"validation_loss": 2.872593492269516,
"epoch": 30
},
{
"exact_match": 24.6,
"f1": 46.71973465302511,
"validation_loss": 2.8743386417627335,
"epoch": 31
},
{
"exact_match": 24.2,
"f1": 47.8385603429721,
"validation_loss": 2.8729946613311768,
"epoch": 32
},
{
"exact_match": 24.4,
"f1": 47.47242807654571,
"validation_loss": 2.860194608569145,
"epoch": 33
},
{
"exact_match": 24.8,
"f1": 47.991243999185166,
"validation_loss": 2.8717388808727264,
"epoch": 34
},
{
"exact_match": 24.8,
"f1": 47.67304975448938,
"validation_loss": 2.860971286892891,
"epoch": 35
},
{
"exact_match": 23.6,
"f1": 47.64137886432003,
"validation_loss": 2.8725511580705643,
"epoch": 36
},
{
"exact_match": 24.2,
"f1": 46.82812020691276,
"validation_loss": 2.86672043800354,
"epoch": 37
},
{
"exact_match": 23.6,
"f1": 47.755417113652406,
"validation_loss": 2.8762750178575516,
"epoch": 38
},
{
"exact_match": 24.0,
"f1": 46.91197060488081,
"validation_loss": 2.8765931874513626,
"epoch": 39
},
{
"exact_match": 24.4,
"f1": 47.579544881309594,
"validation_loss": 2.8903532922267914,
"epoch": 40
},
{
"exact_match": 23.2,
"f1": 46.55462398521221,
"validation_loss": 2.8859409987926483,
"epoch": 41
},
{
"exact_match": 23.6,
"f1": 46.6789864201629,
"validation_loss": 2.900849297642708,
"epoch": 42
},
{
"exact_match": 23.2,
"f1": 47.08192684575037,
"validation_loss": 2.889473930001259,
"epoch": 43
},
{
"exact_match": 23.8,
"f1": 46.95175071469188,
"validation_loss": 2.8984376937150955,
"epoch": 44
}
]