[
{
"exact_match": 37.2,
"f1": 59.57842060282067,
"validation_loss": 2.41743141412735,
"epoch": 1
},
{
"exact_match": 35.0,
"f1": 59.76357784672174,
"validation_loss": 2.4413123428821564,
"epoch": 2
},
{
"exact_match": 37.2,
"f1": 60.33021590001654,
"validation_loss": 2.419440597295761,
"epoch": 3
},
{
"exact_match": 37.0,
"f1": 60.876613790878174,
"validation_loss": 2.393815279006958,
"epoch": 4
},
{
"exact_match": 38.6,
"f1": 62.11650390602729,
"validation_loss": 2.279040962457657,
"epoch": 5
},
{
"exact_match": 36.2,
"f1": 60.24461119706418,
"validation_loss": 2.313094735145569,
"epoch": 6
},
{
"exact_match": 39.4,
"f1": 62.563444650375324,
"validation_loss": 2.3069529682397842,
"epoch": 7
},
{
"exact_match": 38.0,
"f1": 61.676504733167725,
"validation_loss": 2.2878972589969635,
"epoch": 8
},
{
"exact_match": 39.8,
"f1": 63.639278114254296,
"validation_loss": 2.2642438858747482,
"epoch": 9
},
{
"exact_match": 38.6,
"f1": 62.84171569640027,
"validation_loss": 2.236086845397949,
"epoch": 10
},
{
"exact_match": 39.2,
"f1": 61.88612229562657,
"validation_loss": 2.2972750663757324,
"epoch": 11
},
{
"exact_match": 40.0,
"f1": 63.41957204484483,
"validation_loss": 2.2380547374486923,
"epoch": 12
},
{
"exact_match": 38.8,
"f1": 63.0855328431726,
"validation_loss": 2.2377340495586395,
"epoch": 13
},
{
"exact_match": 38.6,
"f1": 63.128146216898976,
"validation_loss": 2.2086049765348434,
"epoch": 14
},
{
"exact_match": 40.0,
"f1": 64.54242193361432,
"validation_loss": 2.2143527567386627,
"epoch": 15
},
{
"exact_match": 40.0,
"f1": 63.278631848762295,
"validation_loss": 2.2167883813381195,
"epoch": 16
},
{
"exact_match": 40.2,
"f1": 64.27562472854457,
"validation_loss": 2.2080784142017365,
"epoch": 17
},
{
"exact_match": 40.0,
"f1": 64.15211177156412,
"validation_loss": 2.23178893327713,
"epoch": 18
},
{
"exact_match": 39.6,
"f1": 63.7482440053434,
"validation_loss": 2.216603860259056,
"epoch": 19
},
{
"exact_match": 39.0,
"f1": 62.873221029732164,
"validation_loss": 2.2428862750530243,
"epoch": 20
},
{
"exact_match": 38.6,
"f1": 63.0879279178262,
"validation_loss": 2.206653445959091,
"epoch": 21
},
{
"exact_match": 41.0,
"f1": 64.54332732301185,
"validation_loss": 2.2194953709840775,
"epoch": 22
},
{
"exact_match": 40.6,
"f1": 63.80289079581061,
"validation_loss": 2.203217476606369,
"epoch": 23
},
{
"exact_match": 40.0,
"f1": 63.404288028303924,
"validation_loss": 2.215685695409775,
"epoch": 24
},
{
"exact_match": 38.8,
"f1": 62.93220915086903,
"validation_loss": 2.2293421030044556,
"epoch": 25
},
{
"exact_match": 39.6,
"f1": 63.04610033996446,
"validation_loss": 2.1999125480651855,
"epoch": 26
},
{
"exact_match": 38.8,
"f1": 63.37994688557569,
"validation_loss": 2.19596266746521,
"epoch": 27
},
{
"exact_match": 39.8,
"f1": 63.36716124332587,
"validation_loss": 2.207815259695053,
"epoch": 28
},
{
"exact_match": 39.0,
"f1": 63.20235070703525,
"validation_loss": 2.2127343714237213,
"epoch": 29
},
{
"exact_match": 39.8,
"f1": 64.53722000128786,
"validation_loss": 2.2037834227085114,
"epoch": 30
},
{
"exact_match": 39.0,
"f1": 63.89482166971992,
"validation_loss": 2.202571988105774,
"epoch": 31
},
{
"exact_match": 39.6,
"f1": 63.50426623795006,
"validation_loss": 2.2169955670833588,
"epoch": 32
},
{
"exact_match": 39.2,
"f1": 63.413569332959725,
"validation_loss": 2.2177259027957916,
"epoch": 33
},
{
"exact_match": 40.2,
"f1": 64.15148141858079,
"validation_loss": 2.2170828878879547,
"epoch": 34
},
{
"exact_match": 39.2,
"f1": 63.379632141043025,
"validation_loss": 2.221564292907715,
"epoch": 35
},
{
"exact_match": 38.6,
"f1": 63.822407502125486,
"validation_loss": 2.2223614752292633,
"epoch": 36
},
{
"exact_match": 38.4,
"f1": 63.25330786310625,
"validation_loss": 2.2186082154512405,
"epoch": 37
},
{
"exact_match": 39.4,
"f1": 63.81577942649743,
"validation_loss": 2.2130299359560013,
"epoch": 38
}
]