gpt2-full_labels / meta_data / meta_s42_e10_cvi0.json
Theoreticallyhugo · Training in progress, epoch 1 · commit 0e081d3 (verified)
{"B-Claim": {"precision": 0.4259818731117825, "recall": 0.4964788732394366, "f1-score": 0.4585365853658536, "support": 284.0}, "B-MajorClaim": {"precision": 0.6343283582089553, "recall": 0.6028368794326241, "f1-score": 0.6181818181818182, "support": 141.0}, "B-Premise": {"precision": 0.6784741144414169, "recall": 0.7033898305084746, "f1-score": 0.6907073509015257, "support": 708.0}, "I-Claim": {"precision": 0.49383531483927784, "recall": 0.5501594309541329, "f1-score": 0.5204780136906834, "support": 4077.0}, "I-MajorClaim": {"precision": 0.7590822179732314, "recall": 0.5884387351778656, "f1-score": 0.6629557472863902, "support": 2024.0}, "I-Premise": {"precision": 0.8390768750974583, "recall": 0.8798234139960759, "f1-score": 0.8589671961050364, "support": 12232.0}, "O": {"precision": 0.9050880626223092, "recall": 0.8436359951357925, "f1-score": 0.8732822825972937, "support": 9868.0}, "accuracy": 0.7924251721551783, "macro avg": {"precision": 0.6765524023277759, "recall": 0.6663947369206289, "f1-score": 0.6690155705898002, "support": 29334.0}, "weighted avg": {"precision": 0.7989202000893961, "recall": 0.7924251721551783, "f1-score": 0.794117966618848, "support": 29334.0}}