mmomm25 committed on
Commit ea0ebfa · verified · 1 Parent(s): dcab071

Model save

Files changed (1)
  1. README.md +62 -42
README.md CHANGED
@@ -26,19 +26,19 @@ model-index:
  - name: Accuracy
  type: accuracy
  value:
- accuracy: 0.9313725490196079
  - name: F1
  type: f1
  value:
- f1: 0.9318620831578108
  - name: Precision
  type: precision
  value:
- precision: 0.9352588699152988
  - name: Recall
  type: recall
  value:
- recall: 0.9330386368950199
  ---

  <!-- This model card has been generated automatically according to the information the Trainer had access to. You
@@ -48,11 +48,11 @@ should probably proofread and complete it, then remove this comment. -->

  This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset.
  It achieves the following results on the evaluation set:
- - Loss: 0.3451
- - Accuracy: {'accuracy': 0.9313725490196079}
- - F1: {'f1': 0.9318620831578108}
- - Precision: {'precision': 0.9352588699152988}
- - Recall: {'recall': 0.9330386368950199}

  ## Model description

@@ -80,42 +80,62 @@ The following hyperparameters were used during training:
  - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
  - lr_scheduler_type: linear
  - lr_scheduler_warmup_ratio: 0.1
- - num_epochs: 30

  ### Training results

- | Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 | Precision | Recall |
- |:-------------:|:-----:|:----:|:---------------:|:--------------------------------:|:--------------------------:|:---------------------------------:|:------------------------------:|
- | 1.1457 | 1.0 | 255 | 1.1588 | {'accuracy': 0.6941176470588235} | {'f1': 0.683792091280419} | {'precision': 0.7377494408375539} | {'recall': 0.6941484490686618} |
- | 0.4558 | 2.0 | 510 | 0.4985 | {'accuracy': 0.8470588235294118} | {'f1': 0.8510200857474889} | {'precision': 0.8766865984788879} | {'recall': 0.8454231938274491} |
- | 0.3368 | 3.0 | 765 | 0.4429 | {'accuracy': 0.8705882352941177} | {'f1': 0.8713991689505157} | {'precision': 0.8884985994397759} | {'recall': 0.8711626139817629} |
- | 0.4402 | 4.0 | 1020 | 0.3947 | {'accuracy': 0.8784313725490196} | {'f1': 0.880471094803992} | {'precision': 0.8952890487430221} | {'recall': 0.8776488582339645} |
- | 0.3937 | 5.0 | 1275 | 0.4377 | {'accuracy': 0.8725490196078431} | {'f1': 0.8703780158419334} | {'precision': 0.8900760323128502} | {'recall': 0.8691138648585458} |
- | 0.4042 | 6.0 | 1530 | 0.3989 | {'accuracy': 0.8862745098039215} | {'f1': 0.8859196308355288} | {'precision': 0.8972093144987638} | {'recall': 0.8834955381497934} |
- | 0.4321 | 7.0 | 1785 | 0.3642 | {'accuracy': 0.8901960784313725} | {'f1': 0.8896787277785286} | {'precision': 0.9002265766283616} | {'recall': 0.8878911425453978} |
- | 0.2618 | 8.0 | 2040 | 0.4092 | {'accuracy': 0.8960784313725491} | {'f1': 0.8950352545633242} | {'precision': 0.9033601037449228} | {'recall': 0.8953584093211753} |
- | 0.3376 | 9.0 | 2295 | 0.4055 | {'accuracy': 0.8941176470588236} | {'f1': 0.8933906521569781} | {'precision': 0.9014239716079142} | {'recall': 0.8936214051905541} |
- | 0.2592 | 10.0 | 2550 | 0.4947 | {'accuracy': 0.8627450980392157} | {'f1': 0.8639402375974159} | {'precision': 0.8837020302136581} | {'recall': 0.8620484568622867} |
- | 0.363 | 11.0 | 2805 | 0.4586 | {'accuracy': 0.8862745098039215} | {'f1': 0.8871534026726227} | {'precision': 0.89776885319054} | {'recall': 0.8850722858701582} |
- | 0.278 | 12.0 | 3060 | 0.2913 | {'accuracy': 0.9196078431372549} | {'f1': 0.9203616179126926} | {'precision': 0.9237222880261108} | {'recall': 0.920318466993999} |
- | 0.1436 | 13.0 | 3315 | 0.4294 | {'accuracy': 0.9} | {'f1': 0.8997771222806092} | {'precision': 0.9095799880746632} | {'recall': 0.901510989010989} |
- | 0.2017 | 14.0 | 3570 | 0.3324 | {'accuracy': 0.9274509803921569} | {'f1': 0.9275152403725876} | {'precision': 0.9309834626869261} | {'recall': 0.9275085729872963} |
- | 0.017 | 15.0 | 3825 | 0.3194 | {'accuracy': 0.9235294117647059} | {'f1': 0.9236114478832926} | {'precision': 0.9261443200762086} | {'recall': 0.9255596796820201} |
- | 0.2747 | 16.0 | 4080 | 0.3214 | {'accuracy': 0.9215686274509803} | {'f1': 0.9215050761511078} | {'precision': 0.923286387036387} | {'recall': 0.921530375652716} |
- | 0.0148 | 17.0 | 4335 | 0.4273 | {'accuracy': 0.9} | {'f1': 0.9008866348649631} | {'precision': 0.9097813011730537} | {'recall': 0.9019045670641415} |
- | 0.0727 | 18.0 | 4590 | 0.4216 | {'accuracy': 0.907843137254902} | {'f1': 0.908247672636611} | {'precision': 0.9152803626027508} | {'recall': 0.908746882550074} |
- | 0.2089 | 19.0 | 4845 | 0.4226 | {'accuracy': 0.888235294117647} | {'f1': 0.8881263753150787} | {'precision': 0.8969831718237314} | {'recall': 0.8921074935702595} |
- | 0.1508 | 20.0 | 5100 | 0.6408 | {'accuracy': 0.8431372549019608} | {'f1': 0.8432283100943705} | {'precision': 0.8603972593599677} | {'recall': 0.847403261631985} |
- | 0.0059 | 21.0 | 5355 | 0.3542 | {'accuracy': 0.9372549019607843} | {'f1': 0.9374172181902983} | {'precision': 0.9391954672298979} | {'recall': 0.939125457875458} |
- | 0.2119 | 22.0 | 5610 | 0.4237 | {'accuracy': 0.9098039215686274} | {'f1': 0.9100028504610442} | {'precision': 0.9157914357666666} | {'recall': 0.912116651079417} |
- | 0.0041 | 23.0 | 5865 | 0.3427 | {'accuracy': 0.9215686274509803} | {'f1': 0.9216344394955899} | {'precision': 0.9242607286772045} | {'recall': 0.9239955966019796} |
- | 0.0087 | 24.0 | 6120 | 0.3356 | {'accuracy': 0.9274509803921569} | {'f1': 0.9278177751460005} | {'precision': 0.9306401523686116} | {'recall': 0.9293298456862287} |
- | 0.0144 | 25.0 | 6375 | 0.3856 | {'accuracy': 0.9137254901960784} | {'f1': 0.9143035926800989} | {'precision': 0.9185103114050482} | {'recall': 0.9161430324994155} |
- | 0.0081 | 26.0 | 6630 | 0.3590 | {'accuracy': 0.9235294117647059} | {'f1': 0.924133573024068} | {'precision': 0.9277836853827484} | {'recall': 0.9262391863455693} |
- | 0.0495 | 27.0 | 6885 | 0.3014 | {'accuracy': 0.9372549019607843} | {'f1': 0.937621536712251} | {'precision': 0.9404314255346291} | {'recall': 0.9391054867118698} |
- | 0.1128 | 28.0 | 7140 | 0.3673 | {'accuracy': 0.9176470588235294} | {'f1': 0.9185519070532611} | {'precision': 0.9232326824268375} | {'recall': 0.919416842023225} |
- | 0.0728 | 29.0 | 7395 | 0.3632 | {'accuracy': 0.9215686274509803} | {'f1': 0.9220615472946353} | {'precision': 0.9265730699525729} | {'recall': 0.9235377211441043} |
- | 0.0044 | 30.0 | 7650 | 0.3451 | {'accuracy': 0.9313725490196079} | {'f1': 0.9318620831578108} | {'precision': 0.9352588699152988} | {'recall': 0.9330386368950199} |


  ### Framework versions
 
@@ -26,19 +26,19 @@ model-index:
  - name: Accuracy
  type: accuracy
  value:
+ accuracy: 0.9705510388437217
  - name: F1
  type: f1
  value:
+ f1: 0.9705092081728205
  - name: Precision
  type: precision
  value:
+ precision: 0.9710523804561741
  - name: Recall
  type: recall
  value:
+ recall: 0.9704181656558507
  ---

  <!-- This model card has been generated automatically according to the information the Trainer had access to. You
 
@@ -48,11 +48,11 @@ should probably proofread and complete it, then remove this comment. -->

  This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the imagefolder dataset.
  It achieves the following results on the evaluation set:
+ - Loss: 0.1799
+ - Accuracy: {'accuracy': 0.9705510388437217}
+ - F1: {'f1': 0.9705092081728205}
+ - Precision: {'precision': 0.9710523804561741}
+ - Recall: {'recall': 0.9704181656558507}
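
As a quick way to try the checkpoint behind these numbers, the snippet below is a minimal inference sketch using the transformers Auto classes; the repository id and image path are placeholders, since neither appears in this diff.

```python
from PIL import Image
from transformers import AutoImageProcessor, AutoModelForImageClassification

# Placeholder repo id -- substitute the actual model repository for this card.
repo_id = "mmomm25/vit-base-finetuned"  # hypothetical name, not taken from the diff

processor = AutoImageProcessor.from_pretrained(repo_id)
model = AutoModelForImageClassification.from_pretrained(repo_id)

image = Image.open("example.jpg")  # placeholder input image
inputs = processor(images=image, return_tensors="pt")
logits = model(**inputs).logits
predicted_class = model.config.id2label[logits.argmax(-1).item()]
print(predicted_class)
```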
 
  ## Model description

 
@@ -80,42 +80,62 @@ The following hyperparameters were used during training:
  - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
  - lr_scheduler_type: linear
  - lr_scheduler_warmup_ratio: 0.1
+ - num_epochs: 50
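
For orientation, the hyperparameters above map roughly onto a TrainingArguments setup like the sketch below; learning rate, batch sizes, and the output directory are not shown in this hunk, so those values are illustrative placeholders rather than the actual training configuration.

```python
from transformers import TrainingArguments

# Sketch of a TrainingArguments setup consistent with the hyperparameters listed
# above. learning_rate, batch size, and output_dir are NOT in this diff and are
# placeholders only.
training_args = TrainingArguments(
    output_dir="vit-finetuned",      # placeholder
    num_train_epochs=50,             # matches num_epochs: 50
    learning_rate=2e-4,              # placeholder; not shown in the diff
    per_device_train_batch_size=16,  # placeholder; not shown in the diff
    warmup_ratio=0.1,                # matches lr_scheduler_warmup_ratio: 0.1
    lr_scheduler_type="linear",      # matches lr_scheduler_type: linear
    adam_beta1=0.9,                  # Adam betas=(0.9, 0.999)
    adam_beta2=0.999,
    adam_epsilon=1e-8,               # epsilon=1e-08
)
```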
 
  ### Training results

+ | Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 | Precision | Recall |
+ |:-------------:|:-----:|:------:|:---------------:|:--------------------------------:|:--------------------------:|:---------------------------------:|:------------------------------:|
+ | 0.37 | 1.0 | 8302 | 0.3462 | {'accuracy': 0.8933453778982234} | {'f1': 0.8942100052466936} | {'precision': 0.8984250247518094} | {'recall': 0.8931370564158605} |
+ | 0.2375 | 2.0 | 16605 | 0.3353 | {'accuracy': 0.9053297199638664} | {'f1': 0.9062005892826234} | {'precision': 0.912717242831991} | {'recall': 0.9052684275828231} |
+ | 0.5678 | 3.0 | 24907 | 0.3114 | {'accuracy': 0.9118940078289671} | {'f1': 0.9116597109413729} | {'precision': 0.9165908158739848} | {'recall': 0.9116030141797212} |
+ | 0.09 | 4.0 | 33210 | 0.2768 | {'accuracy': 0.9270099367660344} | {'f1': 0.9272025877193879} | {'precision': 0.9305221603080029} | {'recall': 0.9267551810236085} |
+ | 0.266 | 5.0 | 41512 | 0.2595 | {'accuracy': 0.9312857573020175} | {'f1': 0.9313123811138734} | {'precision': 0.9327488749607135} | {'recall': 0.931043574955592} |
+ | 0.2037 | 6.0 | 49815 | 0.2123 | {'accuracy': 0.9431496537187594} | {'f1': 0.9428749572352995} | {'precision': 0.9435965528419799} | {'recall': 0.9429052318485974} |
+ | 0.1487 | 7.0 | 58117 | 0.2282 | {'accuracy': 0.9430292080698585} | {'f1': 0.9430188942480495} | {'precision': 0.9444609819488103} | {'recall': 0.9428880066548226} |
+ | 0.1405 | 8.0 | 66420 | 0.2440 | {'accuracy': 0.9454381210478772} | {'f1': 0.9455191951029847} | {'precision': 0.9467893516678145} | {'recall': 0.9453224042508239} |
+ | 0.09 | 9.0 | 74722 | 0.2480 | {'accuracy': 0.9436314363143632} | {'f1': 0.9433683232067358} | {'precision': 0.9452971145459653} | {'recall': 0.9433746555197686} |
+ | 0.2275 | 10.0 | 83025 | 0.2473 | {'accuracy': 0.946582354712436} | {'f1': 0.9462472081330006} | {'precision': 0.9479482237973264} | {'recall': 0.9463251646491099} |
+ | 0.0114 | 11.0 | 91327 | 0.1953 | {'accuracy': 0.9551942186088528} | {'f1': 0.954959353992539} | {'precision': 0.9555671952457011} | {'recall': 0.9550120730050532} |
+ | 0.0778 | 12.0 | 99630 | 0.2246 | {'accuracy': 0.948509485094851} | {'f1': 0.9485863094568601} | {'precision': 0.9496017185087666} | {'recall': 0.9484435235390778} |
+ | 0.1031 | 13.0 | 107932 | 0.2435 | {'accuracy': 0.9443541102077687} | {'f1': 0.9443461050911817} | {'precision': 0.9453218450441414} | {'recall': 0.9442028500529185} |
+ | 0.1419 | 14.0 | 116235 | 0.1751 | {'accuracy': 0.9580849141824752} | {'f1': 0.9580811670883926} | {'precision': 0.9586631550970829} | {'recall': 0.9580178560027687} |
+ | 0.0993 | 15.0 | 124537 | 0.2099 | {'accuracy': 0.9542908762420957} | {'f1': 0.9541061721417268} | {'precision': 0.9541191566948424} | {'recall': 0.9541611121516007} |
+ | 0.0696 | 16.0 | 132840 | 0.2240 | {'accuracy': 0.955736224028907} | {'f1': 0.9555782982813351} | {'precision': 0.9563626555520048} | {'recall': 0.9555607789866469} |
+ | 0.1697 | 17.0 | 141142 | 0.1904 | {'accuracy': 0.9579644685335742} | {'f1': 0.9577653922157884} | {'precision': 0.9581933285912818} | {'recall': 0.9578259452834421} |
+ | 0.0429 | 18.0 | 149445 | 0.2102 | {'accuracy': 0.9558566696778079} | {'f1': 0.955829019244906} | {'precision': 0.9570787144559411} | {'recall': 0.955662074541215} |
+ | 0.0062 | 19.0 | 157747 | 0.1768 | {'accuracy': 0.9601927130382415} | {'f1': 0.9601350969183112} | {'precision': 0.9605649770988711} | {'recall': 0.960090994011799} |
+ | 0.005 | 20.0 | 166050 | 0.1779 | {'accuracy': 0.9624209575429088} | {'f1': 0.9622479573311764} | {'precision': 0.9626782993390144} | {'recall': 0.9622658509657924} |
+ | 0.1395 | 21.0 | 174352 | 0.1801 | {'accuracy': 0.961035832580548} | {'f1': 0.9609739947935761} | {'precision': 0.9615134912739316} | {'recall': 0.9609000684385473} |
+ | 0.0966 | 22.0 | 182655 | 0.1854 | {'accuracy': 0.9594098163203855} | {'f1': 0.959384693086552} | {'precision': 0.9602665108685822} | {'recall': 0.9592591268355116} |
+ | 0.0077 | 23.0 | 190957 | 0.2190 | {'accuracy': 0.9573020174646191} | {'f1': 0.9572877808970253} | {'precision': 0.9580176848865115} | {'recall': 0.9571782999468976} |
+ | 0.1032 | 24.0 | 199260 | 0.2281 | {'accuracy': 0.9570009033423668} | {'f1': 0.9568818981129438} | {'precision': 0.9577859752909083} | {'recall': 0.95679636210611} |
+ | 0.1106 | 25.0 | 207562 | 0.2017 | {'accuracy': 0.9615778380006023} | {'f1': 0.9615258017857322} | {'precision': 0.9623198062794668} | {'recall': 0.9614196936259853} |
+ | 0.0833 | 26.0 | 215865 | 0.2074 | {'accuracy': 0.9618789521228546} | {'f1': 0.9618001985746503} | {'precision': 0.9625802607483476} | {'recall': 0.9617264541173526} |
+ | 0.0257 | 27.0 | 224167 | 0.1716 | {'accuracy': 0.9648900933453779} | {'f1': 0.9648046336171575} | {'precision': 0.9653533590655595} | {'recall': 0.9648070647916974} |
+ | 0.002 | 28.0 | 232470 | 0.2144 | {'accuracy': 0.9635049683830171} | {'f1': 0.9634863498105041} | {'precision': 0.9646616314066687} | {'recall': 0.9633283402670114} |
+ | 0.016 | 29.0 | 240772 | 0.2237 | {'accuracy': 0.959349593495935} | {'f1': 0.9594342688149864} | {'precision': 0.9608554784443832} | {'recall': 0.9591930193477335} |
+ | 0.0575 | 30.0 | 249075 | 0.1847 | {'accuracy': 0.9651912074676302} | {'f1': 0.9652324025756626} | {'precision': 0.9661899074568192} | {'recall': 0.9650558808909672} |
+ | 0.0997 | 31.0 | 257377 | 0.1798 | {'accuracy': 0.9686841312857573} | {'f1': 0.9686428828918746} | {'precision': 0.9691104091550086} | {'recall': 0.9685623791125} |
+ | 0.0017 | 32.0 | 265680 | 0.1985 | {'accuracy': 0.9627822944896116} | {'f1': 0.9626870784433683} | {'precision': 0.963172343077798} | {'recall': 0.962659195203449} |
+ | 0.0538 | 33.0 | 273982 | 0.1605 | {'accuracy': 0.9710328214393255} | {'f1': 0.9710267090566379} | {'precision': 0.9715030346291925} | {'recall': 0.9709339306149106} |
+ | 0.0023 | 34.0 | 282285 | 0.1832 | {'accuracy': 0.9674194519722975} | {'f1': 0.9673811237591747} | {'precision': 0.9679330625290327} | {'recall': 0.9672934059576415} |
+ | 0.0459 | 35.0 | 290587 | 0.1877 | {'accuracy': 0.9657332128876844} | {'f1': 0.965749942670487} | {'precision': 0.9664774134203846} | {'recall': 0.9656335047526519} |
+ | 0.0193 | 36.0 | 298890 | 0.1633 | {'accuracy': 0.9677205660945498} | {'f1': 0.9677329659674949} | {'precision': 0.9684419822552822} | {'recall': 0.9675975315398574} |
+ | 0.0707 | 37.0 | 307192 | 0.1787 | {'accuracy': 0.9685636856368564} | {'f1': 0.9684895304986225} | {'precision': 0.9689001010469502} | {'recall': 0.9684451099576021} |
+ | 0.0985 | 38.0 | 315495 | 0.2076 | {'accuracy': 0.9629629629629629} | {'f1': 0.9630524772042474} | {'precision': 0.9642571257654206} | {'recall': 0.9628345133405821} |
+ | 0.0788 | 39.0 | 323797 | 0.1794 | {'accuracy': 0.9702499247214694} | {'f1': 0.9701536210820301} | {'precision': 0.9706833500680011} | {'recall': 0.9700913059580385} |
+ | 0.0008 | 40.0 | 332100 | 0.1618 | {'accuracy': 0.9733212887684433} | {'f1': 0.9732738808256685} | {'precision': 0.9736678524998652} | {'recall': 0.9731998786471756} |
+ | 0.074 | 41.0 | 340402 | 0.1991 | {'accuracy': 0.9668172237277929} | {'f1': 0.9666853676025186} | {'precision': 0.9673504006462602} | {'recall': 0.9666339730453138} |
+ | 0.028 | 42.0 | 348705 | 0.1556 | {'accuracy': 0.9742246311352002} | {'f1': 0.9741506224327396} | {'precision': 0.9743929114728255} | {'recall': 0.9741060958660924} |
+ | 0.1092 | 43.0 | 357007 | 0.1567 | {'accuracy': 0.9740439626618489} | {'f1': 0.9739721593463402} | {'precision': 0.9742787951493688} | {'recall': 0.9739217266482031} |
+ | 0.0008 | 44.0 | 365310 | 0.1697 | {'accuracy': 0.9707919301415237} | {'f1': 0.9707068184898958} | {'precision': 0.9712158191257935} | {'recall': 0.9706396165347172} |
+ | 0.1728 | 45.0 | 373612 | 0.1791 | {'accuracy': 0.9701294790725685} | {'f1': 0.9700180755443455} | {'precision': 0.9704271475318083} | {'recall': 0.9699790872810246} |
+ | 0.0004 | 46.0 | 381915 | 0.2024 | {'accuracy': 0.9672387834989461} | {'f1': 0.9672031338307139} | {'precision': 0.9680962843155184} | {'recall': 0.9670672659468575} |
+ | 0.0044 | 47.0 | 390217 | 0.1708 | {'accuracy': 0.9721168322794339} | {'f1': 0.9720140881144397} | {'precision': 0.9723799188733908} | {'recall': 0.9719693947081535} |
+ | 0.089 | 48.0 | 398520 | 0.1975 | {'accuracy': 0.9686841312857573} | {'f1': 0.9686510789801565} | {'precision': 0.969349692339074} | {'recall': 0.9685439142771983} |
+ | 0.0774 | 49.0 | 406822 | 0.1778 | {'accuracy': 0.9709123757904246} | {'f1': 0.9708794409655027} | {'precision': 0.9714408230271825} | {'recall': 0.9707829629677185} |
+ | 0.0012 | 50.0 | 415100 | 0.1799 | {'accuracy': 0.9705510388437217} | {'f1': 0.9705092081728205} | {'precision': 0.9710523804561741} | {'recall': 0.9704181656558507} |
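
The metric cells above are raw dictionaries of the kind returned by the evaluate library, which suggests a compute_metrics callback along the lines of this sketch; the average='weighted' choice is an assumption, as the averaging mode is not recorded in the card.

```python
import numpy as np
import evaluate

# Assumed metric setup -- the averaging strategy is not recorded in the card.
accuracy = evaluate.load("accuracy")
f1 = evaluate.load("f1")
precision = evaluate.load("precision")
recall = evaluate.load("recall")

def compute_metrics(eval_pred):
    logits, labels = eval_pred
    preds = np.argmax(logits, axis=-1)
    # Returning the metric dicts unchanged reproduces the {'f1': ...} style cells
    # seen in the table above.
    return {
        "accuracy": accuracy.compute(predictions=preds, references=labels),
        "f1": f1.compute(predictions=preds, references=labels, average="weighted"),
        "precision": precision.compute(predictions=preds, references=labels, average="weighted"),
        "recall": recall.compute(predictions=preds, references=labels, average="weighted"),
    }
```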
 

  ### Framework versions