Upload README.md with huggingface_hub
README.md (CHANGED)
@@ -4,15 +4,21 @@ language:
 - en
 library_name: model2vec
 license: mit
+tags:
+- embeddings
+- static-embeddings
+- mteb
 model-index:
 - name: M2V_base_glove_subword
   results:
-  - dataset:
-      config: en-ext
+  - task:
+      type: Classification
+    dataset:
       name: MTEB AmazonCounterfactualClassification (en-ext)
-      revision: e8379541af4e31359cca9fbcf4b00f2671dba205
-      split: test
       type: mteb/amazon_counterfactual
+      config: en-ext
+      split: test
+      revision: e8379541af4e31359cca9fbcf4b00f2671dba205
     metrics:
     - type: accuracy
       value: 66.4167916041979

[The remaining hunks, @@ -26,14 +32,14 @@ through @@ -8870,14 +8876,14 @@, apply the same restructuring to every other entry under `results:`. In each one, the flattened layout (a `- dataset:` item listing config, name, revision, split, and type, followed by the metrics and a trailing `task:` block whose `type:` was sometimes left empty) is replaced by a leading `- task:` block that states the task type (Classification, Clustering, PairClassification, Reranking, Retrieval, or STS) and a nested `dataset:` mapping with name, type, config, split, and revision; the metric entries themselves are unchanged. Affected entries: AmazonCounterfactualClassification (en), AmazonPolarityClassification, AmazonReviewsClassification (en), ArguAna, ArxivClusteringP2P, ArxivClusteringS2S, AskUbuntuDupQuestions, BIOSSES, Banking77Classification, BiorxivClusteringP2P, BiorxivClusteringS2S, the twelve CQADupstack*Retrieval subsets plus the combined CQADupstackRetrieval, ClimateFEVER, DBPedia, EmotionClassification, FEVER, FiQA2018, HotpotQA, ImdbClassification, MSMARCO, MTOPDomainClassification (en), MTOPIntentClassification (en), MassiveIntentClassification (en), MassiveScenarioClassification (en), MedrxivClusteringP2P, MedrxivClusteringS2S, MindSmallReranking, NFCorpus, NQ, QuoraRetrieval, RedditClustering, RedditClusteringP2P, SCIDOCS, SICK-R, STS12, STS13, STS14, STS15, STS16, STS17 (fr-en, es-en, nl-en, en-de, it-en, en-en, en-ar, en-tr), STS22 (es-en, en, de-en, zh-en, pl-en), STSBenchmark, SciDocsRR, SciFact, SprintDuplicateQuestions, StackExchangeClustering, StackExchangeClusteringP2P, StackOverflowDupQuestions, SummEval, TRECCOVID, Touche2020, ToxicConversationsClassification, TweetSentimentExtractionClassification, TwentyNewsgroupsClustering, TwitterSemEval2015, and TwitterURLCorpus.]

@@ -8961,13 +8967,6 @@ model-index:
       value: 69.39391707784078
     - type: similarity_recall
       value: 72.55158607945796
-    task:
-      type: PairClassification
-  model_name: minishlab/M2V_base_glove_subword
-  tags:
-  - embeddings
-  - static-embeddings
-  - mteb
 ---
 
 # M2V_base_glove_subword Model Card
|
|
|
7310 |
value: 21.344883409729785
|
7311 |
- type: spearman
|
7312 |
value: 19.492480027372526
|
7313 |
+
- task:
|
7314 |
type: STS
|
7315 |
+
dataset:
|
|
|
7316 |
name: MTEB STS17 (es-en)
|
|
|
|
|
7317 |
type: mteb/sts17-crosslingual-sts
|
7318 |
+
config: es-en
|
7319 |
+
split: test
|
7320 |
+
revision: faeb762787bd10488a50c8b5be4a3b82e411949c
|
7321 |
metrics:
|
7322 |
- type: cosine_pearson
|
7323 |
value: 14.966581838953037
|
|
|
7337 |
value: 14.966581838953037
|
7338 |
- type: spearman
|
7339 |
value: 13.24509138766898
|
7340 |
+
- task:
|
7341 |
type: STS
|
7342 |
+
dataset:
|
|
|
7343 |
name: MTEB STS17 (nl-en)
|
|
|
|
|
7344 |
type: mteb/sts17-crosslingual-sts
|
7345 |
+
config: nl-en
|
7346 |
+
split: test
|
7347 |
+
revision: faeb762787bd10488a50c8b5be4a3b82e411949c
|
7348 |
metrics:
|
7349 |
- type: cosine_pearson
|
7350 |
value: 18.309414985775234
|
|
|
7364 |
value: 18.309414985775234
|
7365 |
- type: spearman
|
7366 |
value: 14.341489363671842
|
7367 |
+
- task:
|
7368 |
type: STS
|
7369 |
+
dataset:
|
|
|
7370 |
name: MTEB STS17 (en-de)
|
|
|
|
|
7371 |
type: mteb/sts17-crosslingual-sts
|
7372 |
+
config: en-de
|
7373 |
+
split: test
|
7374 |
+
revision: faeb762787bd10488a50c8b5be4a3b82e411949c
|
7375 |
metrics:
|
7376 |
- type: cosine_pearson
|
7377 |
value: 21.301586456013037
|
|
|
7391 |
value: 21.301586456013037
|
7392 |
- type: spearman
|
7393 |
value: 22.571419522164376
|
7394 |
+
- task:
|
7395 |
type: STS
|
7396 |
+
dataset:
|
|
|
7397 |
name: MTEB STS17 (it-en)
|
|
|
|
|
7398 |
type: mteb/sts17-crosslingual-sts
|
7399 |
+
config: it-en
|
7400 |
+
split: test
|
7401 |
+
revision: faeb762787bd10488a50c8b5be4a3b82e411949c
|
7402 |
metrics:
|
7403 |
- type: cosine_pearson
|
7404 |
value: 16.140292893693204
|
|
|
7418 |
value: 16.140292893693204
|
7419 |
- type: spearman
|
7420 |
value: 10.216376215477217
|
7421 |
+
- task:
|
7422 |
type: STS
|
7423 |
+
dataset:
|
|
|
7424 |
name: MTEB STS17 (en-en)
|
|
|
|
|
7425 |
type: mteb/sts17-crosslingual-sts
|
7426 |
+
config: en-en
|
7427 |
+
split: test
|
7428 |
+
revision: faeb762787bd10488a50c8b5be4a3b82e411949c
|
7429 |
metrics:
|
7430 |
- type: cosine_pearson
|
7431 |
value: 78.42242639560023
|
|
|
7445 |
value: 78.42242639560023
|
7446 |
- type: spearman
|
7447 |
value: 80.2472005970173
|
7448 |
+
- task:
|
7449 |
type: STS
|
7450 |
+
dataset:
|
|
|
7451 |
name: MTEB STS17 (en-ar)
|
|
|
|
|
7452 |
type: mteb/sts17-crosslingual-sts
|
7453 |
+
config: en-ar
|
7454 |
+
split: test
|
7455 |
+
revision: faeb762787bd10488a50c8b5be4a3b82e411949c
|
7456 |
metrics:
|
7457 |
- type: cosine_pearson
|
7458 |
value: -5.762967943082491
|
|
|
7472 |
value: -5.762967943082491
|
7473 |
- type: spearman
|
7474 |
value: -6.184248227377756
|
7475 |
+
- task:
|
7476 |
type: STS
|
7477 |
+
dataset:
|
|
|
7478 |
name: MTEB STS17 (en-tr)
|
|
|
|
|
7479 |
type: mteb/sts17-crosslingual-sts
|
7480 |
+
config: en-tr
|
7481 |
+
split: test
|
7482 |
+
revision: faeb762787bd10488a50c8b5be4a3b82e411949c
|
7483 |
metrics:
|
7484 |
- type: cosine_pearson
|
7485 |
value: -8.666319610669559
|
|
|
7499 |
value: -8.666319610669559
|
7500 |
- type: spearman
|
7501 |
value: -10.0877070299522
|
7502 |
+
- task:
|
7503 |
type: STS
|
7504 |
+
dataset:
|
|
|
7505 |
name: MTEB STS22 (es-en)
|
|
|
|
|
7506 |
type: mteb/sts22-crosslingual-sts
|
7507 |
+
config: es-en
|
7508 |
+
split: test
|
7509 |
+
revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
|
7510 |
metrics:
|
7511 |
- type: cosine_pearson
|
7512 |
value: 16.880423266497427
|
|
|
7526 |
value: 16.880423266497427
|
7527 |
- type: spearman
|
7528 |
value: 18.497107178067477
|
7529 |
+
- task:
|
7530 |
type: STS
|
7531 |
+
dataset:
|
|
|
7532 |
name: MTEB STS22 (en)
|
|
|
|
|
7533 |
type: mteb/sts22-crosslingual-sts
|
7534 |
+
config: en
|
7535 |
+
split: test
|
7536 |
+
revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
|
7537 |
metrics:
|
7538 |
- type: cosine_pearson
|
7539 |
value: 44.98861387948161
|
|
|
7553 |
value: 44.98861387948161
|
7554 |
- type: spearman
|
7555 |
value: 59.04270974068145
|
7556 |
+
- task:
|
7557 |
type: STS
|
7558 |
+
dataset:
|
|
|
7559 |
name: MTEB STS22 (de-en)
|
|
|
|
|
7560 |
type: mteb/sts22-crosslingual-sts
|
7561 |
+
config: de-en
|
7562 |
+
split: test
|
7563 |
+
revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
|
7564 |
metrics:
|
7565 |
- type: cosine_pearson
|
7566 |
value: 26.505168004689462
|
|
|
7580 |
value: 26.505168004689462
|
7581 |
- type: spearman
|
7582 |
value: 28.591720613248732
|
7583 |
+
- task:
|
7584 |
type: STS
|
7585 |
+
dataset:
|
|
|
7586 |
name: MTEB STS22 (zh-en)
|
|
|
|
|
7587 |
type: mteb/sts22-crosslingual-sts
|
7588 |
+
config: zh-en
|
7589 |
+
split: test
|
7590 |
+
revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
|
7591 |
metrics:
|
7592 |
- type: cosine_pearson
|
7593 |
value: 11.552622364692777
|
|
|
7607 |
value: 11.552622364692777
|
7608 |
- type: spearman
|
7609 |
value: 10.973019756392695
|
7610 |
+
- task:
|
7611 |
type: STS
|
7612 |
+
dataset:
|
|
|
7613 |
name: MTEB STS22 (pl-en)
|
|
|
|
|
7614 |
type: mteb/sts22-crosslingual-sts
|
7615 |
+
config: pl-en
|
7616 |
+
split: test
|
7617 |
+
revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
|
7618 |
metrics:
|
7619 |
- type: cosine_pearson
|
7620 |
value: 10.466988163502029
|
|
|
7634 |
value: 10.466988163502029
|
7635 |
- type: spearman
|
7636 |
value: -0.21879166839686814
|
7637 |
+
- task:
|
7638 |
type: STS
|
7639 |
+
dataset:
|
|
|
7640 |
name: MTEB STSBenchmark (default)
|
|
|
|
|
7641 |
type: mteb/stsbenchmark-sts
|
7642 |
+
config: default
|
7643 |
+
split: test
|
7644 |
+
revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831
|
7645 |
metrics:
|
7646 |
- type: cosine_pearson
|
7647 |
value: 66.80057012864974
|
|
|
7661 |
value: 66.80057012864974
|
7662 |
- type: spearman
|
7663 |
value: 66.52235871936412
|
7664 |
+
- task:
|
7665 |
+
type: Reranking
|
7666 |
+
dataset:
|
|
|
7667 |
name: MTEB SciDocsRR (default)
|
|
|
|
|
7668 |
type: mteb/scidocs-reranking
|
7669 |
+
config: default
|
7670 |
+
split: test
|
7671 |
+
revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab
|
7672 |
metrics:
|
7673 |
- type: main_score
|
7674 |
value: 71.11074203128574
|
|
|
7688 |
value: 72.52180244204617
|
7689 |
- type: nAUC_mrr_std
|
7690 |
value: 64.6185657337566
|
7691 |
+
- task:
|
7692 |
+
type: Retrieval
|
7693 |
+
dataset:
|
|
|
7694 |
name: MTEB SciFact (default)
|
|
|
|
|
7695 |
type: mteb/scifact
|
7696 |
+
config: default
|
7697 |
+
split: test
|
7698 |
+
revision: 0228b52cf27578f30900b9e5271d331663a030d7
|
7699 |
metrics:
|
7700 |
- type: main_score
|
7701 |
value: 50.663000000000004
|
|
|
7979 |
value: 52.983000000000004
|
7980 |
- type: recall_at_5
|
7981 |
value: 58.428000000000004
|
7982 |
+
- task:
|
7983 |
+
type: PairClassification
|
7984 |
+
dataset:
|
|
|
7985 |
name: MTEB SprintDuplicateQuestions (default)
|
|
|
|
|
7986 |
type: mteb/sprintduplicatequestions-pairclassification
|
7987 |
+
config: default
|
7988 |
+
split: test
|
7989 |
+
revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46
|
7990 |
metrics:
|
7991 |
- type: cosine_accuracy
|
7992 |
value: 99.6029702970297
|
|
|
8070 |
value: 78.80597014925374
|
8071 |
- type: similarity_recall
|
8072 |
value: 79.2
|
8073 |
+
- task:
|
8074 |
+
type: Clustering
|
8075 |
+
dataset:
|
|
|
8076 |
name: MTEB StackExchangeClustering (default)
|
|
|
|
|
8077 |
type: mteb/stackexchange-clustering
|
8078 |
+
config: default
|
8079 |
+
split: test
|
8080 |
+
revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259
|
8081 |
metrics:
|
8082 |
- type: main_score
|
8083 |
value: 40.01875953666112
|
|
|
8085 |
value: 40.01875953666112
|
8086 |
- type: v_measure_std
|
8087 |
value: 4.519991014119391
|
8088 |
+
- task:
|
8089 |
type: Clustering
|
8090 |
+
dataset:
|
|
|
8091 |
name: MTEB StackExchangeClusteringP2P (default)
|
|
|
|
|
8092 |
type: mteb/stackexchange-clustering-p2p
|
8093 |
+
config: default
|
8094 |
+
split: test
|
8095 |
+
revision: 815ca46b2622cec33ccafc3735d572c266efdb44
|
8096 |
metrics:
|
8097 |
- type: main_score
|
8098 |
value: 28.81354037080584
|
|
|
8100 |
value: 28.81354037080584
|
8101 |
- type: v_measure_std
|
8102 |
value: 1.4144350664362755
|
8103 |
+
- task:
|
8104 |
+
type: Reranking
|
8105 |
+
dataset:
|
|
|
8106 |
name: MTEB StackOverflowDupQuestions (default)
|
|
|
|
|
8107 |
type: mteb/stackoverflowdupquestions-reranking
|
8108 |
+
config: default
|
8109 |
+
split: test
|
8110 |
+
revision: e185fbe320c72810689fc5848eb6114e1ef5ec69
|
8111 |
metrics:
|
8112 |
- type: main_score
|
8113 |
value: 44.09716409649705
|
|
|
8127 |
value: 17.207604918830953
|
8128 |
- type: nAUC_mrr_std
|
8129 |
value: 6.664790258906265
|
8130 |
+
- task:
|
8131 |
+
type: Summarization
|
8132 |
+
dataset:
|
|
|
8133 |
name: MTEB SummEval (default)
|
|
|
|
|
8134 |
type: mteb/summeval
|
8135 |
+
config: default
|
8136 |
+
split: test
|
8137 |
+
revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c
|
8138 |
metrics:
|
8139 |
- type: cosine_pearson
|
8140 |
value: 29.294245469087553
|
|
|
8150 |
value: 29.294245469087553
|
8151 |
- type: spearman
|
8152 |
value: 30.080488918284974
|
8153 |
+
- task:
|
8154 |
+
type: Retrieval
|
8155 |
+
dataset:
|
|
|
8156 |
name: MTEB TRECCOVID (default)
|
|
|
|
|
8157 |
type: mteb/trec-covid
|
8158 |
+
config: default
|
8159 |
+
split: test
|
8160 |
+
revision: bb9466bac8153a0349341eb1b22e06409e78ef4e
|
8161 |
metrics:
|
8162 |
- type: main_score
|
8163 |
value: 39.983999999999995
|
|
|
8441 |
value: 0.302
|
8442 |
- type: recall_at_5
|
8443 |
value: 0.482
|
8444 |
+
- task:
|
8445 |
type: Retrieval
|
8446 |
+
dataset:
|
|
|
8447 |
name: MTEB Touche2020 (default)
|
|
|
|
|
8448 |
type: mteb/touche2020
|
8449 |
+
config: default
|
8450 |
+
split: test
|
8451 |
+
revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f
|
8452 |
metrics:
|
8453 |
- type: main_score
|
8454 |
value: 13.753000000000002
|
|
|
8732 |
value: 3.024
|
8733 |
- type: recall_at_5
|
8734 |
value: 4.776
|
8735 |
+
- task:
|
8736 |
+
type: Classification
|
8737 |
+
dataset:
|
|
|
8738 |
name: MTEB ToxicConversationsClassification (default)
|
|
|
|
|
8739 |
type: mteb/toxic_conversations_50k
|
8740 |
+
config: default
|
8741 |
+
split: test
|
8742 |
+
revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de
|
8743 |
metrics:
|
8744 |
- type: accuracy
|
8745 |
value: 65.6884765625
|
|
|
8753 |
value: 73.34420433686675
|
8754 |
- type: main_score
|
8755 |
value: 65.6884765625
|
8756 |
+
- task:
|
8757 |
type: Classification
|
8758 |
+
dataset:
|
|
|
8759 |
name: MTEB TweetSentimentExtractionClassification (default)
|
|
|
|
|
8760 |
type: mteb/tweet_sentiment_extraction
|
8761 |
+
config: default
|
8762 |
+
split: test
|
8763 |
+
revision: d604517c81ca91fe16a244d1248fc021f9ecee7a
|
8764 |
metrics:
|
8765 |
- type: accuracy
|
8766 |
value: 49.83305036785513
|
|
|
8770 |
value: 49.32130156716104
|
8771 |
- type: main_score
|
8772 |
value: 49.83305036785513
|
8773 |
+
- task:
|
8774 |
+
type: Clustering
|
8775 |
+
dataset:
|
|
|
8776 |
name: MTEB TwentyNewsgroupsClustering (default)
|
|
|
|
|
8777 |
type: mteb/twentynewsgroups-clustering
|
8778 |
+
config: default
|
8779 |
+
split: test
|
8780 |
+
revision: 6125ec4e24fa026cec8a478383ee943acfbd5449
|
8781 |
metrics:
|
8782 |
- type: main_score
|
8783 |
value: 25.27920179659098
|
|
|
8785 |
value: 25.27920179659098
|
8786 |
- type: v_measure_std
|
8787 |
value: 2.092324622279832
|
8788 |
+
- task:
|
8789 |
+
type: PairClassification
|
8790 |
+
dataset:
|
|
|
8791 |
name: MTEB TwitterSemEval2015 (default)
|
|
|
|
|
8792 |
type: mteb/twittersemeval2015-pairclassification
|
8793 |
+
config: default
|
8794 |
+
split: test
|
8795 |
+
revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1
|
8796 |
metrics:
|
8797 |
- type: cosine_accuracy
|
8798 |
value: 82.19586338439531
|
|
|
8876 |
value: 54.3767840152236
|
8877 |
- type: similarity_recall
|
8878 |
value: 60.31662269129288
|
8879 |
+
- task:
|
8880 |
type: PairClassification
|
8881 |
+
dataset:
|
|
|
8882 |
name: MTEB TwitterURLCorpus (default)
|
|
|
|
|
8883 |
type: mteb/twitterurlcorpus-pairclassification
|
8884 |
+
config: default
|
8885 |
+
split: test
|
8886 |
+
revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf
|
8887 |
metrics:
|
8888 |
- type: cosine_accuracy
|
8889 |
value: 85.86758256684907
|
|
|
8967 |
value: 69.39391707784078
|
8968 |
- type: similarity_recall
|
8969 |
value: 72.55158607945796
|
8970 |
---
|
8971 |
|
8972 |
# M2V_base_glove_subword Model Card
|
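As a quick orientation before the card details, the snippet below is a minimal sketch of loading this model with the `model2vec` library's `StaticModel` API. The repository id shown is an assumed placeholder, not taken from this card; replace it with the model's actual Hub path.

```python
# Minimal sketch: load a model2vec static embedding model and encode text.
# Assumptions: the `model2vec` package is installed (pip install model2vec)
# and the model is published under the (assumed) repository id below.
from model2vec import StaticModel

# Load the static embedding model from the Hugging Face Hub.
model = StaticModel.from_pretrained("minishlab/M2V_base_glove_subword")  # assumed repo id

# Encode sentences into dense vectors; `encode` returns a NumPy array of shape (n, dim).
embeddings = model.encode([
    "A static embedding model distilled with model2vec.",
    "Subword tokenization with GloVe-initialized vectors.",
])
print(embeddings.shape)
```

The resulting embeddings are plain NumPy vectors, so they can be fed directly into cosine-similarity search or a lightweight downstream classifier.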